List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveEntry getSize
public long getSize()
From source file:org.dataconservancy.packaging.tool.impl.generator.BagItPackageAssemblerTest.java
/** * Test that the bag-info.txt file contains the parameter information passed in, including * parameters passed in after the initialization. * * Note that the package being generated in this case is empty, so the bag size and payload * oxum values will be 0./* ww w . ja v a 2 s. c om*/ * @throws CompressorException * @throws ArchiveException * @throws IOException */ @Test public void testBagItInfoFile() throws CompressorException, ArchiveException, IOException { final String paramName = "TEST_PARAMETER"; final String paramValue = "test parameter"; underTest.addParameter(paramName, paramValue); Package pkg = underTest.assemblePackage(); CompressorInputStream cis = new CompressorStreamFactory() .createCompressorInputStream(CompressorStreamFactory.GZIP, pkg.serialize()); TarArchiveInputStream ais = (TarArchiveInputStream) (new ArchiveStreamFactory() .createArchiveInputStream(ArchiveStreamFactory.TAR, cis)); String bagInfo = ""; TarArchiveEntry entry = ais.getNextTarEntry(); while (entry != null) { if (entry.getName().contains("bag-info.txt")) { byte[] content = new byte[(int) entry.getSize()]; ais.read(content, 0, (int) entry.getSize()); bagInfo = new String(content); break; } entry = ais.getNextTarEntry(); } // Test that expected initial parameters are present String expected = GeneralParameterNames.PACKAGE_NAME + ": " + packageName; assertTrue("Expected to find: " + expected, bagInfo.contains(expected)); // These two values should be 0 since there is nothing in the test package this time. expected = BagItParameterNames.BAG_SIZE + ": 0"; assertTrue("Expected to find: " + expected, bagInfo.contains(expected)); expected = BagItParameterNames.PAYLOAD_OXUM + ": 0"; assertTrue("Expected to find: " + expected, bagInfo.contains(expected)); // Test the post-init parameter expected = paramName + ": " + paramValue; assertTrue("Expected to find: " + expected, bagInfo.contains(expected)); }
From source file:org.dcm4chee.storage.test.unit.tar.TarContainerProviderTest.java
/** Asserts that two tar entries agree on both name and size. */
private static void assertTarEntryEquals(TarArchiveEntry expected, TarArchiveEntry actual) {
    final String expectedName = expected.getName();
    final String actualName = actual.getName();
    assertEquals(expectedName, actualName);

    final long expectedSize = expected.getSize();
    final long actualSize = actual.getSize();
    assertEquals(expectedSize, actualSize);
}
From source file:org.eclipse.acute.OmnisharpStreamConnectionProvider.java
/** * * @return path to server, unzipping it if necessary. Can be null is fragment is missing. *///from w w w.j a v a 2 s.c o m private @Nullable File getServer() throws IOException { File serverPath = new File(AcutePlugin.getDefault().getStateLocation().toFile(), "omnisharp-roslyn"); //$NON-NLS-1$ if (!serverPath.exists()) { serverPath.mkdirs(); try (InputStream stream = FileLocator.openStream(AcutePlugin.getDefault().getBundle(), new Path("omnisharp-roslyn.tar"), true); //$NON-NLS-1$ TarArchiveInputStream tarStream = new TarArchiveInputStream(stream);) { TarArchiveEntry entry = null; while ((entry = tarStream.getNextTarEntry()) != null) { if (!entry.isDirectory()) { File targetFile = new File(serverPath, entry.getName()); targetFile.getParentFile().mkdirs(); InputStream in = new BoundedInputStream(tarStream, entry.getSize()); // mustn't be closed try (FileOutputStream out = new FileOutputStream(targetFile);) { IOUtils.copy(in, out); if (!Platform.OS_WIN32.equals(Platform.getOS())) { int xDigit = entry.getMode() % 10; targetFile.setExecutable(xDigit > 0, (xDigit & 1) == 1); int wDigit = (entry.getMode() / 10) % 10; targetFile.setWritable(wDigit > 0, (wDigit & 1) == 1); int rDigit = (entry.getMode() / 100) % 10; targetFile.setReadable(rDigit > 0, (rDigit & 1) == 1); } } } } } } return serverPath; }
From source file:org.efaps.esjp.admin.update.UpdatePack.java
/**
 * Check revisions: unpacks the uploaded "pack" tar (optionally gzipped) into a
 * temp folder, reads its revisions.json manifest, resolves which items need
 * installing (plus items other objects depend on), and runs the eFaps update.
 *
 * @param _parameter Parameter as passed by the eFaps API
 * @return the return
 * @throws EFapsException on error
 * @throws InstallationException on error
 */
public Return execute(final Parameter _parameter) throws EFapsException, InstallationException {
    final Context context = Context.getThreadContext();
    final Context.FileParameter fileItem = context.getFileParameters().get("pack");
    // detect gzip by file name and wrap the stream accordingly
    final boolean compress = GzipUtils.isCompressedFilename(fileItem.getName());
    try (final TarArchiveInputStream tarInput = new TarArchiveInputStream(
            compress ? new GzipCompressorInputStream(fileItem.getInputStream()) : fileItem.getInputStream());) {
        // resolve a temp folder; fall back to the JVM temp dir if none configured
        File tmpfld = AppConfigHandler.get().getTempFolder();
        if (tmpfld == null) {
            final File temp = File.createTempFile("eFaps", ".tmp");
            tmpfld = temp.getParentFile();
            temp.delete();
        }
        final File updateFolder = new File(tmpfld, Update.TMPFOLDERNAME);
        if (!updateFolder.exists()) {
            updateFolder.mkdirs();
        }
        // one timestamped sub-folder per invocation
        final File dateFolder = new File(updateFolder, ((Long) new Date().getTime()).toString());
        dateFolder.mkdirs();
        // extract every tar entry to disk; map entry name -> file URL
        final Map<String, URL> files = new HashMap<>();
        TarArchiveEntry currentEntry = tarInput.getNextTarEntry();
        while (currentEntry != null) {
            final byte[] bytess = new byte[(int) currentEntry.getSize()];
            // NOTE(review): a single read() is not guaranteed to fill the
            // buffer — large entries may be written truncated; should loop
            // until getSize() bytes are consumed.
            tarInput.read(bytess);
            final File file = new File(dateFolder.getAbsolutePath() + "/" + currentEntry.getName());
            file.getParentFile().mkdirs();
            // NOTE(review): stream is not closed on a write failure; prefer
            // try-with-resources here.
            final FileOutputStream output = new FileOutputStream(file);
            output.write(bytess);
            output.close();
            files.put(currentEntry.getName(), file.toURI().toURL());
            currentEntry = tarInput.getNextTarEntry();
        }
        final Map<RevItem, InstallFile> installFiles = new HashMap<>();
        // the manifest listing every item's identifier/revision/date
        final URL json = files.get("revisions.json");
        final ObjectMapper mapper = new ObjectMapper();
        mapper.registerModule(new JodaModule());
        final List<RevItem> items = mapper.readValue(new File(json.toURI()),
                mapper.getTypeFactory().constructCollectionType(List.class, RevItem.class));
        // keep a full copy: getInstallFiles(...) removes matched items from "items"
        final List<RevItem> allItems = new ArrayList<>();
        allItems.addAll(items);
        // resolve items already known to the system, per CI type
        installFiles.putAll(getInstallFiles(files, items, CIAdmin.Abstract));
        installFiles.putAll(getInstallFiles(files, items, CIAdminUser.Abstract));
        installFiles.putAll(getInstallFiles(files, items, CIAdminAccess.AccessSet));
        installFiles.putAll(getInstallFiles(files, items, CICommon.DBPropertiesBundle));
        // whatever is left in "items" was not found above — install it anyway
        final Iterator<RevItem> iter = items.iterator();
        int i = 0;
        while (iter.hasNext()) {
            final RevItem item = iter.next();
            LOG.info("Adding unfound Item {} / {}: {}", i, items.size(), item.getIdentifier());
            final InstallFile installFile = new InstallFile().setName(item.getName4InstallFile())
                    .setURL(item.getURL(files)).setType(item.getFileType().getType())
                    .setRevision(item.getRevision()).setDate(item.getDate());
            installFiles.put(item, installFile);
            i++;
        }
        // deterministic install order by file name
        final List<InstallFile> installFileList = new ArrayList<>(installFiles.values());
        Collections.sort(installFileList, new Comparator<InstallFile>() {
            @Override
            public int compare(final InstallFile _installFile0, final InstallFile _installFile1) {
                return _installFile0.getName().compareTo(_installFile1.getName());
            }
        });
        final List<InstallFile> dependendFileList = new ArrayList<>();
        // check if a object that depends on another object must be added to the update
        final Map<String, String> depenMap = getDependendMap();
        final Set<String> tobeAdded = new HashSet<>();
        for (final RevItem item : installFiles.keySet()) {
            if (depenMap.containsKey(item.getIdentifier())) {
                tobeAdded.add(depenMap.get(item.getIdentifier()));
            }
        }
        if (!tobeAdded.isEmpty()) {
            // check if the object to be added is already part ot the list
            for (final RevItem item : installFiles.keySet()) {
                final Iterator<String> tobeiter = tobeAdded.iterator();
                while (tobeiter.hasNext()) {
                    final String ident = tobeiter.next();
                    if (item.getIdentifier().equals(ident)) {
                        tobeiter.remove();
                    }
                }
            }
        }
        if (!tobeAdded.isEmpty()) {
            i = 1;
            // add the objects to the list taht are missing
            for (final RevItem item : allItems) {
                if (tobeAdded.contains(item.getIdentifier())) {
                    LOG.info("Adding releated Item {} / {}: {}", i, tobeAdded.size(), item);
                    final InstallFile installFile = new InstallFile().setName(item.getName4InstallFile())
                            .setURL(item.getURL(files)).setType(item.getFileType().getType())
                            .setRevision(item.getRevision()).setDate(item.getDate());
                    dependendFileList.add(installFile);
                    i++;
                }
            }
        }
        // run the main update, then a second pass for dependent items
        if (!installFileList.isEmpty()) {
            final Install install = new Install(true);
            for (final InstallFile installFile : installFileList) {
                LOG.info("...Adding to Update: '{}' ", installFile.getName());
                install.addFile(installFile);
            }
            install.updateLatest(null);
        }
        if (!dependendFileList.isEmpty()) {
            LOG.info("Update for related Items");
            final Install install = new Install(true);
            for (final InstallFile installFile : dependendFileList) {
                LOG.info("...Adding to Update: '{}' ", installFile.getName());
                install.addFile(installFile);
            }
            install.updateLatest(null);
        }
        LOG.info("Terminated update.");
    } catch (final IOException e) {
        LOG.error("Catched", e);
    } catch (final URISyntaxException e) {
        LOG.error("Catched", e);
    }
    return new Return();
}
From source file:org.fabrician.maven.plugins.CompressUtils.java
private static ArchiveEntry createArchiveEntry(ArchiveEntry entry, OutputStream out, String alternateBaseDir) throws IOException { String substitutedName = substituteAlternateBaseDir(entry, alternateBaseDir); if (out instanceof TarArchiveOutputStream) { TarArchiveEntry newEntry = new TarArchiveEntry(substitutedName); newEntry.setSize(entry.getSize()); newEntry.setModTime(entry.getLastModifiedDate()); if (entry instanceof TarArchiveEntry) { TarArchiveEntry old = (TarArchiveEntry) entry; newEntry.setSize(old.getSize()); newEntry.setIds(old.getUserId(), old.getGroupId()); newEntry.setNames(old.getUserName(), old.getGroupName()); }//from w ww .ja va 2 s . co m return newEntry; } else if (entry instanceof ZipArchiveEntry) { ZipArchiveEntry old = (ZipArchiveEntry) entry; ZipArchiveEntry zip = new ZipArchiveEntry(substitutedName); zip.setInternalAttributes(old.getInternalAttributes()); zip.setExternalAttributes(old.getExternalAttributes()); zip.setExtraFields(old.getExtraFields(true)); return zip; } else { return new ZipArchiveEntry(substitutedName); } }
From source file:org.jboss.tools.openshift.reddeer.utils.FileHelper.java
/**
 * Extracts a .tar.gz archive into the given output directory, creating parent
 * directories as needed and marking extracted files rwx.
 *
 * @param archive the .tar.gz file to extract
 * @param outputDirectory target directory for the archive contents
 * @throws OpenShiftToolsException wrapping any I/O failure
 */
public static void extractTarGz(File archive, File outputDirectory) {
    InputStream inputStream = null;
    try {
        logger.info("Opening stream to gzip archive");
        inputStream = new GzipCompressorInputStream(new FileInputStream(archive));
    } catch (IOException ex) {
        throw new OpenShiftToolsException(
                "Exception occured while processing tar.gz file.\n" + ex.getMessage());
    }
    logger.info("Opening stream to tar archive");
    TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(inputStream);
    TarArchiveEntry currentEntry = null;
    try {
        while ((currentEntry = tarArchiveInputStream.getNextTarEntry()) != null) {
            if (currentEntry.isDirectory()) {
                logger.info("Creating directory: " + currentEntry.getName());
                createDirectory(new File(outputDirectory, currentEntry.getName()));
            } else {
                File outputFile = new File(outputDirectory, currentEntry.getName());
                if (!outputFile.getParentFile().exists()) {
                    logger.info("Creating directory: " + outputFile.getParentFile());
                    createDirectory(outputFile.getParentFile());
                }
                // try-with-resources guarantees the per-entry stream is closed
                // even when copy() throws (the old shared-field pattern leaked
                // every stream opened before a failure).
                try (BufferedOutputStream outputStream = new BufferedOutputStream(
                        new FileOutputStream(outputFile))) {
                    logger.info("Extracting file: " + currentEntry.getName());
                    copy(tarArchiveInputStream, outputStream, (int) currentEntry.getSize());
                }
                outputFile.setExecutable(true);
                outputFile.setReadable(true);
                outputFile.setWritable(true);
            }
        }
    } catch (IOException e) {
        throw new OpenShiftToolsException("Exception occured while processing tar.gz file.\n" + e.getMessage());
    } finally {
        try {
            tarArchiveInputStream.close();
        } catch (Exception ex) {
            // best effort close; original behavior is to ignore close failures
        }
    }
}
From source file:org.jenkinsci.plugins.os_ci.utils.CompressUtils.java
/**
 * Unpacks a .tar.gz file into its parent directory.
 *
 * @param file the gzipped tar archive to extract
 * @throws IOException if the archive cannot be opened or an entry cannot be written
 */
public static void untarFile(File file) throws IOException {
    String currentDir = file.getParent();
    // try-with-resources closes all three stream layers; the original leaked
    // every stream on both the success and failure paths.
    try (FileInputStream fileInputStream = new FileInputStream(file);
            GZIPInputStream gzipInputStream = new GZIPInputStream(fileInputStream);
            TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(gzipInputStream)) {
        TarArchiveEntry tarArchiveEntry;
        while (null != (tarArchiveEntry = tarArchiveInputStream.getNextTarEntry())) {
            if (tarArchiveEntry.isDirectory()) {
                FileUtils.forceMkdir(new File(currentDir + File.separator + tarArchiveEntry.getName()));
            } else {
                // read() may return fewer bytes than requested: loop until the
                // whole entry is consumed (the original issued a single read).
                byte[] content = new byte[(int) tarArchiveEntry.getSize()];
                int offset = 0;
                while (offset < content.length) {
                    int read = tarArchiveInputStream.read(content, offset, content.length - offset);
                    if (read < 0) {
                        throw new IOException(
                                "Unexpected end of archive while reading " + tarArchiveEntry.getName());
                    }
                    offset += read;
                }
                try (FileOutputStream outputFile = new FileOutputStream(
                        currentDir + File.separator + tarArchiveEntry.getName())) {
                    org.apache.commons.io.IOUtils.write(content, outputFile);
                }
            }
        }
    } catch (FileNotFoundException e) {
        // preserve the cause; getStackTrace().toString() only printed an array address
        throw new IOException(e);
    }
}
From source file:org.kitesdk.cli.commands.TarImportCommand.java
@Override public int run() throws IOException { Preconditions.checkArgument(targets != null && targets.size() == 2, "Tar path and target dataset URI are required."); Preconditions.checkArgument(SUPPORTED_TAR_COMPRESSION_TYPES.contains(compressionType), "Compression type " + compressionType + " is not supported"); String source = targets.get(0); String datasetUri = targets.get(1); long blockSize = getConf().getLong("dfs.blocksize", DEFAULT_BLOCK_SIZE); int success = 0; View<TarFileEntry> targetDataset; if (Datasets.exists(datasetUri)) { console.debug("Using existing dataset: {}", datasetUri); targetDataset = Datasets.load(datasetUri, TarFileEntry.class); } else {//from w w w . ja v a2 s. c o m console.info("Creating new dataset: {}", datasetUri); DatasetDescriptor.Builder descriptorBuilder = new DatasetDescriptor.Builder(); descriptorBuilder.format(Formats.AVRO); descriptorBuilder.schema(TarFileEntry.class); targetDataset = Datasets.create(datasetUri, descriptorBuilder.build(), TarFileEntry.class); } DatasetWriter<TarFileEntry> writer = targetDataset.newWriter(); // Create a Tar input stream wrapped in appropriate decompressor // TODO: Enhancement would be to use native compression libs TarArchiveInputStream tis; CompressionType tarCompressionType = CompressionType.NONE; if (compressionType.isEmpty()) { if (source.endsWith(".tar")) { tarCompressionType = CompressionType.NONE; } else if (source.endsWith(".tar.gz")) { tarCompressionType = CompressionType.GZIP; } else if (source.endsWith(".tar.bz2")) { tarCompressionType = CompressionType.BZIP2; } } else if (compressionType.equals("gzip")) { tarCompressionType = CompressionType.GZIP; } else if (compressionType.equals("bzip2")) { tarCompressionType = CompressionType.BZIP2; } else { tarCompressionType = CompressionType.NONE; } console.info("Using {} compression", tarCompressionType); switch (tarCompressionType) { case GZIP: tis = new TarArchiveInputStream(new GzipCompressorInputStream(open(source))); break; case BZIP2: 
tis = new TarArchiveInputStream(new BZip2CompressorInputStream(open(source))); break; case NONE: default: tis = new TarArchiveInputStream(open(source)); } TarArchiveEntry entry; try { int count = 0; while ((entry = tis.getNextTarEntry()) != null) { if (!entry.isDirectory()) { long size = entry.getSize(); if (size >= blockSize) { console.warn( "Entry \"{}\" (size {}) is larger than the " + "HDFS block size of {}. This may result in remote block reads", new Object[] { entry.getName(), size, blockSize }); } byte[] buf = new byte[(int) size]; try { IOUtils.readFully(tis, buf, 0, (int) size); } catch (IOException e) { console.error("Did not read entry {} successfully (entry size {})", entry.getName(), size); success = 1; throw e; } writer.write(TarFileEntry.newBuilder().setFilename(entry.getName()) .setFilecontent(ByteBuffer.wrap(buf)).build()); count++; } } console.info("Added {} records to \"{}\"", count, targetDataset.getDataset().getName()); } finally { IOUtils.closeStream(writer); IOUtils.closeStream(tis); } return success; }
From source file:org.lobid.lodmill.TarReader.java
@Override public void process(final Reader reader) { TarArchiveInputStream tarInputStream = null; try {//from w w w. j av a2 s . co m tarInputStream = new TarArchiveInputStream(new ReaderInputStream(reader)); TarArchiveEntry entry = null; while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) { if (!entry.isDirectory()) { byte[] buffer = new byte[(int) entry.getSize()]; while ((tarInputStream.read(buffer)) > 0) { getReceiver().process(new StringReader(new String(buffer))); } } } tarInputStream.close(); } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally { IOUtils.closeQuietly(tarInputStream); } }
From source file:org.mulima.internal.freedb.FreeDbTarDaoImpl.java
/**
 * {@inheritDoc}
 *
 * Streams the freedb tar archive, skipping the first {@code startNum} file
 * entries and parsing up to {@code numToRead} discs (all remaining when
 * {@code numToRead < 0}).
 */
@Override
public List<Disc> getAllDiscsFromOffset(int startNum, int numToRead) {
    FileInputStream fin = null;
    BufferedInputStream bfin = null;
    TarArchiveInputStream tin = null;
    List<Disc> discs = new ArrayList<Disc>();
    try {
        fin = new FileInputStream(tarArchive);
        bfin = new BufferedInputStream(fin);
        tin = new TarArchiveInputStream(bfin);
        int currentNum = 0;
        TarArchiveEntry entry = tin.getNextTarEntry();
        ProgressBar progress = new SLF4JProgressBar("TAR getDiscs", numToRead);
        while (entry != null && (numToRead < 0 || currentNum < startNum + numToRead)) {
            if (!entry.isDirectory() && currentNum >= startNum) {
                logger.debug("Loading: " + entry.getName());
                int offset = 0;
                byte[] content = new byte[(int) entry.getSize()];
                while (offset < content.length) {
                    int read = tin.read(content, offset, content.length - offset);
                    if (read < 0) {
                        // EOF before the declared entry size: the original
                        // added -1 to offset and spun forever on a truncated
                        // archive. Bail out and let bytesToDisc reject it.
                        break;
                    }
                    offset += read;
                }
                Disc disc = bytesToDisc(content);
                if (disc == null) {
                    logger.warn("Invalid file: " + entry.getName());
                } else {
                    logger.debug(disc.toString());
                    discs.add(disc);
                }
            }
            entry = tin.getNextTarEntry();
            currentNum++;
            progress.next();
        }
        if (entry == null) {
            progress.done();
        }
    } catch (IOException e) {
        logger.error("Problem reading tar archive.", e);
        throw new UncheckedIOException("Problem reading tar archive.", e);
    } finally {
        // only the outermost successfully-created stream needs closing;
        // closing it closes the wrapped streams too
        try {
            if (tin != null) {
                tin.close();
            } else if (bfin != null) {
                bfin.close();
            } else if (fin != null) {
                fin.close();
            }
        } catch (IOException e) {
            logger.error("Problem closing streams.", e);
        }
    }
    return discs;
}