List of usage examples for the org.apache.commons.compress.archivers.ArchiveStreamFactory constructor (new ArchiveStreamFactory()).
From source file:com.continuuity.weave.kafka.client.KafkaTest.java
/**
 * Extracts the bundled {@code kafka-0.7.2.tgz} classpath resource into a fresh
 * temporary folder and returns that folder.
 *
 * @return the directory the archive was extracted into
 * @throws IOException         if reading the resource or writing an extracted file fails
 * @throws ArchiveException    if the tar stream cannot be opened
 * @throws CompressorException if the gzip stream cannot be opened
 */
private static File extractKafka() throws IOException, ArchiveException, CompressorException {
    File kafkaExtract = TMP_FOLDER.newFolder();
    InputStream kafkaResource = KafkaTest.class.getClassLoader().getResourceAsStream("kafka-0.7.2.tgz");
    ArchiveInputStream archiveInput = new ArchiveStreamFactory()
            .createArchiveInputStream(ArchiveStreamFactory.TAR,
                    new CompressorStreamFactory()
                            .createCompressorInputStream(CompressorStreamFactory.GZIP, kafkaResource));
    try {
        ArchiveEntry entry = archiveInput.getNextEntry();
        while (entry != null) {
            File file = new File(kafkaExtract, entry.getName());
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                // BUG FIX: tar streams do not guarantee that a directory entry
                // precedes the files inside it, so create the parent directory
                // before writing; the original failed on such archives.
                File parent = file.getParentFile();
                if (parent != null) {
                    parent.mkdirs();
                }
                ByteStreams.copy(archiveInput, Files.newOutputStreamSupplier(file));
            }
            entry = archiveInput.getNextEntry();
        }
    } finally {
        archiveInput.close();
    }
    return kafkaExtract;
}
From source file:io.magentys.maven.DonutMojo.java
/**
 * Finds the generated donut-report.html in the output directory and packages it
 * into a zip archive (same base name, {@code .zip} extension) in that directory.
 *
 * @throws FileNotFoundException if no donut report exists in the output directory
 * @throws IOException           if the report cannot be read or the zip written
 * @throws ArchiveException      if the zip output stream cannot be created
 */
private void zipDonutReport() throws IOException, ArchiveException {
    Optional<File> report = FileUtils
            .listFiles(outputDirectory, new RegexFileFilter("^(.*)donut-report.html$"), TrueFileFilter.INSTANCE)
            .stream().findFirst();
    if (!report.isPresent()) {
        throw new FileNotFoundException(
                String.format("Cannot find a donut report in folder: %s", outputDirectory.getAbsolutePath()));
    }
    File source = report.get();
    File zipFile = new File(outputDirectory, FilenameUtils.removeExtension(source.getName()) + ".zip");
    // try-with-resources closes the zip stream (and underlying file) and the
    // report input stream in all cases.
    try (OutputStream os = new FileOutputStream(zipFile);
            ArchiveOutputStream aos = new ArchiveStreamFactory()
                    .createArchiveOutputStream(ArchiveStreamFactory.ZIP, os);
            BufferedInputStream is = new BufferedInputStream(new FileInputStream(source))) {
        aos.putArchiveEntry(new ZipArchiveEntry(source.getName()));
        IOUtils.copy(is, aos);
        aos.closeArchiveEntry();
        aos.finish();
    }
}
From source file:com.github.wolfposd.jdpkg.deb.DpkgDeb.java
/**
 * Packages the given input files into an AR archive (the container format used
 * by Debian {@code .deb} files), storing each file under its simple name.
 *
 * @param destination the archive file to create
 * @param inputsfiles the files to add
 * @throws ArchiveException if the AR output stream cannot be created
 * @throws IOException      if reading an input file or writing the archive fails
 */
public static void writeDebFile(File destination, File[] inputsfiles) throws ArchiveException, IOException {
    // BUG FIX: the original never closed the archive or the per-file input
    // streams when an exception occurred; try-with-resources guarantees cleanup.
    try (ArchiveOutputStream archive = new ArchiveStreamFactory()
            .createArchiveOutputStream(ArchiveStreamFactory.AR, new FileOutputStream(destination))) {
        for (File file : inputsfiles) {
            ArArchiveEntry entry = new ArArchiveEntry(file, file.getName());
            archive.putArchiveEntry(entry);
            try (BufferedInputStream input = new BufferedInputStream(new FileInputStream(file))) {
                IOUtils.copy(input, archive);
            }
            archive.closeArchiveEntry();
        }
        archive.finish();
    }
}
From source file:com.geewhiz.pacify.utils.ArchiveUtils.java
/**
 * Replaces entries inside an existing archive in place.
 *
 * A temporary archive with the same permissions is written containing the
 * replacement entries (via a commons-compress ChangeSet) plus all untouched
 * entries copied from the original; the original file is then deleted and the
 * temporary file renamed over it.
 *
 * @param archive        the archive file to modify in place
 * @param archiveType    archive type name understood by ArchiveStreamFactory (e.g. "jar", "zip")
 * @param filesToReplace map from entry path inside the archive to the file whose
 *                       content should replace that entry
 * @throws RuntimeException wrapping any IOException/ArchiveException, or if the
 *                          final delete/rename of the archive fails
 */
public static void replaceFilesInArchive(File archive, String archiveType, Map<String, File> filesToReplace) {
    ArchiveStreamFactory factory = new ArchiveStreamFactory();
    File manifest = null;
    InputStream archiveStream = null;
    ArchiveInputStream ais = null;
    ArchiveOutputStream aos = null;
    // input streams handed to the ChangeSet must stay open until perform() runs,
    // so they are collected here and closed in the finally block
    List<FileInputStream> streamsToClose = new ArrayList<FileInputStream>();
    File tmpArchive = FileUtils.createEmptyFileWithSamePermissions(archive);
    try {
        aos = factory.createArchiveOutputStream(archiveType, new FileOutputStream(tmpArchive));
        ChangeSet changes = new ChangeSet();
        if (ArchiveStreamFactory.JAR.equalsIgnoreCase(archiveType)) {
            // presumably ensures the manifest ends up first in the jar;
            // returns a temp file cleaned up after the rewrite — see manifestWorkaround
            manifest = manifestWorkaround(archive, archiveType, aos, changes, streamsToClose);
        }
        for (String filePath : filesToReplace.keySet()) {
            File replaceWithFile = filesToReplace.get(filePath);
            ArchiveEntry archiveEntry = aos.createArchiveEntry(replaceWithFile, filePath);
            FileInputStream fis = new FileInputStream(replaceWithFile);
            streamsToClose.add(fis);
            // 'true' replaces an existing entry of the same name
            changes.add(archiveEntry, fis, true);
        }
        archiveStream = new FileInputStream(archive);
        ais = factory.createArchiveInputStream(archiveType, archiveStream);
        // copies all entries from the original archive, applying the changes
        ChangeSetPerformer performer = new ChangeSetPerformer(changes);
        performer.perform(ais, aos);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } catch (ArchiveException e) {
        throw new RuntimeException(e);
    } finally {
        for (FileInputStream fis : streamsToClose) {
            IOUtils.closeQuietly(fis);
        }
        // close output before inputs so the tmp archive is fully flushed
        IOUtils.closeQuietly(aos);
        IOUtils.closeQuietly(ais);
        IOUtils.closeQuietly(archiveStream);
    }
    if (manifest != null) {
        manifest.delete();
    }
    // swap the rewritten archive into place; both steps must succeed
    if (!archive.delete()) {
        throw new RuntimeException("Couldn't delete file [" + archive.getPath() + "]... Aborting!");
    }
    if (!tmpArchive.renameTo(archive)) {
        throw new RuntimeException("Couldn't rename filtered file from [" + tmpArchive.getPath() + "] to [" + archive.getPath() + "]... Aborting!");
    }
}
From source file:io.github.retz.executor.FileManager.java
private static ArchiveInputStream createAIS(File file) throws FileNotFoundException, IOException, ArchiveException { ArchiveStreamFactory factory = new ArchiveStreamFactory(); InputStream in = new BufferedInputStream(new FileInputStream(file)); if (file.getName().endsWith(".tar.gz") || file.getName().endsWith(".tgz")) { return factory.createArchiveInputStream(ArchiveStreamFactory.TAR, new GZIPInputStream(in)); } else if (file.getName().endsWith(".tar.bz2") || file.getName().endsWith(".tar.xz")) { // TODO: "tar, tbz2, txz. See mesos/src/launcher/fetcher.cpp for supported formats LOG.error("TODO: compression on {} must be supported", file.getName()); throw new RuntimeException(); }/*w w w . j a va2s.c o m*/ LOG.error("Not decompressing. File with unsupported suffix: {}", file); return null; }
From source file:com.digitalpebble.behemoth.util.ContentExtractor.java
private void createArchive(Path dirPath) throws IOException, ArchiveException { FileSystem fsout = FileSystem.get(dirPath.toUri(), getConf()); String archiveType = "zip"; partNum++;//from w w w . j a v a 2 s . c o m FSDataOutputStream currentArchiveOS = fsout .create(new Path(dirPath, "part_" + String.format("%06d", partNum) + "." + archiveType)); currentArchive = new ArchiveStreamFactory().createArchiveOutputStream(archiveType, currentArchiveOS); numEntriesInCurrentArchive = 0; }
From source file:ezbake.deployer.utilities.ArtifactHelpers.java
/**
 * Append to the given ArchiveInputStream writing to the given outputstream, the given entries to add.
 * This will duplicate the InputStream to the Output.
 *
 * The output is written as a gzip-compressed tar. Entries in {@code filesToAdd}
 * whose names already exist in the input archive replace the originals (the old
 * copy is skipped during the copy phase).
 *
 * @param inputStream - archive input to append to
 * @param output - what to copy the modified archive to
 * @param filesToAdd - what entries to append.
 * @throws DeploymentException if reading, writing, or archive creation fails
 */
private static void appendFilesInTarArchive(ArchiveInputStream inputStream, OutputStream output,
        Iterable<ArtifactDataEntry> filesToAdd) throws DeploymentException {
    ArchiveStreamFactory asf = new ArchiveStreamFactory();
    try {
        // index the new entries by name so duplicates in the input can be skipped
        HashMap<String, ArtifactDataEntry> newFiles = new HashMap<>();
        for (ArtifactDataEntry entry : filesToAdd) {
            newFiles.put(entry.getEntry().getName(), entry);
        }
        GZIPOutputStream gzs = new GZIPOutputStream(output);
        TarArchiveOutputStream aos = (TarArchiveOutputStream) asf
                .createArchiveOutputStream(ArchiveStreamFactory.TAR, gzs);
        // GNU long-filename mode: allow entry names longer than the 100-char tar limit
        aos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        // copy the existing entries
        ArchiveEntry nextEntry;
        while ((nextEntry = inputStream.getNextEntry()) != null) {
            //If we're passing in the same file, don't copy into the new archive
            if (!newFiles.containsKey(nextEntry.getName())) {
                aos.putArchiveEntry(nextEntry);
                IOUtils.copy(inputStream, aos);
                aos.closeArchiveEntry();
            }
        }
        // then append the new/replacement entries
        for (ArtifactDataEntry entry : filesToAdd) {
            aos.putArchiveEntry(entry.getEntry());
            IOUtils.write(entry.getData(), aos);
            aos.closeArchiveEntry();
        }
        // finish (not close) so the caller-owned output stream stays open
        aos.finish();
        gzs.finish();
    } catch (ArchiveException | IOException e) {
        log.error(e.getMessage(), e);
        // NOTE(review): the original exception is dropped here; if
        // DeploymentException has a (String, Throwable) constructor, pass 'e'
        // as the cause to preserve the stack trace — confirm and fix.
        throw new DeploymentException(e.getMessage());
    }
}
From source file:edu.ur.ir.ir_export.service.DefaultCollectionExportService.java
/** * Export all collections in the repository. * /*ww w.j av a 2s . co m*/ * @param repository - repository to export * @throws IOException */ public void export(Repository repository, File zipFileDestination) throws IOException { // create the path if it doesn't exist String path = FilenameUtils.getPath(zipFileDestination.getCanonicalPath()); if (!path.equals("")) { File pathOnly = new File(FilenameUtils.getFullPath(zipFileDestination.getCanonicalPath())); FileUtils.forceMkdir(pathOnly); } File collectionXmlFile = temporaryFileCreator.createTemporaryFile(extension); Set<FileInfo> allPictures = createXmlFile(collectionXmlFile, repository.getInstitutionalCollections(), true); FileOutputStream out = new FileOutputStream(zipFileDestination); ArchiveOutputStream os = null; try { os = new ArchiveStreamFactory().createArchiveOutputStream("zip", out); os.putArchiveEntry(new ZipArchiveEntry("collection.xml")); FileInputStream fis = null; try { log.debug("adding xml file"); fis = new FileInputStream(collectionXmlFile); IOUtils.copy(fis, os); } finally { if (fis != null) { fis.close(); fis = null; } } log.debug("adding pictures size " + allPictures.size()); for (FileInfo fileInfo : allPictures) { File f = new File(fileInfo.getFullPath()); String name = FilenameUtils.getName(fileInfo.getFullPath()); name = name + '.' + fileInfo.getExtension(); log.debug(" adding name " + name); os.putArchiveEntry(new ZipArchiveEntry(name)); try { log.debug("adding input stream"); fis = new FileInputStream(f); IOUtils.copy(fis, os); } finally { if (fis != null) { fis.close(); fis = null; } } } os.closeArchiveEntry(); out.flush(); } catch (ArchiveException e) { throw new IOException(e); } finally { if (os != null) { os.close(); os = null; } } FileUtils.deleteQuietly(collectionXmlFile); }
From source file:com.mirth.connect.util.ArchiveUtils.java
/** * Extracts an archive using generic stream factories provided by commons-compress. *///from www .j a v a 2s . c om private static void extractGenericArchive(File archiveFile, File destinationFolder) throws CompressException { try { InputStream inputStream = new BufferedInputStream(FileUtils.openInputStream(archiveFile)); try { inputStream = new CompressorStreamFactory().createCompressorInputStream(inputStream); } catch (CompressorException e) { // a compressor was not recognized in the stream, in this case we leave the inputStream as-is } ArchiveInputStream archiveInputStream = new ArchiveStreamFactory() .createArchiveInputStream(inputStream); ArchiveEntry entry; int inputOffset = 0; byte[] buffer = new byte[BUFFER_SIZE]; try { while (null != (entry = archiveInputStream.getNextEntry())) { File outputFile = new File( destinationFolder.getAbsolutePath() + IOUtils.DIR_SEPARATOR + entry.getName()); if (entry.isDirectory()) { FileUtils.forceMkdir(outputFile); } else { FileOutputStream outputStream = null; try { outputStream = FileUtils.openOutputStream(outputFile); int bytesRead; int outputOffset = 0; while ((bytesRead = archiveInputStream.read(buffer, inputOffset, BUFFER_SIZE)) > 0) { outputStream.write(buffer, outputOffset, bytesRead); inputOffset += bytesRead; outputOffset += bytesRead; } } finally { IOUtils.closeQuietly(outputStream); } } } } finally { IOUtils.closeQuietly(archiveInputStream); } } catch (Exception e) { throw new CompressException(e); } }
From source file:de.fischer.thotti.core.distbuilder.DistributionBuilder.java
public File buildDistribution() throws Exception, ArchiveException { String mahoutVersion = org.apache.mahout.Version.version(); // @todo Throw an exception if the distfile cannot created File output = targetFile;// ww w .j a va2s.c o m Set<String> addedFiles = new HashSet<String>(); List<FetchResult> fetchedArtifacts = depFetcher.fetchDependencies(); final OutputStream out = new FileOutputStream(output); ArchiveOutputStream os = new ArchiveStreamFactory().createArchiveOutputStream("zip", out); String subDirectory; for (FetchResult fetch : fetchedArtifacts) { String coord = fetch.getArtifactCoordinates(); // @todo This is not safe! It is a hack! if (coord.split(":")[0].equals(MAHOUT_GROUP_ID)) { subDirectory = MAHOUT_LIB_DIRECTORY; } else { subDirectory = EXTERNAL_LIB_DIRECTORY; } for (File file : fetch.getFiles()) { String targetCP = subDirectory + "/" + file.getName(); String targetArchive = BASE_DIRECTORY + "/" + targetCP; if (!addedFiles.contains(targetCP)) { cpBuilder.addPath(targetCP); if (getListener() != null) { StringBuilder sb = new StringBuilder(); sb.append("Add: ").append(file.toURI()).append(" as ").append(targetCP); getListener().onAddingMavenArtifact(sb.toString()); } os.putArchiveEntry(new ZipArchiveEntry(targetArchive)); IOUtils.copy(new FileInputStream(file), os); os.closeArchiveEntry(); addedFiles.add(targetCP); } } } cpBuilder.addPath(DATA_DIRECTORY); for (File file : dataFiles) { String targetCP = DATA_DIRECTORY + "/" + file.getName(); String targetArchive = BASE_DIRECTORY + "/" + targetCP; if (getListener() != null) { StringBuilder sb = new StringBuilder(); sb.append("Add: ").append(file.toURI()).append(" as ").append(targetCP); getListener().onAddingTestData(sb.toString()); } os.putArchiveEntry(new ZipArchiveEntry(targetArchive)); IOUtils.copy(new FileInputStream(file), os); os.closeArchiveEntry(); } String testCaseTargetCP = TEST_CASE_LIB_DIRECTORY + "/" + testArtifact.getName(); String testCaseTargetArchive = BASE_DIRECTORY + "/" + 
testCaseTargetCP; if (getListener() != null) { StringBuilder sb = new StringBuilder(); sb.append("Add: ").append(testArtifact.toURI()).append(" as ").append(testCaseTargetCP); listener.onAddingTestCases(sb.toString()); } os.putArchiveEntry(new ZipArchiveEntry(testCaseTargetArchive)); IOUtils.copy(new FileInputStream(testArtifact), os); os.closeArchiveEntry(); cpBuilder.addPath(testCaseTargetCP); generateRunSkript(os); out.flush(); os.close(); return output; }