List of usage examples for the org.apache.commons.compress.archivers.tar.TarArchiveInputStream constructor
public TarArchiveInputStream(InputStream is)
From source file:org.apache.flex.utilities.converter.flash.FlashConverter.java
/** * This method generates those artifacts that resemble the runtime part of the Flash SDK. * * @throws ConverterException/*from w ww .j a va2 s . co m*/ */ protected void generateRuntimeArtifacts() throws ConverterException { // Create a list of all libs that should belong to the Flash SDK runtime. final File directory = new File(rootSourceDirectory, "runtimes" + File.separator + "player"); if (!directory.exists() || !directory.isDirectory()) { System.out.println("Skipping runtime generation."); return; } final List<File> playerVersions = new ArrayList<File>(); playerVersions.addAll(Arrays.asList(directory.listFiles(new FlashRuntimeFilter()))); // In really old SDKs the flash-player was installed in the players directory directly. if (new File(directory, "win").exists()) { playerVersions.add(directory); } // Generate artifacts for every jar in the input directories. for (final File versionDir : playerVersions) { // The flash-player 9 is installed directly in the player directory. String playerVersionString; if (versionDir == directory) { playerVersionString = "9.0"; } else { playerVersionString = versionDir.getName(); } final double playerVersion = Double.valueOf(playerVersionString); final NumberFormat doubleFormat = NumberFormat.getInstance(Locale.US); doubleFormat.setMinimumFractionDigits(1); doubleFormat.setMaximumFractionDigits(1); final String version = doubleFormat.format(playerVersion); final MavenArtifact playerArtifact = new MavenArtifact(); playerArtifact.setGroupId("com.adobe.flash"); playerArtifact.setArtifactId("runtime"); playerArtifact.setVersion(version); playerArtifact.setPackaging("exe"); // Deploy Windows binaries. final File windowsDirectory = new File(versionDir, "win"); if (windowsDirectory.exists()) { // Find out if a flash-player binary exists. 
File flashPlayerBinary = null; if (new File(windowsDirectory, "FlashPlayerDebugger.exe").exists()) { flashPlayerBinary = new File(windowsDirectory, "FlashPlayerDebugger.exe"); } else if (new File(windowsDirectory, "FlashPlayer.exe").exists()) { flashPlayerBinary = new File(windowsDirectory, "FlashPlayer.exe"); } // If a binary exists, copy it to the target and create a pom for it. if (flashPlayerBinary != null) { playerArtifact.addBinaryArtifact("win", flashPlayerBinary); } } // Deploy Mac binaries. final File macDirectory = new File(versionDir, "mac"); if (macDirectory.exists()) { // Find out if a flash-player binary exists. File flashPlayerBinary = null; if (new File(macDirectory, "Flash Player.app.zip").exists()) { flashPlayerBinary = new File(macDirectory, "Flash Player.app.zip"); } else if (new File(macDirectory, "Flash Player Debugger.app.zip").exists()) { flashPlayerBinary = new File(macDirectory, "Flash Player Debugger.app.zip"); } // If a binary exists, copy it to the target and create a pom for it. if (flashPlayerBinary != null) { playerArtifact.addBinaryArtifact("mac", flashPlayerBinary); } } // Deploy Linux binaries. final File lnxDirectory = new File(versionDir, "lnx"); if (lnxDirectory.exists()) { // Find out if a flash-player binary exists. File flashPlayerBinary; if (new File(lnxDirectory, "flashplayer.tar.gz").exists()) { flashPlayerBinary = new File(lnxDirectory, "flashplayer.tar.gz"); } else if (new File(lnxDirectory, "flashplayerdebugger.tar.gz").exists()) { flashPlayerBinary = new File(lnxDirectory, "flashplayerdebugger.tar.gz"); } else { throw new ConverterException("Couldn't find player archive."); } // Decompress the archive. // First unzip it. 
final FileInputStream fin; try { fin = new FileInputStream(flashPlayerBinary); final BufferedInputStream in = new BufferedInputStream(fin); final File tempTarFile = File.createTempFile("flex-sdk-linux-flashplayer-binary-" + version, ".tar"); final FileOutputStream out = new FileOutputStream(tempTarFile); final GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in); final byte[] buffer = new byte[1024]; int n; while (-1 != (n = gzIn.read(buffer))) { out.write(buffer, 0, n); } out.close(); gzIn.close(); // Then untar it. File uncompressedBinary = null; final FileInputStream tarFileInputStream = new FileInputStream(tempTarFile); final TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream( tarFileInputStream); ArchiveEntry entry; while ((entry = tarArchiveInputStream.getNextEntry()) != null) { if ("flashplayer".equals(entry.getName())) { uncompressedBinary = File.createTempFile("flex-sdk-linux-flashplayer-binary-" + version, ".uexe"); final FileOutputStream uncompressedBinaryOutputStream = new FileOutputStream( uncompressedBinary); while (-1 != (n = tarArchiveInputStream.read(buffer))) { uncompressedBinaryOutputStream.write(buffer, 0, n); } uncompressedBinaryOutputStream.close(); } else if ("flashplayerdebugger".equals(entry.getName())) { uncompressedBinary = File.createTempFile("flex-sdk-linux-flashplayer-binary-" + version, ".uexe"); final FileOutputStream uncompressedBinaryOutputStream = new FileOutputStream( uncompressedBinary); while (-1 != (n = tarArchiveInputStream.read(buffer))) { uncompressedBinaryOutputStream.write(buffer, 0, n); } uncompressedBinaryOutputStream.close(); } } tarFileInputStream.close(); // If a binary exists, copy it to the target and create a pom for it. 
if (uncompressedBinary != null) { playerArtifact.addBinaryArtifact("linux", flashPlayerBinary); } } catch (FileNotFoundException e) { throw new ConverterException("Error processing the linux player tar file", e); } catch (IOException e) { throw new ConverterException("Error processing the linux player tar file", e); } } // Write this artifact to file. writeArtifact(playerArtifact); } }
From source file:org.apache.flex.utilities.converter.retrievers.BaseRetriever.java
protected void unpack(File inputArchive, File targetDirectory) throws RetrieverException { if (!targetDirectory.mkdirs()) { throw new RetrieverException( "Unable to create extraction directory " + targetDirectory.getAbsolutePath()); }// w w w . j a v a2 s . co m ArchiveInputStream archiveInputStream = null; ArchiveEntry entry; try { final CountingInputStream inputStream = new CountingInputStream(new FileInputStream(inputArchive)); final long inputFileSize = inputArchive.length(); if (inputArchive.getName().endsWith(".tbz2")) { archiveInputStream = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream)); } else { archiveInputStream = new ArchiveStreamFactory() .createArchiveInputStream(new BufferedInputStream(inputStream)); } final ProgressBar progressBar = new ProgressBar(inputFileSize); while ((entry = archiveInputStream.getNextEntry()) != null) { final File outputFile = new File(targetDirectory, entry.getName()); // Entry is a directory. if (entry.isDirectory()) { if (!outputFile.exists()) { if (!outputFile.mkdirs()) { throw new RetrieverException( "Could not create output directory " + outputFile.getAbsolutePath()); } } } // Entry is a file. else { final byte[] data = new byte[BUFFER_MAX]; final FileOutputStream fos = new FileOutputStream(outputFile); BufferedOutputStream dest = null; try { dest = new BufferedOutputStream(fos, BUFFER_MAX); int count; while ((count = archiveInputStream.read(data, 0, BUFFER_MAX)) != -1) { dest.write(data, 0, count); progressBar.updateProgress(inputStream.getBytesRead()); } } finally { if (dest != null) { dest.flush(); dest.close(); } } } progressBar.updateProgress(inputStream.getBytesRead()); } } catch (FileNotFoundException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } catch (ArchiveException e) { e.printStackTrace(); } finally { if (archiveInputStream != null) { try { archiveInputStream.close(); } catch (Exception e) { // Ignore... } } } }
From source file:org.apache.flume.test.util.StagedInstall.java
/**
 * Extracts the given tar file into the destination directory, recreating the
 * directory structure of the archive.
 *
 * @param tarFile the tar archive to extract.
 * @param destDir the directory to extract into.
 * @throws Exception if the archive cannot be read, an entry escapes the
 *                   destination directory, or an entry cannot be written.
 */
private void untarTarFile(File tarFile, File destDir) throws Exception {
    TarArchiveInputStream tarInputStream = null;
    try {
        tarInputStream = new TarArchiveInputStream(new FileInputStream(tarFile));
        final String canonicalDest = destDir.getCanonicalPath();
        TarArchiveEntry entry = null;
        while ((entry = tarInputStream.getNextTarEntry()) != null) {
            String name = entry.getName();
            LOGGER.debug("Next file: " + name);
            File destFile = new File(destDir, entry.getName());

            // Guard against "zip slip": entry paths must stay inside destDir.
            final String canonicalEntry = destFile.getCanonicalPath();
            if (!canonicalEntry.equals(canonicalDest)
                    && !canonicalEntry.startsWith(canonicalDest + File.separator)) {
                throw new IOException("Tar entry resolves outside of destination directory: " + name);
            }

            if (entry.isDirectory()) {
                destFile.mkdirs();
                continue;
            }
            // Ensure the parent exists; some tars list files before their directories.
            File destParent = destFile.getParentFile();
            destParent.mkdirs();
            OutputStream entryOutputStream = null;
            try {
                entryOutputStream = new FileOutputStream(destFile);
                byte[] buffer = new byte[2048];
                int length = 0;
                while ((length = tarInputStream.read(buffer, 0, 2048)) != -1) {
                    entryOutputStream.write(buffer, 0, length);
                }
            } catch (Exception ex) {
                LOGGER.error("Exception while expanding tar file", ex);
                throw ex;
            } finally {
                if (entryOutputStream != null) {
                    try {
                        entryOutputStream.close();
                    } catch (Exception ex) {
                        LOGGER.warn("Failed to close entry output stream", ex);
                    }
                }
            }
        }
    } catch (Exception ex) {
        LOGGER.error("Exception caught while untarring tar file: " + tarFile.getAbsolutePath(), ex);
        throw ex;
    } finally {
        if (tarInputStream != null) {
            try {
                tarInputStream.close();
            } catch (Exception ex) {
                LOGGER.warn("Unable to close tar input stream: " + tarFile.getCanonicalPath(), ex);
            }
        }
    }
}
From source file:org.apache.gobblin.data.management.copy.writer.TarArchiveInputStreamDataWriter.java
/**
 * Untars the passed in {@link FileAwareInputStream} to the task's staging directory. Uses the name of the root
 * {@link TarArchiveEntry} in the stream as the directory name for the untarred file. The method also commits the data
 * by moving the file from staging to output directory.
 *
 * @param inputStream  tar stream to expand; registered with the closer and closed on exit.
 * @param writeAt      staging path whose name replaces the tar's root directory name.
 * @param copyableFile source file metadata, used for logging and instrumentation.
 * @throws IOException if an entry resolves outside the output directory or any HDFS write fails.
 *
 * @see org.apache.gobblin.data.management.copy.writer.FileAwareInputStreamDataWriter#write(org.apache.gobblin.data.management.copy.FileAwareInputStream)
 */
@Override
public void writeImpl(InputStream inputStream, Path writeAt, CopyableFile copyableFile) throws IOException {
    this.closer.register(inputStream);
    TarArchiveInputStream tarIn = new TarArchiveInputStream(inputStream);
    // A single channel wraps the tar stream; StreamCopier reads the current entry
    // from it on each loop iteration, so it must not be reopened per entry.
    final ReadableByteChannel inputChannel = Channels.newChannel(tarIn);
    TarArchiveEntry tarEntry;

    // flush the first entry in the tar, which is just the root directory
    tarEntry = tarIn.getNextTarEntry();
    // Root entry name with path separators stripped; every subsequent entry path
    // has this prefix swapped for the staging directory name.
    String tarEntryRootName = StringUtils.remove(tarEntry.getName(), Path.SEPARATOR);

    log.info("Unarchiving at " + writeAt);

    try {
        while ((tarEntry = tarIn.getNextTarEntry()) != null) {
            // the API tarEntry.getName() is misleading, it is actually the path of the tarEntry in the tar file
            String newTarEntryPath = tarEntry.getName().replace(tarEntryRootName, writeAt.getName());
            Path tarEntryStagingPath = new Path(writeAt.getParent(), newTarEntryPath);
            // Path-traversal guard: entries must resolve under the output directory.
            if (!FileUtils.isSubPath(writeAt.getParent(), tarEntryStagingPath)) {
                throw new IOException(
                        String.format("Extracted file: %s is trying to write outside of output directory: %s",
                                tarEntryStagingPath, writeAt.getParent()));
            }

            if (tarEntry.isDirectory() && !this.fs.exists(tarEntryStagingPath)) {
                this.fs.mkdirs(tarEntryStagingPath);
            } else if (!tarEntry.isDirectory()) {
                // Overwrite any pre-existing staging file (create with overwrite=true).
                FSDataOutputStream out = this.fs.create(tarEntryStagingPath, true);
                final WritableByteChannel outputChannel = Channels.newChannel(out);
                try {
                    // Copies exactly the bytes of the current tar entry from the shared input channel.
                    StreamCopier copier = new StreamCopier(inputChannel, outputChannel);
                    if (isInstrumentationEnabled()) {
                        copier.withCopySpeedMeter(this.copySpeedMeter);
                    }
                    this.bytesWritten.addAndGet(copier.copy());
                    if (isInstrumentationEnabled()) {
                        log.info("File {}: copied {} bytes, average rate: {} B/s",
                                copyableFile.getOrigin().getPath(), this.copySpeedMeter.getCount(),
                                this.copySpeedMeter.getMeanRate());
                    } else {
                        log.info("File {} copied.", copyableFile.getOrigin().getPath());
                    }
                } finally {
                    out.close();
                    outputChannel.close();
                }
            }
        }
    } finally {
        tarIn.close();
        inputChannel.close();
        inputStream.close();
    }
}
From source file:org.apache.hadoop.fs.tar.TarFSUtils.java
public static TarArchiveInputStream createTarInputStream(FileSystem fs, Path tarPath, long offset) throws URISyntaxException, IOException { FSDataInputStream fsdis = fs.open(tarPath); fsdis.seek(offset);// w w w. j a va 2 s. c om return new TarArchiveInputStream(new BufferedInputStream(fsdis)); }
From source file:org.apache.hadoop.fs.tar.TarFSUtils.java
/**
 * Wraps an arbitrary input stream as a tar archive stream.
 *
 * @param in raw input stream positioned at the start of tar data.
 * @return a tar archive stream reading from {@code in}.
 * @throws IOException declared for API compatibility.
 * @deprecated prefer the offset-aware {@link FileSystem}-based overload in this class.
 */
@Deprecated
public static TarArchiveInputStream createTarInputStream(InputStream in) throws IOException {
    final TarArchiveInputStream tarStream = new TarArchiveInputStream(in);
    return tarStream;
}
From source file:org.apache.ignite.testsuites.IgniteHadoopTestSuite.java
/**
 * Downloads and extracts an Apache product.
 *
 * Tries each configured mirror in turn until one yields a complete install; a
 * "__success" marker file records a finished installation so reruns are skipped.
 *
 * @param appName Name of application for log messages.
 * @param homeVariable Pointer to home directory of the component.
 * @param downloadPath Relative download path of tar package.
 * @param destName Local directory name to install component.
 * @throws Exception If failed.
 */
private static void download(String appName, String homeVariable, String downloadPath, String destName)
        throws Exception {
    // If the home variable already points at an existing directory, reuse that install.
    String homeVal = IgniteSystemProperties.getString(homeVariable);

    if (!F.isEmpty(homeVal) && new File(homeVal).isDirectory()) {
        X.println(homeVariable + " is set to: " + homeVal);

        return;
    }

    List<String> urls = F.asList("http://archive.apache.org/dist/", "http://apache-mirror.rbc.ru/pub/apache/",
            "http://www.eu.apache.org/dist/", "http://www.us.apache.org/dist/");

    String tmpPath = System.getProperty("java.io.tmpdir");

    X.println("tmp: " + tmpPath);

    final File install = new File(tmpPath + File.separatorChar + "__hadoop");

    final File home = new File(install, destName);

    X.println("Setting " + homeVariable + " to " + home.getAbsolutePath());

    System.setProperty(homeVariable, home.getAbsolutePath());

    // Marker file created only after a fully successful extraction.
    final File successFile = new File(home, "__success");

    if (home.exists()) {
        if (successFile.exists()) {
            X.println(appName + " distribution already exists.");

            return;
        }

        // Home exists but has no success marker: a previous attempt was interrupted.
        X.println(appName + " distribution is invalid and it will be deleted.");

        if (!U.delete(home))
            throw new IOException("Failed to delete directory: " + home.getAbsolutePath());
    }

    // Try each mirror until one succeeds; failures are logged and the partial install deleted.
    for (String url : urls) {
        if (!(install.exists() || install.mkdirs()))
            throw new IOException("Failed to create directory: " + install.getAbsolutePath());

        URL u = new URL(url + downloadPath);

        X.println("Attempting to download from: " + u);

        try {
            URLConnection c = u.openConnection();

            c.connect();

            // Stream the tar.gz directly from the connection; no temp file is kept.
            try (TarArchiveInputStream in = new TarArchiveInputStream(
                    new GzipCompressorInputStream(new BufferedInputStream(c.getInputStream(), 32 * 1024)))) {
                TarArchiveEntry entry;

                while ((entry = in.getNextTarEntry()) != null) {
                    File dest = new File(install, entry.getName());

                    if (entry.isDirectory()) {
                        if (!dest.mkdirs())
                            throw new IllegalStateException();
                    } else if (entry.isSymbolicLink()) {
                        // Important: in Hadoop installation there are symlinks, we need to create them:
                        Path theLinkItself = Paths.get(install.getAbsolutePath(), entry.getName());

                        Path linkTarget = Paths.get(entry.getLinkName());

                        Files.createSymbolicLink(theLinkItself, linkTarget);
                    } else {
                        // Regular file: ensure the parent directory exists, then copy the entry bytes.
                        File parent = dest.getParentFile();

                        if (!(parent.exists() || parent.mkdirs()))
                            throw new IllegalStateException();

                        X.print(" [" + dest);

                        try (BufferedOutputStream out = new BufferedOutputStream(
                                new FileOutputStream(dest, false), 128 * 1024)) {
                            U.copy(in, out);

                            out.flush();
                        }

                        // Restore the executable/permission bits recorded in the tar header.
                        Files.setPosixFilePermissions(dest.toPath(), modeToPermissionSet(entry.getMode()));

                        X.println("]");
                    }
                }
            }

            if (successFile.createNewFile())
                return;
        } catch (Exception e) {
            // Deliberate best-effort: log, discard the partial install and try the next mirror.
            e.printStackTrace();

            U.delete(home);
        }
    }

    throw new IllegalStateException("Failed to install " + appName + ".");
}
From source file:org.apache.karaf.decanter.kibana6.KibanaController.java
public void download() throws Exception { File target = new File(workingDirectory, KIBANA_FOLDER); if (target.exists()) { LOGGER.warn("Kibana folder already exists, download is skipped"); return;//w w w.j a va 2s .co m } LOGGER.debug("Downloading Kibana from {}", KIBANA_LOCATION); if (isWindows()) { try (ZipArchiveInputStream inputStream = new ZipArchiveInputStream( new URL(KIBANA_LOCATION).openStream())) { ZipArchiveEntry entry; while ((entry = (ZipArchiveEntry) inputStream.getNextEntry()) != null) { File file = new File(workingDirectory, entry.getName()); if (entry.isDirectory()) { file.mkdirs(); } else { int read; byte[] buffer = new byte[4096]; try (FileOutputStream outputStream = new FileOutputStream(file)) { while ((read = inputStream.read(buffer, 0, 4096)) != -1) { outputStream.write(buffer, 0, read); } } } } } } else { try (GzipCompressorInputStream gzInputStream = new GzipCompressorInputStream( new URL(KIBANA_LOCATION).openStream())) { try (TarArchiveInputStream inputStream = new TarArchiveInputStream(gzInputStream)) { TarArchiveEntry entry; while ((entry = (TarArchiveEntry) inputStream.getNextEntry()) != null) { File file = new File(workingDirectory, entry.getName()); if (entry.isDirectory()) { file.mkdirs(); } else { int read; byte[] buffer = new byte[4096]; try (FileOutputStream outputStream = new FileOutputStream(file)) { while ((read = inputStream.read(buffer, 0, 4096)) != -1) { outputStream.write(buffer, 0, read); } } file.setLastModified(entry.getLastModifiedDate().getTime()); if (entry instanceof TarArchiveEntry) { int mode = ((TarArchiveEntry) entry).getMode(); if ((mode & 00100) > 0) { file.setExecutable(true, (mode & 00001) == 0); } } } } } } } overrideConfig(); }
From source file:org.apache.karaf.kittests.Helper.java
/**
 * Extracts the bundled karaf.tar.gz from the test classpath into the target
 * directory and makes the launcher scripts in {@code bin} executable.
 *
 * @param targetDir directory to extract the Karaf kit into.
 * @throws Exception if extraction fails or the chmod process cannot be run.
 */
protected static void extractUnixKit(File targetDir) throws Exception {
    // The kit is bundled on the test classpath as a gzipped tar.
    InputStream is = Helper.class.getResourceAsStream("/karaf.tar.gz");
    extract(new TarArchiveInputStream(new GzipCompressorInputStream(is)), targetDir);

    // Tar extraction does not restore the executable bit, so chmod every script in bin/.
    File bin = new File(targetDir, "bin");
    String[] files = bin.list();
    // Fixed the raw-typed "new ArrayList()" (unchecked warning).
    List<String> args = new ArrayList<String>();
    Collections.addAll(args, "chmod", "+x");
    Collections.addAll(args, files);

    Process chmod = new ProcessBuilder().directory(new File(targetDir, "bin")).command(args).start();
    // Pump the child's stdio so it cannot block on a full pipe.
    PumpStreamHandler pump = new PumpStreamHandler(System.in, System.out, System.err);
    pump.attach(chmod);
    pump.start();
    // Give chmod at most five seconds to finish.
    waitForProcessEnd(chmod, 5000);
}
From source file:org.apache.karaf.tooling.exam.container.internal.KarafTestContainer.java
/**
 * Downloads a tar.gz distribution and extracts it into the target folder.
 *
 * Works in two stages: the gz layer is decompressed to a temporary tar file,
 * which is then untarred. The temporary file is always removed, even when
 * extraction fails (the original leaked it on the error path).
 *
 * @param sourceDistribution URL of the tar.gz distribution to download.
 * @param _targetFolder      directory to extract the distribution into.
 * @throws IOException if downloading, decompressing or extracting fails.
 */
private void extractTarGzDistribution(URL sourceDistribution, File _targetFolder) throws IOException {
    File uncompressedFile = File.createTempFile("uncompressedTarGz-", ".tar");
    try {
        // Stage 1: strip the gzip layer into the temp tar file.
        extractGzArchive(sourceDistribution.openStream(), uncompressedFile);
        // Stage 2: untar into the target folder; try-with-resources guarantees
        // the stream is closed even if extract() throws.
        try (TarArchiveInputStream tarStream = new TarArchiveInputStream(
                new FileInputStream(uncompressedFile))) {
            extract(tarStream, _targetFolder);
        }
    } finally {
        // Always clean up the temporary tar file.
        FileUtils.forceDelete(uncompressedFile);
    }
}