List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveEntry getMode
public int getMode()
From source file:io.takari.maven.testing.executor.junit.MavenVersionResolver.java
private void unarchive(File archive, File directory) throws IOException { try (TarArchiveInputStream ais = new TarArchiveInputStream( new GzipCompressorInputStream(new FileInputStream(archive)))) { TarArchiveEntry entry; while ((entry = ais.getNextTarEntry()) != null) { if (entry.isFile()) { String name = entry.getName(); File file = new File(directory, name); file.getParentFile().mkdirs(); try (OutputStream os = new BufferedOutputStream(new FileOutputStream(file))) { copy(ais, os);/*from w w w.ja v a 2 s . c om*/ } int mode = entry.getMode(); if (mode != -1 && (mode & 0100) != 0) { try { Path path = file.toPath(); Set<PosixFilePermission> permissions = Files.getPosixFilePermissions(path); permissions.add(PosixFilePermission.OWNER_EXECUTE); Files.setPosixFilePermissions(path, permissions); } catch (UnsupportedOperationException e) { // must be windows, ignore } } } } } }
From source file:com.redhat.red.offliner.ftest.SinglePlaintextDownloadOfTarballFTest.java
private File makeTarball(final Map<String, byte[]> entries) throws IOException { File tgz = temporaryFolder.newFile(); try (TarArchiveOutputStream tarOut = new TarArchiveOutputStream( new GzipCompressorOutputStream(new FileOutputStream(tgz)))) { entries.forEach((name, content) -> { try { File entryFile = temporaryFolder.newFile(); FileUtils.writeByteArrayToFile(entryFile, content); TarArchiveEntry entry = new TarArchiveEntry(entryFile, name); // entry.setSize( content.length ); // entry.setMode( 0644 ); // entry.setGroupId( 1000 ); // entry.setUserId( 1000 ); tarOut.putArchiveEntry(entry); System.out.printf("Entry: %s mode: '0%s'\n", entry.getName(), Integer.toString(entry.getMode(), 8)); tarOut.write(content);//from w w w . jav a 2s .c om tarOut.closeArchiveEntry(); } catch (IOException e) { e.printStackTrace(); fail("Failed to write tarball"); } }); tarOut.flush(); } try (TarArchiveInputStream tarIn = new TarArchiveInputStream( new GzipCompressorInputStream(new FileInputStream(tgz)))) { TarArchiveEntry entry = null; while ((entry = tarIn.getNextTarEntry()) != null) { byte[] entryData = new byte[(int) entry.getSize()]; int read = tarIn.read(entryData, 0, entryData.length); assertThat("Not enough bytes read for: " + entry.getName(), read, equalTo((int) entry.getSize())); assertThat(entry.getName() + ": data doesn't match input", Arrays.equals(entries.get(entry.getName()), entryData), equalTo(true)); } } return tgz; }
From source file:com.playonlinux.core.utils.archive.Tar.java
/**
 * Uncompress a tar archive.
 *
 * @param inputStream
 *            the archive content, in TAR format
 * @param countingInputStream
 *            used to count the number of bytes extracted for progress reporting
 * @param outputDir
 *            the directory where files should be extracted
 * @param finalSize
 *            the expected total size, used to compute the progress percentage
 * @param stateCallback
 *            receives a progress update after each extracted entry
 * @return a list of extracted files
 * @throws ArchiveException
 *             if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream,
        final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar",
            inputStream)) {
        final String canonicalOutputDir = outputDir.getCanonicalPath();
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            // Path-traversal ("zip slip") guard: reject entries whose resolved
            // path lands outside outputDir.
            final String canonicalOutputFile = outputFile.getCanonicalPath();
            if (!canonicalOutputFile.equals(canonicalOutputDir)
                    && !canonicalOutputFile.startsWith(canonicalOutputDir + File.separator)) {
                throw new IOException("Archive entry escapes output directory: " + entry.getName());
            }
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.",
                        outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to createPrefix output directory %s.",
                            outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(),
                        entry.getMode()));
                if (entry.isSymbolicLink()) {
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()),
                            Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(debInputStream, outputFileStream);
                        // Restore the POSIX permission bits recorded in the tar header.
                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()),
                                com.playonlinux.core.utils.Files.octToPosixFilePermission(entry.getMode()));
                    }
                }
            }
            uncompressedFiles.add(outputFile);
            // Report progress as the fraction of expected bytes consumed so far.
            stateCallback.accept(new ProgressEntity.Builder()
                    .withPercent((double) countingInputStream.getCount() / (double) finalSize * (double) 100)
                    .withProgressText("Extracting " + outputFile.getName()).build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
From source file:ezbake.deployer.publishers.openShift.RhcApplication.java
private void writeTarFileToProjectDirectory(byte[] artifact) throws DeploymentException { final File gitDbPath = gitRepo.getRepository().getDirectory(); final File projectDir = gitDbPath.getParentFile(); try {//from w ww . j av a 2 s . com CompressorInputStream uncompressedInput = new GzipCompressorInputStream( new ByteArrayInputStream(artifact)); TarArchiveInputStream inputStream = new TarArchiveInputStream(uncompressedInput); // copy the existing entries TarArchiveEntry nextEntry; while ((nextEntry = (TarArchiveEntry) inputStream.getNextEntry()) != null) { File fileToWrite = new File(projectDir, nextEntry.getName()); if (nextEntry.isDirectory()) { fileToWrite.mkdirs(); } else { File house = fileToWrite.getParentFile(); if (!house.exists()) { house.mkdirs(); } copyInputStreamToFile(inputStream, fileToWrite); Files.setPosixFilePermissions(fileToWrite, nextEntry.getMode()); } } } catch (IOException e) { log.error("[" + getApplicationName() + "]" + e.getMessage(), e); throw new DeploymentException(e.getMessage()); } }
From source file:com.google.devtools.build.lib.bazel.repository.TarGzFunction.java
@Nullable @Override//from w w w.jav a 2 s . c o m public SkyValue compute(SkyKey skyKey, Environment env) throws RepositoryFunctionException { DecompressorDescriptor descriptor = (DecompressorDescriptor) skyKey.argument(); Optional<String> prefix = descriptor.prefix(); boolean foundPrefix = false; try (GZIPInputStream gzipStream = new GZIPInputStream( new FileInputStream(descriptor.archivePath().getPathFile()))) { TarArchiveInputStream tarStream = new TarArchiveInputStream(gzipStream); TarArchiveEntry entry; while ((entry = tarStream.getNextTarEntry()) != null) { StripPrefixedPath entryPath = StripPrefixedPath.maybeDeprefix(entry.getName(), prefix); foundPrefix = foundPrefix || entryPath.foundPrefix(); if (entryPath.skip()) { continue; } Path filename = descriptor.repositoryPath().getRelative(entryPath.getPathFragment()); FileSystemUtils.createDirectoryAndParents(filename.getParentDirectory()); if (entry.isDirectory()) { FileSystemUtils.createDirectoryAndParents(filename); } else { if (entry.isSymbolicLink()) { PathFragment linkName = new PathFragment(entry.getLinkName()); if (linkName.isAbsolute()) { linkName = linkName.relativeTo(PathFragment.ROOT_DIR); linkName = descriptor.repositoryPath().getRelative(linkName).asFragment(); } FileSystemUtils.ensureSymbolicLink(filename, linkName); } else { Files.copy(tarStream, filename.getPathFile().toPath(), StandardCopyOption.REPLACE_EXISTING); filename.chmod(entry.getMode()); } } } } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } if (prefix.isPresent() && !foundPrefix) { throw new RepositoryFunctionException( new IOException("Prefix " + prefix.get() + " was given, but not found in the archive"), Transience.PERSISTENT); } return new DecompressorValue(descriptor.repositoryPath()); }
From source file:com.google.cloud.tools.managedcloudsdk.install.TarGzExtractorProvider.java
@Override public void extract(Path archive, Path destination, ProgressListener progressListener) throws IOException { progressListener.start("Extracting archive: " + archive.getFileName(), ProgressListener.UNKNOWN); String canonicalDestination = destination.toFile().getCanonicalPath(); GzipCompressorInputStream gzipIn = new GzipCompressorInputStream(Files.newInputStream(archive)); try (TarArchiveInputStream in = new TarArchiveInputStream(gzipIn)) { TarArchiveEntry entry; while ((entry = in.getNextTarEntry()) != null) { Path entryTarget = destination.resolve(entry.getName()); String canonicalTarget = entryTarget.toFile().getCanonicalPath(); if (!canonicalTarget.startsWith(canonicalDestination + File.separator)) { throw new IOException("Blocked unzipping files outside destination: " + entry.getName()); }/*www. ja v a2s.c om*/ progressListener.update(1); logger.fine(entryTarget.toString()); if (entry.isDirectory()) { if (!Files.exists(entryTarget)) { Files.createDirectories(entryTarget); } } else if (entry.isFile()) { if (!Files.exists(entryTarget.getParent())) { Files.createDirectories(entryTarget.getParent()); } try (OutputStream out = new BufferedOutputStream(Files.newOutputStream(entryTarget))) { IOUtils.copy(in, out); PosixFileAttributeView attributeView = Files.getFileAttributeView(entryTarget, PosixFileAttributeView.class); if (attributeView != null) { attributeView.setPermissions(PosixUtil.getPosixFilePermissions(entry.getMode())); } } } else { // we don't know what kind of entry this is (we only process directories and files). logger.warning("Skipping entry (unknown type): " + entry.getName()); } } progressListener.done(); } }
From source file:com.puppetlabs.geppetto.forge.util.TarUtils.java
/**
 * Unpack the content read from <i>source</i> into <i>targetFolder</i>. If the
 * <i>skipTopFolder</i> is set, then don't assume that the archive contains one
 * single folder and unpack the content of that folder, not including the folder
 * itself.
 *
 * @param source
 *            The input source. Must be in <i>TAR</i> format.
 * @param targetFolder
 *            The destination folder for the unpack. Not used when a <tt>fileCatcher</tt> is provided
 * @param skipTopFolder
 *            Set to <code>true</code> to unpack beneath the top folder
 *            of the archive. The archive must consist of one single folder and nothing else
 *            in order for this to work.
 * @param fileCatcher
 *            Used when specific files should be picked from the archive without writing them to disk. Can be
 *            <tt>null</tt>.
 * @throws IOException
 */
public static void unpack(InputStream source, File targetFolder, boolean skipTopFolder,
        FileCatcher fileCatcher) throws IOException {
    String topFolderName = null;
    // Mode bits are collected here and applied in one pass after extraction.
    Map<File, Map<Integer, List<String>>> chmodMap = new HashMap<File, Map<Integer, List<String>>>();
    TarArchiveInputStream in = new TarArchiveInputStream(source);
    try {
        TarArchiveEntry te = in.getNextTarEntry();
        if (te == null) {
            throw new IOException("No entry in the tar file");
        }
        do {
            // Pax global headers carry archive metadata, not file content.
            if (te.isGlobalPaxHeader())
                continue;
            String name = te.getName();
            if (skipTopFolder) {
                // All entries must share one common top folder, which is stripped.
                int firstSlash = name.indexOf('/');
                if (firstSlash < 0)
                    throw new IOException("Archive doesn't contain one single folder");
                String tfName = name.substring(0, firstSlash);
                if (topFolderName == null)
                    topFolderName = tfName;
                else if (!tfName.equals(topFolderName))
                    throw new IOException("Archive doesn't contain one single folder");
                name = name.substring(firstSlash + 1);
            }
            if (name.length() == 0)
                continue;
            // A blank link name is treated as "not a link".
            String linkName = te.getLinkName();
            if (linkName != null) {
                if (linkName.trim().equals(""))
                    linkName = null;
            }
            if (fileCatcher != null) {
                // Catcher mode: nothing is written to disk; hand matching regular
                // files to the catcher and stop early if it says it is satisfied.
                if (linkName == null && !te.isDirectory() && fileCatcher.accept(name)) {
                    if (fileCatcher.catchData(name, in))
                        // We're done here
                        return;
                }
                continue;
            }
            File outFile = new File(targetFolder, name);
            if (linkName != null) {
                if (!OsUtil.link(targetFolder, name, te.getLinkName()))
                    throw new IOException(
                            "Archive contains links but they are not supported on this platform");
            } else {
                if (te.isDirectory()) {
                    outFile.mkdirs();
                } else {
                    outFile.getParentFile().mkdirs();
                    OutputStream target = new FileOutputStream(outFile);
                    StreamUtil.copy(in, target);
                    target.close();
                    outFile.setLastModified(te.getModTime().getTime());
                }
                // Defer permission changes until the whole archive is extracted.
                registerChmodFile(chmodMap, targetFolder, Integer.valueOf(te.getMode()), name);
            }
        } while ((te = in.getNextTarEntry()) != null);
    } finally {
        StreamUtil.close(in);
    }
    chmod(chmodMap);
}
From source file:com.google.devtools.build.lib.bazel.repository.CompressedTarFunction.java
@Override
// Decompresses the archive described by the descriptor into its repository
// path, optionally stripping a leading prefix from every entry, and recreating
// symbolic and hard links. Returns the repository path on success.
public Path decompress(DecompressorDescriptor descriptor) throws RepositoryFunctionException {
    Optional<String> prefix = descriptor.prefix();
    // Tracks whether at least one entry actually carried the requested prefix.
    boolean foundPrefix = false;
    try (InputStream decompressorStream = getDecompressorStream(descriptor)) {
        TarArchiveInputStream tarStream = new TarArchiveInputStream(decompressorStream);
        TarArchiveEntry entry;
        while ((entry = tarStream.getNextTarEntry()) != null) {
            StripPrefixedPath entryPath = StripPrefixedPath.maybeDeprefix(entry.getName(), prefix);
            foundPrefix = foundPrefix || entryPath.foundPrefix();
            if (entryPath.skip()) {
                continue;
            }
            Path filename = descriptor.repositoryPath().getRelative(entryPath.getPathFragment());
            FileSystemUtils.createDirectoryAndParents(filename.getParentDirectory());
            if (entry.isDirectory()) {
                FileSystemUtils.createDirectoryAndParents(filename);
            } else {
                if (entry.isSymbolicLink() || entry.isLink()) {
                    PathFragment linkName = new PathFragment(entry.getLinkName());
                    boolean wasAbsolute = linkName.isAbsolute();
                    // Strip the prefix from the link path if set.
                    linkName = StripPrefixedPath.maybeDeprefix(linkName.getPathString(), prefix)
                            .getPathFragment();
                    if (wasAbsolute) {
                        // Recover the path to an absolute path as maybeDeprefix() relativize the path
                        // even if the prefix is not set
                        linkName = descriptor.repositoryPath().getRelative(linkName).asFragment();
                    }
                    if (entry.isSymbolicLink()) {
                        FileSystemUtils.ensureSymbolicLink(filename, linkName);
                    } else {
                        FileSystemUtils.createHardLink(filename,
                                descriptor.repositoryPath().getRelative(linkName));
                    }
                } else {
                    Files.copy(tarStream, filename.getPathFile().toPath(),
                            StandardCopyOption.REPLACE_EXISTING);
                    filename.chmod(entry.getMode());
                    // This can only be done on real files, not links, or it will skip the reader to
                    // the next "real" file to try to find the mod time info.
                    Date lastModified = entry.getLastModifiedDate();
                    filename.setLastModifiedTime(lastModified.getTime());
                }
            }
        }
    } catch (IOException e) {
        throw new RepositoryFunctionException(e, Transience.TRANSIENT);
    }
    if (prefix.isPresent() && !foundPrefix) {
        throw new RepositoryFunctionException(
                new IOException("Prefix " + prefix.get() + " was given, but not found in the archive"),
                Transience.PERSISTENT);
    }
    return descriptor.repositoryPath();
}
From source file:org.apache.hadoop.fs.tar.TarFileSystem.java
@Override public FileStatus[] listStatus(Path f) throws IOException { ArrayList<FileStatus> ret = new ArrayList<FileStatus>(); Path abs = makeAbsolute(f);/* www . j av a2 s . com*/ Path baseTar = getBaseTarPath(abs); String inFile = getFileInArchive(abs); FileStatus underlying = underlyingFS.getFileStatus(baseTar); // if subfile exists in the path, just return the status of that if (inFile != null) { ret.add(getFileStatus(abs)); } else { FSDataInputStream in = underlyingFS.open(baseTar); byte[] buffer = new byte[512]; for (long offset : index.getOffsetList()) { in.seek(offset - 512); // adjust for the header TarArchiveEntry entry = readHeaderEntry(in, buffer); // Construct a FileStatus object FileStatus fstatus = new FileStatus(entry.getSize(), entry.isDirectory(), (int) underlying.getReplication(), underlying.getBlockSize(), entry.getModTime().getTime(), underlying.getAccessTime(), new FsPermission((short) entry.getMode()), entry.getUserName(), entry.getGroupName(), new Path(abs.toUri().toASCIIString() + TAR_INFILESEP + entry.getName())); ret.add(fstatus); } } // copy back FileStatus[] retArray = new FileStatus[ret.size()]; ret.toArray(retArray); return retArray; }
From source file:org.apache.hadoop.fs.tar.TarFileSystem.java
@Override public FileStatus getFileStatus(Path f) throws IOException { FileStatus fstatus = null;/* w w w. j av a 2 s . c o m*/ Path abs = makeAbsolute(f); Path baseTar = getBaseTarPath(abs); String inFile = getFileInArchive(abs); FileStatus underlying = underlyingFS.getFileStatus(baseTar); if (inFile == null) { // return the status of the tar itself but make it a dir fstatus = new FileStatus(underlying.getLen(), true, underlying.getReplication(), underlying.getBlockSize(), underlying.getModificationTime(), underlying.getAccessTime(), underlying.getPermission(), underlying.getOwner(), underlying.getGroup(), abs); } else { long offset = index.getOffset(inFile); FSDataInputStream in = underlyingFS.open(baseTar); in.seek(offset - 512); TarArchiveEntry entry = readHeaderEntry(in); if (!entry.getName().equals(inFile)) { LOG.fatal("Index file is corrupt." + "Requested filename is present in index " + "but absent in TAR."); throw new IOException("NBU-TAR: FATAL: entry file name " + "does not match requested file name"); } // Construct a FileStatus object fstatus = new FileStatus(entry.getSize(), entry.isDirectory(), (int) underlying.getReplication(), underlying.getBlockSize(), entry.getModTime().getTime(), underlying.getAccessTime(), new FsPermission((short) entry.getMode()), entry.getUserName(), entry.getGroupName(), abs); } return fstatus; }