List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveEntry isDirectory
public boolean isDirectory()
From source file:com.streamsets.datacollector.restapi.StageLibraryResource.java
@POST @Path("/stageLibraries/install") @ApiOperation(value = "Install Stage libraries", response = Object.class, authorizations = @Authorization(value = "basic")) @Produces(MediaType.APPLICATION_JSON)//from w ww. j av a 2 s . c om @RolesAllowed({ AuthzRole.ADMIN, AuthzRole.ADMIN_REMOTE }) public Response installLibraries(List<String> libraryIdList) throws IOException { String runtimeDir = runtimeInfo.getRuntimeDir(); String version = buildInfo.getVersion(); List<String> libraryUrlList = new ArrayList<>(); List<RepositoryManifestJson> repoManifestList = stageLibrary.getRepositoryManifestList(); repoManifestList.forEach(repositoryManifestJson -> { List<String> libraryFilePathList = repositoryManifestJson.getStageLibraries().stream() .filter(stageLibrariesJson -> stageLibrariesJson.getStageLibraryManifest() != null && libraryIdList.contains(stageLibrariesJson.getStageLibraryManifest().getStageLibId())) .map(stageLibrariesJson -> stageLibrariesJson.getStageLibraryManifest().getStageLibFile()) .collect(Collectors.toList()); libraryUrlList.addAll(libraryFilePathList); }); if (libraryUrlList.size() != libraryIdList.size()) { throw new RuntimeException(Utils .format("Unable to find to stage library {} in configured repository list", libraryIdList)); } for (String libraryUrl : libraryUrlList) { try (Response response = ClientBuilder.newClient().target(libraryUrl).request().get()) { String runtimeDirParent = runtimeDir + "/.."; String[] runtimeDirStrSplitArr = runtimeDir.split("/"); String installDirName = runtimeDirStrSplitArr[runtimeDirStrSplitArr.length - 1]; String tarDirRootName = STREAMSETS_ROOT_DIR_PREFIX + version; InputStream inputStream = response.readEntity(InputStream.class); TarArchiveInputStream myTarFile = new TarArchiveInputStream( new GzipCompressorInputStream(inputStream)); TarArchiveEntry entry = myTarFile.getNextTarEntry(); String directory = null; while (entry != null) { if (entry.isDirectory()) { entry = myTarFile.getNextTarEntry(); if (directory == 
null) { // Initialize root folder if (entry.getName().startsWith(STREAMSETS_LIBS_FOLDER_NAME)) { directory = runtimeDir; } else { directory = runtimeDirParent; } } continue; } File curFile = new File(directory, entry.getName().replace(tarDirRootName, installDirName)); File parent = curFile.getParentFile(); if (!parent.exists() && !parent.mkdirs()) { // Failed to create directory throw new RuntimeException( Utils.format("Failed to create directory: {}", parent.getPath())); } OutputStream out = new FileOutputStream(curFile); IOUtils.copy(myTarFile, out); out.close(); entry = myTarFile.getNextTarEntry(); } myTarFile.close(); } } return Response.ok().build(); }
From source file:data.TarExtractorTest.java
/**
 * Verifies that TarExtractor writes file entries to disk and skips directory entries.
 * Mocks the TarArchiveInputStream so that it yields one file entry ("data.gpdb"),
 * one directory entry ("IpV6Data"), then end-of-archive.
 */
@Test
public void itExtractsTarFile() throws Exception {
    TarArchiveInputStream tarArchiveInputStream = mock(TarArchiveInputStream.class);
    whenNew(TarArchiveInputStream.class).withArguments(any(InputStream.class))
        .thenReturn(tarArchiveInputStream);

    // First call returns a file entry, second a directory entry, then null (end of tar).
    when(tarArchiveInputStream.getNextTarEntry()).thenAnswer(new Answer() {
        private int count = 0;

        public Object answer(InvocationOnMock invocationOnMock) {
            count++;
            if (count == 1) {
                TarArchiveEntry tarArchiveEntry = mock(TarArchiveEntry.class);
                when(tarArchiveEntry.getName()).thenReturn("data.gpdb");
                when(tarArchiveEntry.isFile()).thenReturn(true);
                return tarArchiveEntry;
            }
            if (count == 2) {
                TarArchiveEntry tarArchiveEntry = mock(TarArchiveEntry.class);
                when(tarArchiveEntry.getName()).thenReturn("IpV6Data");
                when(tarArchiveEntry.isDirectory()).thenReturn(true);
                return tarArchiveEntry;
            }
            return null;
        }
    });

    File directory = mock(File.class);

    File fileInTar = spy(mock(File.class));
    when(fileInTar.createNewFile()).thenReturn(true);
    whenNew(File.class).withArguments(directory, "data.gpdb").thenReturn(fileInTar);

    File directoryInTar = spy(mock(File.class));
    when(directoryInTar.createNewFile()).thenReturn(true);
    whenNew(File.class).withArguments(directory, "IpV6Data").thenReturn(directoryInTar);

    FileOutputStream fileOutputStream = mock(FileOutputStream.class);
    whenNew(FileOutputStream.class).withArguments(fileInTar).thenReturn(fileOutputStream);

    // First read yields 654321 bytes, second signals end-of-stream.
    // FIX: the deprecated new Integer(...) constructor replaced with Integer.valueOf(...).
    when(tarArchiveInputStream.read(any(byte[].class))).thenAnswer(new Answer() {
        private int count = 0;

        public Object answer(InvocationOnMock invocationOnMock) {
            count++;
            return (count == 1) ? Integer.valueOf(654321) : Integer.valueOf(-1);
        }
    });

    InputStream inputStream1 = mock(InputStream.class);
    TarExtractor tarExtractor = new TarExtractor();
    assertThat(tarExtractor.extractTo(directory, inputStream1), equalTo(true));

    // File entry: created, fully written, stream closed.
    verify(fileInTar).createNewFile();
    verify(fileOutputStream).write(any(byte[].class), eq(0), eq(654321));
    verify(fileOutputStream).close();
    verifyNoMoreInteractions(fileOutputStream);
    // Directory entry: must be skipped entirely.
    verifyZeroInteractions(directoryInTar);
}
From source file:heigit.ors.routing.graphhopper.extensions.reader.borders.CountryBordersReader.java
/** * Method to read the geometries from a GeoJSON file that represent the boundaries of different countries. Ideally * it should be written using many small objects split into hierarchies. * * If the file is a .tar.gz format, it will decompress it and then store the reulting data to be read into the * JSON object./* ww w . j a v a2 s.c o m*/ * * @return A (Geo)JSON object representing the contents of the file */ private JSONObject readBordersData() throws IOException { String data = ""; InputStream is = null; BufferedReader buf = null; try { is = new FileInputStream(BORDER_FILE); if (BORDER_FILE.endsWith(".tar.gz")) { // We are working with a compressed file TarArchiveInputStream tis = new TarArchiveInputStream( new GzipCompressorInputStream(new BufferedInputStream(is))); TarArchiveEntry entry; StringBuilder sb = new StringBuilder(); while ((entry = tis.getNextTarEntry()) != null) { if (!entry.isDirectory()) { byte[] bytes = new byte[(int) entry.getSize()]; tis.read(bytes); String str = new String(bytes); sb.append(str); } } data = sb.toString(); } else { // Assume a normal file so read line by line buf = new BufferedReader(new InputStreamReader(is)); String line = ""; StringBuilder sb = new StringBuilder(); while ((line = buf.readLine()) != null) { sb.append(line); } data = sb.toString(); } } catch (IOException ioe) { LOGGER.warn("Cannot access borders file!"); throw ioe; } finally { try { if (is != null) is.close(); if (buf != null) buf.close(); } catch (IOException ioe) { LOGGER.warn("Error closing file reader buffers!"); } catch (NullPointerException npe) { // This can happen if the file itself wasn't available throw new IOException("Borders file " + BORDER_FILE + " not found!"); } } JSONObject json = new JSONObject(data); return json; }
From source file:com.google.cloud.tools.managedcloudsdk.install.TarGzExtractorProvider.java
@Override public void extract(Path archive, Path destination, ProgressListener progressListener) throws IOException { progressListener.start("Extracting archive: " + archive.getFileName(), ProgressListener.UNKNOWN); String canonicalDestination = destination.toFile().getCanonicalPath(); GzipCompressorInputStream gzipIn = new GzipCompressorInputStream(Files.newInputStream(archive)); try (TarArchiveInputStream in = new TarArchiveInputStream(gzipIn)) { TarArchiveEntry entry; while ((entry = in.getNextTarEntry()) != null) { Path entryTarget = destination.resolve(entry.getName()); String canonicalTarget = entryTarget.toFile().getCanonicalPath(); if (!canonicalTarget.startsWith(canonicalDestination + File.separator)) { throw new IOException("Blocked unzipping files outside destination: " + entry.getName()); }//w ww. j a v a 2s. c om progressListener.update(1); logger.fine(entryTarget.toString()); if (entry.isDirectory()) { if (!Files.exists(entryTarget)) { Files.createDirectories(entryTarget); } } else if (entry.isFile()) { if (!Files.exists(entryTarget.getParent())) { Files.createDirectories(entryTarget.getParent()); } try (OutputStream out = new BufferedOutputStream(Files.newOutputStream(entryTarget))) { IOUtils.copy(in, out); PosixFileAttributeView attributeView = Files.getFileAttributeView(entryTarget, PosixFileAttributeView.class); if (attributeView != null) { attributeView.setPermissions(PosixUtil.getPosixFilePermissions(entry.getMode())); } } } else { // we don't know what kind of entry this is (we only process directories and files). logger.warning("Skipping entry (unknown type): " + entry.getName()); } } progressListener.done(); } }
From source file:com.google.devtools.build.lib.bazel.repository.TarGzFunction.java
@Nullable @Override/*from w ww.j a va 2 s .co m*/ public SkyValue compute(SkyKey skyKey, Environment env) throws RepositoryFunctionException { DecompressorDescriptor descriptor = (DecompressorDescriptor) skyKey.argument(); Optional<String> prefix = descriptor.prefix(); boolean foundPrefix = false; try (GZIPInputStream gzipStream = new GZIPInputStream( new FileInputStream(descriptor.archivePath().getPathFile()))) { TarArchiveInputStream tarStream = new TarArchiveInputStream(gzipStream); TarArchiveEntry entry; while ((entry = tarStream.getNextTarEntry()) != null) { StripPrefixedPath entryPath = StripPrefixedPath.maybeDeprefix(entry.getName(), prefix); foundPrefix = foundPrefix || entryPath.foundPrefix(); if (entryPath.skip()) { continue; } Path filename = descriptor.repositoryPath().getRelative(entryPath.getPathFragment()); FileSystemUtils.createDirectoryAndParents(filename.getParentDirectory()); if (entry.isDirectory()) { FileSystemUtils.createDirectoryAndParents(filename); } else { if (entry.isSymbolicLink()) { PathFragment linkName = new PathFragment(entry.getLinkName()); if (linkName.isAbsolute()) { linkName = linkName.relativeTo(PathFragment.ROOT_DIR); linkName = descriptor.repositoryPath().getRelative(linkName).asFragment(); } FileSystemUtils.ensureSymbolicLink(filename, linkName); } else { Files.copy(tarStream, filename.getPathFile().toPath(), StandardCopyOption.REPLACE_EXISTING); filename.chmod(entry.getMode()); } } } } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } if (prefix.isPresent() && !foundPrefix) { throw new RepositoryFunctionException( new IOException("Prefix " + prefix.get() + " was given, but not found in the archive"), Transience.PERSISTENT); } return new DecompressorValue(descriptor.repositoryPath()); }
From source file:com.google.devtools.build.lib.bazel.repository.CompressedTarFunction.java
/**
 * Decompresses a tar archive (the compression wrapper comes from getDecompressorStream) into the
 * descriptor's repository path, optionally stripping a directory prefix from every entry.
 *
 * Handles directory entries, symbolic links, hard links, and regular files; for regular files it
 * also restores mode bits and the last-modified time. If a prefix was requested but matched no
 * entry, a persistent error is raised.
 */
@Override
public Path decompress(DecompressorDescriptor descriptor) throws RepositoryFunctionException {
    Optional<String> prefix = descriptor.prefix();
    // Tracks whether at least one entry actually started with the requested prefix.
    boolean foundPrefix = false;
    try (InputStream decompressorStream = getDecompressorStream(descriptor)) {
        // NOTE(review): tarStream is not closed explicitly; closing decompressorStream via
        // try-with-resources releases the underlying resource.
        TarArchiveInputStream tarStream = new TarArchiveInputStream(decompressorStream);
        TarArchiveEntry entry;
        while ((entry = tarStream.getNextTarEntry()) != null) {
            StripPrefixedPath entryPath = StripPrefixedPath.maybeDeprefix(entry.getName(), prefix);
            foundPrefix = foundPrefix || entryPath.foundPrefix();
            if (entryPath.skip()) {
                // Entry lies outside the requested prefix — ignore it.
                continue;
            }
            Path filename = descriptor.repositoryPath().getRelative(entryPath.getPathFragment());
            FileSystemUtils.createDirectoryAndParents(filename.getParentDirectory());
            if (entry.isDirectory()) {
                FileSystemUtils.createDirectoryAndParents(filename);
            } else {
                if (entry.isSymbolicLink() || entry.isLink()) {
                    PathFragment linkName = new PathFragment(entry.getLinkName());
                    boolean wasAbsolute = linkName.isAbsolute();
                    // Strip the prefix from the link path if set.
                    linkName = StripPrefixedPath.maybeDeprefix(linkName.getPathString(), prefix)
                        .getPathFragment();
                    if (wasAbsolute) {
                        // Recover the path to an absolute path as maybeDeprefix() relativize the path
                        // even if the prefix is not set
                        linkName = descriptor.repositoryPath().getRelative(linkName).asFragment();
                    }
                    if (entry.isSymbolicLink()) {
                        FileSystemUtils.ensureSymbolicLink(filename, linkName);
                    } else {
                        FileSystemUtils.createHardLink(filename,
                            descriptor.repositoryPath().getRelative(linkName));
                    }
                } else {
                    Files.copy(tarStream, filename.getPathFile().toPath(),
                        StandardCopyOption.REPLACE_EXISTING);
                    filename.chmod(entry.getMode());
                    // This can only be done on real files, not links, or it will skip the reader to
                    // the next "real" file to try to find the mod time info.
                    Date lastModified = entry.getLastModifiedDate();
                    filename.setLastModifiedTime(lastModified.getTime());
                }
            }
        }
    } catch (IOException e) {
        throw new RepositoryFunctionException(e, Transience.TRANSIENT);
    }
    if (prefix.isPresent() && !foundPrefix) {
        throw new RepositoryFunctionException(
            new IOException("Prefix " + prefix.get() + " was given, but not found in the archive"),
            Transience.PERSISTENT);
    }
    return descriptor.repositoryPath();
}
From source file:ezbake.deployer.publishers.openShift.RhcApplication.java
private void writeTarFileToProjectDirectory(byte[] artifact) throws DeploymentException { final File gitDbPath = gitRepo.getRepository().getDirectory(); final File projectDir = gitDbPath.getParentFile(); try {/*from w w w . j ava 2s . c om*/ CompressorInputStream uncompressedInput = new GzipCompressorInputStream( new ByteArrayInputStream(artifact)); TarArchiveInputStream inputStream = new TarArchiveInputStream(uncompressedInput); // copy the existing entries TarArchiveEntry nextEntry; while ((nextEntry = (TarArchiveEntry) inputStream.getNextEntry()) != null) { File fileToWrite = new File(projectDir, nextEntry.getName()); if (nextEntry.isDirectory()) { fileToWrite.mkdirs(); } else { File house = fileToWrite.getParentFile(); if (!house.exists()) { house.mkdirs(); } copyInputStreamToFile(inputStream, fileToWrite); Files.setPosixFilePermissions(fileToWrite, nextEntry.getMode()); } } } catch (IOException e) { log.error("[" + getApplicationName() + "]" + e.getMessage(), e); throw new DeploymentException(e.getMessage()); } }
From source file:com.yahoo.parsec.gradle.utils.FileUtils.java
/** * Un-TarZip a tgz file.//from w w w . j av a 2s . c o m * * @param resourcePath resource path * @param outputPath output path * @param overwrite overwrite flag * @throws IOException IOException */ public void unTarZip(String resourcePath, String outputPath, boolean overwrite) throws IOException { try (InputStream inputStream = getClass().getResourceAsStream(resourcePath); GzipCompressorInputStream gzipCompressorInputStream = new GzipCompressorInputStream(inputStream); TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream( gzipCompressorInputStream);) { TarArchiveEntry tarArchiveEntry; logger.info("Extracting tgz file to " + outputPath); while ((tarArchiveEntry = tarArchiveInputStream.getNextTarEntry()) != null) { final File outputFile = new File(outputPath, tarArchiveEntry.getName()); if (!overwrite && outputFile.exists()) { continue; } if (tarArchiveEntry.isDirectory()) { outputFile.mkdirs(); } else { Files.copy(tarArchiveInputStream, outputFile.toPath(), StandardCopyOption.REPLACE_EXISTING); } } } catch (IOException e) { throw e; } }
From source file:io.syndesis.project.converter.DefaultProjectGeneratorTest.java
private Path generate(GenerateProjectRequest request, ProjectGeneratorProperties generatorProperties) throws IOException { try (InputStream is = new DefaultProjectGenerator(new ConnectorCatalog(CATALOG_PROPERTIES), generatorProperties, registry).generate(request)) { Path ret = Files.createTempDirectory("integration-runtime"); try (TarArchiveInputStream tis = new TarArchiveInputStream(is)) { TarArchiveEntry tarEntry = tis.getNextTarEntry(); // tarIn is a TarArchiveInputStream while (tarEntry != null) {// create a file with the same name as the tarEntry File destPath = new File(ret.toFile(), tarEntry.getName()); if (tarEntry.isDirectory()) { destPath.mkdirs();//from www . j a v a 2 s. c o m } else { destPath.getParentFile().mkdirs(); destPath.createNewFile(); byte[] btoRead = new byte[8129]; BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(destPath)); int len = tis.read(btoRead); while (len != -1) { bout.write(btoRead, 0, len); len = tis.read(btoRead); } bout.close(); } tarEntry = tis.getNextTarEntry(); } } return ret; } }
From source file:gobblin.data.management.copy.writer.TarArchiveInputStreamDataWriter.java
/**
 * Untars the passed in {@link FileAwareInputStream} to the task's staging directory. Uses the name
 * of the root {@link TarArchiveEntry} in the stream as the directory name for the untarred file.
 * The method also commits the data by moving the file from staging to output directory.
 *
 * @see gobblin.data.management.copy.writer.FileAwareInputStreamDataWriter#write(gobblin.data.management.copy.FileAwareInputStream)
 */
@Override
public void writeImpl(InputStream inputStream, Path writeAt, CopyableFile copyableFile) throws IOException {
    this.closer.register(inputStream);
    TarArchiveInputStream tarIn = new TarArchiveInputStream(inputStream);
    final ReadableByteChannel inputChannel = Channels.newChannel(tarIn);
    TarArchiveEntry tarEntry;

    // flush the first entry in the tar, which is just the root directory
    tarEntry = tarIn.getNextTarEntry();
    // Root folder name without path separators; it is replaced by writeAt's name below so the
    // archive unpacks under the staging path rather than its original root.
    String tarEntryRootName = StringUtils.remove(tarEntry.getName(), Path.SEPARATOR);

    log.info("Unarchiving at " + writeAt);

    try {
        while ((tarEntry = tarIn.getNextTarEntry()) != null) {
            // the API tarEntry.getName() is misleading, it is actually the path of the tarEntry in the tar file
            String newTarEntryPath = tarEntry.getName().replace(tarEntryRootName, writeAt.getName());
            Path tarEntryStagingPath = new Path(writeAt.getParent(), newTarEntryPath);
            if (tarEntry.isDirectory() && !this.fs.exists(tarEntryStagingPath)) {
                this.fs.mkdirs(tarEntryStagingPath);
            } else if (!tarEntry.isDirectory()) {
                // Overwrite any existing file at the staging path.
                FSDataOutputStream out = this.fs.create(tarEntryStagingPath, true);
                final WritableByteChannel outputChannel = Channels.newChannel(out);
                try {
                    StreamCopier copier = new StreamCopier(inputChannel, outputChannel);
                    if (isInstrumentationEnabled()) {
                        // Meter copy throughput when instrumentation is on.
                        copier.withCopySpeedMeter(this.copySpeedMeter);
                    }
                    this.bytesWritten.addAndGet(copier.copy());
                    if (isInstrumentationEnabled()) {
                        log.info("File {}: copied {} bytes, average rate: {} B/s",
                            copyableFile.getOrigin().getPath(), this.copySpeedMeter.getCount(),
                            this.copySpeedMeter.getMeanRate());
                    } else {
                        log.info("File {} copied.", copyableFile.getOrigin().getPath());
                    }
                } finally {
                    // Per-entry output resources are released even if the copy fails.
                    out.close();
                    outputChannel.close();
                }
            }
        }
    } finally {
        // Shared input resources are closed once, after all entries are processed.
        tarIn.close();
        inputChannel.close();
        inputStream.close();
    }
}