List of usage examples for org.apache.commons.compress.archivers.ar.ArArchiveInputStream (constructor)
public ArArchiveInputStream(final InputStream pInput)
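Before the per-project examples below, a minimal sketch of the typical call pattern, assuming a placeholder file name "archive.a" (not taken from any of the examples): wrap an InputStream, then pull entries with getNextArEntry() until it returns null.

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.compress.archivers.ar.ArArchiveEntry;
import org.apache.commons.compress.archivers.ar.ArArchiveInputStream;

public class ListArEntries {
    public static void main(String[] args) throws IOException {
        // "archive.a" is a placeholder path; buffering the raw stream is optional but cheap.
        try (InputStream in = new BufferedInputStream(new FileInputStream("archive.a"));
                ArArchiveInputStream ar = new ArArchiveInputStream(in)) {
            ArArchiveEntry entry;
            while ((entry = ar.getNextArEntry()) != null) {
                // Each ar header records a name and size; the stream is positioned at the entry data.
                System.out.println(entry.getName() + " (" + entry.getSize() + " bytes)");
            }
        }
    }
}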
From source file:de.dentrassi.pm.utils.deb.Packages.java
public static Map<String, String> parseControlFile(final File packageFile) throws IOException, ParserException {
    try (final ArArchiveInputStream in = new ArArchiveInputStream(new FileInputStream(packageFile))) {
        ArchiveEntry ar;
        while ((ar = in.getNextEntry()) != null) {
            if (!ar.getName().equals("control.tar.gz")) {
                continue;
            }
            try (final TarArchiveInputStream inputStream = new TarArchiveInputStream(new GZIPInputStream(in))) {
                TarArchiveEntry te;
                while ((te = inputStream.getNextTarEntry()) != null) {
                    String name = te.getName();
                    if (name.startsWith("./")) {
                        name = name.substring(2);
                    }
                    if (!name.equals("control")) {
                        continue;
                    }
                    return parseControlFile(inputStream);
                }
            }
        }
    }
    return null;
}
From source file:com.espringtran.compressor4j.processor.ArProcessor.java
/**
 * Read from compressed file
 *
 * @param srcPath
 *            path of compressed file
 * @param fileCompressor
 *            FileCompressor object
 * @throws Exception
 */
@Override
public void read(String srcPath, FileCompressor fileCompressor) throws Exception {
    long t1 = System.currentTimeMillis();
    byte[] data = FileUtil.convertFileToByte(srcPath);
    ByteArrayInputStream bais = new ByteArrayInputStream(data);
    ArArchiveInputStream ais = new ArArchiveInputStream(bais);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    try {
        byte[] buffer = new byte[1024];
        int readByte;
        ArArchiveEntry entry = ais.getNextArEntry();
        // NOTE: the loop condition ends iteration at the first zero-size entry,
        // so any entries after an empty one are never read.
        while (entry != null && entry.getSize() > 0) {
            long t2 = System.currentTimeMillis();
            baos = new ByteArrayOutputStream();
            readByte = ais.read(buffer);
            while (readByte != -1) {
                baos.write(buffer, 0, readByte);
                readByte = ais.read(buffer);
            }
            BinaryFile binaryFile = new BinaryFile(entry.getName(), baos.toByteArray());
            fileCompressor.addBinaryFile(binaryFile);
            LogUtil.createAddFileLog(fileCompressor, binaryFile, t2, System.currentTimeMillis());
            entry = ais.getNextArEntry();
        }
    } catch (Exception e) {
        FileCompressor.LOGGER.error("Error on get compressor file", e);
    } finally {
        baos.close();
        ais.close();
        bais.close();
    }
    LogUtil.createReadLog(fileCompressor, srcPath, data.length, t1, System.currentTimeMillis());
}
From source file:com.facebook.buck.cxx.ArchiveStepIntegrationTest.java
@Test
@SuppressWarnings("PMD.AvoidUsingOctalValues")
public void thatGeneratedArchivesAreDeterministic() throws IOException, InterruptedException {
    assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
    ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot());
    CxxPlatform platform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

    // Build up the paths to various files the archive step will use.
    SourcePathResolver sourcePathResolver = new SourcePathResolver(new SourcePathRuleFinder(
            new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
    Archiver archiver = platform.getAr();
    Path output = filesystem.getPath("output.a");
    Path input = filesystem.getPath("input.dat");
    filesystem.writeContentsToPath("blah", input);
    Preconditions.checkState(filesystem.resolve(input).toFile().setExecutable(true));

    // Build an archive step.
    ArchiveStep archiveStep = new ArchiveStep(filesystem, archiver.getEnvironment(),
            archiver.getCommandPrefix(sourcePathResolver), ImmutableList.of(), getArchiveOptions(false),
            output, ImmutableList.of(input), archiver);
    FileScrubberStep fileScrubberStep = new FileScrubberStep(filesystem, output,
            platform.getAr().getScrubbers());

    // Execute the archive step and verify it ran successfully.
    ExecutionContext executionContext = TestExecutionContext.newInstance();
    TestConsole console = (TestConsole) executionContext.getConsole();
    int exitCode = archiveStep.execute(executionContext).getExitCode();
    assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);
    exitCode = fileScrubberStep.execute(executionContext).getExitCode();
    assertEquals("archive scrub step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

    // Now read the archive entries and verify that the timestamp, UID, and GID fields are
    // zero'd out.
    try (ArArchiveInputStream stream = new ArArchiveInputStream(
            new FileInputStream(filesystem.resolve(output).toFile()))) {
        ArArchiveEntry entry = stream.getNextArEntry();
        assertEquals(ObjectFileCommonModificationDate.COMMON_MODIFICATION_TIME_STAMP, entry.getLastModified());
        assertEquals(0, entry.getUserId());
        assertEquals(0, entry.getGroupId());
        assertEquals(String.format("0%o", entry.getMode()), 0100644, entry.getMode());
    }
}
From source file:com.facebook.buck.cxx.ArchiveStepIntegrationTest.java
@Test
public void emptyArchives() throws IOException, InterruptedException {
    ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot());
    CxxPlatform platform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

    // Build up the paths to various files the archive step will use.
    SourcePathResolver sourcePathResolver = new SourcePathResolver(new SourcePathRuleFinder(
            new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
    Archiver archiver = platform.getAr();
    Path output = filesystem.getPath("output.a");

    // Build an archive step with no inputs.
    ArchiveStep archiveStep = new ArchiveStep(filesystem, archiver.getEnvironment(),
            archiver.getCommandPrefix(sourcePathResolver), ImmutableList.of(), getArchiveOptions(false),
            output, ImmutableList.of(), archiver);

    // Execute the archive step and verify it ran successfully.
    ExecutionContext executionContext = TestExecutionContext.newInstance();
    TestConsole console = (TestConsole) executionContext.getConsole();
    int exitCode = archiveStep.execute(executionContext).getExitCode();
    assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

    // Now read the archive and verify that it contains no entries.
    try (ArArchiveInputStream stream = new ArArchiveInputStream(
            new FileInputStream(filesystem.resolve(output).toFile()))) {
        assertThat(stream.getNextArEntry(), Matchers.nullValue());
    }
}
From source file:com.facebook.buck.cxx.CxxLibraryIntegrationTest.java
@Test
public void thinArchivesDoNotContainAbsolutePaths() throws IOException {
    CxxPlatform cxxPlatform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
    assumeTrue(cxxPlatform.getAr().supportsThinArchives());
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_library", tmp);
    workspace.setUp();
    Path archive = workspace.buildAndReturnOutput("-c", "cxx.archive_contents=thin", "//:foo#default,static");

    // NOTE: Replace the thin header with a normal header just so the commons compress parser
    // can parse the archive contents.
    try (OutputStream outputStream = Files.newOutputStream(workspace.getPath(archive),
            StandardOpenOption.WRITE)) {
        outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
    }

    // Now iterate the archive and verify it contains no absolute paths.
    try (ArArchiveInputStream stream = new ArArchiveInputStream(
            new FileInputStream(workspace.getPath(archive).toFile()))) {
        ArArchiveEntry entry;
        while ((entry = stream.getNextArEntry()) != null) {
            if (!entry.getName().isEmpty()) {
                assertFalse("found absolute path: " + entry.getName(),
                        workspace.getDestPath().getFileSystem().getPath(entry.getName()).isAbsolute());
            }
        }
    }
}
From source file:com.facebook.buck.cxx.ArchiveStepIntegrationTest.java
@Test
public void inputDirs() throws IOException, InterruptedException {
    assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
    ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot());
    CxxPlatform platform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));

    // Build up the paths to various files the archive step will use.
    SourcePathResolver sourcePathResolver = new SourcePathResolver(new SourcePathRuleFinder(
            new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
    Archiver archiver = platform.getAr();
    Path output = filesystem.getPath("output.a");
    Path input = filesystem.getPath("foo/blah.dat");
    filesystem.mkdirs(input.getParent());
    filesystem.writeContentsToPath("blah", input);

    // Build an archive step, passing the input's parent dir rather than the file itself.
    ArchiveStep archiveStep = new ArchiveStep(filesystem, archiver.getEnvironment(),
            archiver.getCommandPrefix(sourcePathResolver), ImmutableList.of(), getArchiveOptions(false),
            output, ImmutableList.of(input.getParent()), archiver);

    // Execute the archive step and verify it ran successfully.
    ExecutionContext executionContext = TestExecutionContext.newInstance();
    TestConsole console = (TestConsole) executionContext.getConsole();
    int exitCode = archiveStep.execute(executionContext).getExitCode();
    assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

    // Now read the archive entries and verify that the file inside the input dir
    // was archived under its base name.
    try (ArArchiveInputStream stream = new ArArchiveInputStream(
            new FileInputStream(filesystem.resolve(output).toFile()))) {
        ArArchiveEntry entry = stream.getNextArEntry();
        assertThat(entry.getName(), Matchers.equalTo("blah.dat"));
    }
}
From source file:com.facebook.buck.cxx.ArchiveStepIntegrationTest.java
@Test
public void thinArchives() throws IOException, InterruptedException {
    assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
    ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot());
    CxxPlatform platform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
    assumeTrue(platform.getAr().supportsThinArchives());

    // Build up the paths to various files the archive step will use.
    SourcePathResolver sourcePathResolver = new SourcePathResolver(new SourcePathRuleFinder(
            new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())));
    Archiver archiver = platform.getAr();
    Path output = filesystem.getPath("foo/libthin.a");
    filesystem.mkdirs(output.getParent());

    // Create a really large input file so it's obvious that the archive is thin.
    Path input = filesystem.getPath("bar/blah.dat");
    filesystem.mkdirs(input.getParent());
    byte[] largeInputFile = new byte[1024 * 1024];
    byte[] fillerToRepeat = "hello\n".getBytes(StandardCharsets.UTF_8);
    for (int i = 0; i < largeInputFile.length; i++) {
        largeInputFile[i] = fillerToRepeat[i % fillerToRepeat.length];
    }
    filesystem.writeBytesToPath(largeInputFile, input);

    // Build an archive step.
    ArchiveStep archiveStep = new ArchiveStep(filesystem, archiver.getEnvironment(),
            archiver.getCommandPrefix(sourcePathResolver), ImmutableList.of(), getArchiveOptions(true),
            output, ImmutableList.of(input), archiver);

    // Execute the archive step and verify it ran successfully.
    ExecutionContext executionContext = TestExecutionContext.newInstance();
    TestConsole console = (TestConsole) executionContext.getConsole();
    int exitCode = archiveStep.execute(executionContext).getExitCode();
    assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode);

    // Verify that the thin header is present.
    assertThat(filesystem.readFirstLine(output), Matchers.equalTo(Optional.of("!<thin>")));

    // Verify that even though the archived contents are really big, the archive is still small.
    assertThat(filesystem.getFileSize(output), Matchers.lessThan(1000L));

    // NOTE: Replace the thin header with a normal header just so the commons compress parser
    // can parse the archive contents.
    try (OutputStream outputStream = Files.newOutputStream(filesystem.resolve(output),
            StandardOpenOption.WRITE)) {
        outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
    }

    // Now read the archive entries and verify that the input names are relative paths
    // from the output's parent dir.
    try (ArArchiveInputStream stream = new ArArchiveInputStream(
            new FileInputStream(filesystem.resolve(output).toFile()))) {
        ArArchiveEntry entry = stream.getNextArEntry();
        assertThat(entry.getName(), Matchers.equalTo(output.getParent().relativize(input).toString()));
    }
}
From source file:de.dentrassi.build.apt.repo.AptWriter.java
private BinaryPackagePackagesFile readArtifact(final File packageFile) throws Exception {
    try (final ArArchiveInputStream in = new ArArchiveInputStream(new FileInputStream(packageFile))) {
        ArchiveEntry ar;
        while ((ar = in.getNextEntry()) != null) {
            if (!ar.getName().equals("control.tar.gz")) {
                continue;
            }
            try (final TarArchiveInputStream inputStream = new TarArchiveInputStream(new GZIPInputStream(in))) {
                TarArchiveEntry te;
                while ((te = inputStream.getNextTarEntry()) != null) {
                    if (!te.getName().equals("./control")) {
                        continue;
                    }
                    return convert(new BinaryPackageControlFile(inputStream), packageFile);
                }
            }
        }
    }
    return null;
}
From source file:net.staticsnow.nexus.repository.apt.internal.hosted.AptHostedFacet.java
@Transactional(retryOn = { ONeedRetryException.class })
public void ingestAsset(Payload body) throws IOException, PGPException {
    AptFacet aptFacet = getRepository().facet(AptFacet.class);
    StorageTx tx = UnitOfWork.currentTx();
    Bucket bucket = tx.findBucket(getRepository());

    ControlFile control = null;
    try (TempStreamSupplier supplier = new TempStreamSupplier(body.openInputStream());
            ArArchiveInputStream is = new ArArchiveInputStream(supplier.get())) {
        ArchiveEntry debEntry;
        while ((debEntry = is.getNextEntry()) != null) {
            InputStream controlStream;
            switch (debEntry.getName()) {
            case "control.tar":
                controlStream = new CloseShieldInputStream(is);
                break;
            case "control.tar.gz":
                controlStream = new GZIPInputStream(new CloseShieldInputStream(is));
                break;
            case "control.tar.xz":
                controlStream = new XZCompressorInputStream(new CloseShieldInputStream(is));
                break; // added: the original fell through to "default: continue", skipping .xz control archives
            default:
                continue;
            }
            try (TarArchiveInputStream controlTarStream = new TarArchiveInputStream(controlStream)) {
                ArchiveEntry tarEntry;
                while ((tarEntry = controlTarStream.getNextEntry()) != null) {
                    if (tarEntry.getName().equals("control") || tarEntry.getName().equals("./control")) {
                        control = new ControlFileParser().parseControlFile(controlTarStream);
                    }
                }
            }
        }
        if (control == null) {
            throw new IllegalOperationException("Invalid Debian package supplied");
        }

        String name = control.getField("Package").map(f -> f.value).get();
        String version = control.getField("Version").map(f -> f.value).get();
        String architecture = control.getField("Architecture").map(f -> f.value).get();
        String assetName = name + "_" + version + "_" + architecture + ".deb";
        String assetPath = "pool/" + name.substring(0, 1) + "/" + name + "/" + assetName;

        Content content = aptFacet.put(assetPath,
                new StreamPayload(() -> supplier.get(), body.getSize(), body.getContentType()));
        Asset asset = Content.findAsset(tx, bucket, content);
        String indexSection = buildIndexSection(control, asset.size(),
                asset.getChecksums(FacetHelper.hashAlgorithms), assetPath);
        asset.formatAttributes().set(P_ARCHITECTURE, architecture);
        asset.formatAttributes().set(P_PACKAGE_NAME, name);
        asset.formatAttributes().set(P_PACKAGE_VERSION, version);
        asset.formatAttributes().set(P_INDEX_SECTION, indexSection);
        asset.formatAttributes().set(P_ASSET_KIND, "DEB");
        tx.saveAsset(asset);

        List<AssetChange> changes = new ArrayList<>();
        changes.add(new AssetChange(AssetAction.ADDED, asset));
        for (Asset removed : selectOldPackagesToRemove(name, architecture)) {
            tx.deleteAsset(removed);
            changes.add(new AssetChange(AssetAction.REMOVED, removed));
        }
        rebuildIndexesInTransaction(tx, changes.stream().toArray(AssetChange[]::new));
    }
}
From source file:org.apache.ant.compress.util.ArStreamFactory.java
/**
 * @param stream the stream to read from, should be buffered
 * @param encoding the encoding of the entry names, ignored
 */
public ArchiveInputStream getArchiveStream(InputStream stream, String encoding) throws IOException {
    return new ArArchiveInputStream(stream);
}