List of usage examples for org.apache.commons.compress.archivers.tar TarArchiveEntry getSize
public long getSize()
From source file:io.cloudslang.content.vmware.services.DeployOvfTemplateService.java
/**
 * Resolves the source of a VMDK transfer for a template on disk.
 *
 * <p>For an OVA, scans the tar archive for an entry whose base file name starts with
 * {@code vmdkName} and returns a stream-backed transfer; for an OVF, resolves the VMDK
 * file next to the descriptor and returns a file-backed transfer.
 *
 * @param templateFilePathStr path to the .ova or .ovf template
 * @param vmdkName            base name prefix of the wanted VMDK entry
 * @return a transfer source for the VMDK content
 * @throws IOException      if the archive cannot be read
 * @throws RuntimeException if the path is neither an OVA nor an OVF, or no entry matches
 */
private ITransferVmdkFrom getTransferVmdK(final String templateFilePathStr, final String vmdkName)
        throws IOException {
    final Path templateFilePath = Paths.get(templateFilePathStr);
    if (isOva(templateFilePath)) {
        // The stream is intentionally NOT closed on the success path: ownership is
        // handed to TransferVmdkFromInputStream, which consumes it later.
        final TarArchiveInputStream tar = new TarArchiveInputStream(new FileInputStream(templateFilePathStr));
        boolean ownershipTransferred = false;
        try {
            TarArchiveEntry entry;
            while ((entry = tar.getNextTarEntry()) != null) {
                if (new File(entry.getName()).getName().startsWith(vmdkName)) {
                    ownershipTransferred = true;
                    return new TransferVmdkFromInputStream(tar, entry.getSize());
                }
            }
        } finally {
            // Fix: the original leaked the tar stream when no entry matched or an
            // exception was thrown while scanning the archive.
            if (!ownershipTransferred) {
                tar.close();
            }
        }
    } else if (isOvf(templateFilePath)) {
        final Path vmdkPath = templateFilePath.getParent().resolve(vmdkName);
        return new TransferVmdkFromFile(vmdkPath.toFile());
    }
    throw new RuntimeException(NOT_OVA_OR_OVF);
}
From source file:io.cloudslang.content.vmware.services.DeployOvfTemplateService.java
/**
 * Reads the OVF descriptor of a template as a string.
 *
 * <p>For an OVA, scans the tar archive for the first entry whose base name is an OVF
 * descriptor and renders it to a string; for a plain OVF, reads the file directly as UTF-8.
 *
 * @param templatePath path to the .ova or .ovf template
 * @return the OVF descriptor contents
 * @throws IOException      if the template cannot be read
 * @throws RuntimeException if the path is neither OVA nor OVF, or the OVA holds no descriptor
 */
@NotNull
private String getOvfTemplateAsString(final String templatePath) throws IOException {
    if (isOva(Paths.get(templatePath))) {
        try (final TarArchiveInputStream tar = new TarArchiveInputStream(new FileInputStream(templatePath))) {
            TarArchiveEntry entry;
            while ((entry = tar.getNextTarEntry()) != null) {
                if (isOvf(Paths.get(new File(entry.getName()).getName()))) {
                    return OvfUtils.writeToString(tar, entry.getSize());
                }
            }
        }
    } else if (isOvf(Paths.get(templatePath))) {
        // Fix: the original opened this stream and never closed it, leaking a
        // file handle on every call.
        try (final InputStream inputStream = new FileInputStream(templatePath)) {
            return IOUtils.toString(inputStream, UTF_8);
        }
    }
    throw new RuntimeException(FILE_COULD_NOT_BE_READ);
}
From source file:com.amaze.filemanager.filesystem.compressed.extractcontents.helpers.GzipExtractor.java
@Override protected void extractWithFilter(@NonNull Filter filter) throws IOException { long totalBytes = 0; ArrayList<TarArchiveEntry> archiveEntries = new ArrayList<>(); TarArchiveInputStream inputStream = new TarArchiveInputStream( new GzipCompressorInputStream(new FileInputStream(filePath))); TarArchiveEntry tarArchiveEntry; while ((tarArchiveEntry = inputStream.getNextTarEntry()) != null) { if (CompressedHelper.isEntryPathValid(tarArchiveEntry.getName())) { if (filter.shouldExtract(tarArchiveEntry.getName(), tarArchiveEntry.isDirectory())) { archiveEntries.add(tarArchiveEntry); totalBytes += tarArchiveEntry.getSize(); }/* w w w.j a v a 2 s.c o m*/ } else { invalidArchiveEntries.add(tarArchiveEntry.getName()); } } listener.onStart(totalBytes, archiveEntries.get(0).getName()); inputStream.close(); inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(filePath))); for (TarArchiveEntry entry : archiveEntries) { if (!listener.isCancelled()) { listener.onUpdate(entry.getName()); //TAR is sequential, you need to walk all the way to the file you want while (entry.hashCode() != inputStream.getNextTarEntry().hashCode()) ; extractEntry(context, inputStream, entry, outputPath); } } inputStream.close(); listener.onFinish(); }
From source file:com.redhat.red.offliner.ftest.SinglePlaintextDownloadOfTarballFTest.java
/** * In general, we should only have one test method per functional test. This allows for the best parallelism when we * execute the tests, especially if the setup takes some time. * * @throws Exception In case anything (anything at all) goes wrong! *///from ww w. j a v a 2s .co m @Test public void run() throws Exception { // Generate some test content String path = contentGenerator.newArtifactPath("tar.gz"); Map<String, byte[]> entries = new HashMap<>(); entries.put(contentGenerator.newArtifactPath("jar"), contentGenerator.newBinaryContent(2400)); entries.put(contentGenerator.newArtifactPath("jar"), contentGenerator.newBinaryContent(2400)); final File tgz = makeTarball(entries); System.out.println("tar content array has length: " + tgz.length()); // We only need one repo server. ExpectationServer server = new ExpectationServer(); server.start(); // Register the generated content by writing it to the path within the repo server's dir structure. // This way when the path is requested it can be downloaded instead of returning a 404. 
server.expect("GET", server.formatUrl(path), (req, resp) -> { // Content-Length: 47175 // Content-Type: application/x-gzip resp.setHeader("Content-Encoding", "gzip"); resp.setHeader("Content-Type", "application/x-gzip"); byte[] raw = FileUtils.readFileToByteArray(tgz); ByteArrayOutputStream baos = new ByteArrayOutputStream(); GzipCompressorOutputStream gzout = new GzipCompressorOutputStream(baos); gzout.write(raw); gzout.finish(); byte[] content = baos.toByteArray(); resp.setHeader("Content-Length", Long.toString(content.length)); OutputStream respStream = resp.getOutputStream(); respStream.write(content); respStream.flush(); System.out.println("Wrote content with length: " + content.length); }); byte[] content = FileUtils.readFileToByteArray(tgz); server.expect("GET", server.formatUrl(path + Main.SHA_SUFFIX), 200, sha1Hex(content)); server.expect("GET", server.formatUrl(path + Main.MD5_SUFFIX), 200, md5Hex(content)); // Write the plaintext file we'll use as input. File plaintextList = temporaryFolder.newFile("artifact-list." + getClass().getSimpleName() + ".txt"); String pathWithChecksum = contentGenerator.newPlaintextEntryWithChecksum(path, content); FileUtils.write(plaintextList, pathWithChecksum); Options opts = new Options(); opts.setBaseUrls(Collections.singletonList(server.getBaseUri())); // Capture the downloads here so we can verify the content. File downloads = temporaryFolder.newFolder(); opts.setDownloads(downloads); opts.setLocations(Collections.singletonList(plaintextList.getAbsolutePath())); opts.setConnections(1); // run `new Main(opts).run()` and return the Main instance so we can query it for errors, etc. 
Main finishedMain = run(opts); ConcurrentHashMap<String, Throwable> errors = finishedMain.getErrors(); System.out.printf("ERRORS:\n\n%s\n\n\n", StringUtils.join(errors.keySet().stream() .map(k -> "ERROR: " + k + ": " + errors.get(k).getMessage() + "\n " + StringUtils.join(errors.get(k).getStackTrace(), "\n ")) .collect(Collectors.toList()), "\n\n")); File downloaded = new File(downloads, path); assertThat("File: " + path + " doesn't seem to have been downloaded!", downloaded.exists(), equalTo(true)); // assertThat( "Downloaded file: " + path + " contains the wrong content!", // FileUtils.readFileToByteArray( downloaded ), equalTo( content ) ); File tarball = new File(downloads, path); System.out.println("Length of downloaded file: " + tarball.length()); File tmp = new File("/tmp/download.tar.gz"); File tmp2 = new File("/tmp/content.tar.gz"); FileUtils.writeByteArrayToFile(tmp2, content); FileUtils.copyFile(tarball, tmp); try (TarArchiveInputStream tarIn = new TarArchiveInputStream( new GzipCompressorInputStream(new FileInputStream(tarball)))) { TarArchiveEntry entry = null; while ((entry = tarIn.getNextTarEntry()) != null) { byte[] entryData = new byte[(int) entry.getSize()]; int read = tarIn.read(entryData, 0, entryData.length); assertThat("Not enough bytes read for: " + entry.getName(), read, equalTo((int) entry.getSize())); assertThat(entry.getName() + ": data doesn't match input", Arrays.equals(entries.get(entry.getName()), entryData), equalTo(true)); } } assertThat("Wrong number of downloads logged. Should have been 3 including checksums.", finishedMain.getDownloaded(), equalTo(3)); assertThat("Errors should be empty!", errors.isEmpty(), equalTo(true)); }
From source file:com.dtstack.jlogstash.inputs.ReadTarFile.java
public BufferedReader getNextBuffer() { if (currBuff != null) { //?currBuffer??inputstream currBuff = null;/*w ww . java 2 s .c om*/ } TarArchiveEntry entry = null; try { while ((entry = tarAchive.getNextTarEntry()) != null) { if (entry.isDirectory()) { continue; } currFileName = entry.getName(); currFileSize = (int) entry.getSize(); String identify = getIdentify(currFileName); long skipNum = getSkipNum(identify); if (skipNum >= entry.getSize()) { continue; } tarAchive.skip(skipNum); currBuff = new BufferedReader(new InputStreamReader(tarAchive, encoding)); break; } } catch (Exception e) { logger.error("", e); } if (currBuff == null) { try { doAfterReaderOver(); } catch (IOException e) { logger.error("", e); } } return currBuff; }
From source file:ca.nrc.cadc.sc2pkg.PackageIntTest.java
private Content getEntry(TarArchiveInputStream tar) throws IOException, NoSuchAlgorithmException { Content ret = new Content(); TarArchiveEntry entry = tar.getNextTarEntry(); ret.name = entry.getName();//from w ww . ja v a 2 s. co m if (ret.name.endsWith("README")) { byte[] buf = new byte[(int) entry.getSize()]; tar.read(buf); ByteArrayInputStream bis = new ByteArrayInputStream(buf); LineNumberReader r = new LineNumberReader(new InputStreamReader(bis)); String line = r.readLine(); while (line != null) { String[] tokens = line.split(" "); // status [md5 filename url] String status = tokens[0]; if ("OK".equals(status)) { String fname = tokens[1]; String md5 = tokens[2]; ret.md5map.put(fname, md5); } else { throw new RuntimeException("tar content failure: " + line); } line = r.readLine(); } } else { MessageDigest md5 = MessageDigest.getInstance("MD5"); byte[] buf = new byte[8192]; int n = tar.read(buf); while (n > 0) { md5.update(buf, 0, n); n = tar.read(buf); } byte[] md5sum = md5.digest(); ret.contentMD5 = HexUtil.toHex(md5sum); } return ret; }
From source file:edu.wisc.doit.tcrypt.BouncyCastleFileDecrypter.java
protected String readAndDecrypt(TarArchiveInputStream inputStream, final String fileName) throws IOException, InvalidCipherTextException { //Read keyfile.enc from the TAR final TarArchiveEntry keyFileEntry = inputStream.getNextTarEntry(); //Verify file name final String keyFileName = keyFileEntry.getName(); if (!fileName.equals(keyFileName)) { throw new IllegalArgumentException("The first entry in the TAR must be name: " + fileName); }/*from w w w .j a v a 2s .c o m*/ //Verify file size if (keyFileEntry.getSize() > MAX_ENCRYPTED_KEY_FILE_SIZE) { throw new IllegalArgumentException("The encrypted archive's key file cannot be longer than " + MAX_ENCRYPTED_KEY_FILE_SIZE + " bytes"); } //Decode the base64 keyfile final byte[] encKeyFileBase64Bytes = IOUtils.toByteArray(inputStream); final byte[] encKeyFileBytes = Base64.decodeBase64(encKeyFileBase64Bytes); //Decrypt the keyfile into UTF-8 String final AsymmetricBlockCipher decryptCipher = this.getDecryptCipher(); final byte[] keyFileBytes = decryptCipher.processBlock(encKeyFileBytes, 0, encKeyFileBytes.length); return new String(keyFileBytes, FileEncrypter.CHARSET); }
From source file:com.redhat.red.offliner.ftest.SinglePlaintextDownloadOfTarballFTest.java
@Test public void testGenericTarballDownload() throws Exception { // Generate some test content String path = contentGenerator.newArtifactPath("tar.gz"); Map<String, byte[]> entries = new HashMap<>(); entries.put(contentGenerator.newArtifactPath("jar"), contentGenerator.newBinaryContent(2400)); entries.put(contentGenerator.newArtifactPath("jar"), contentGenerator.newBinaryContent(2400)); final File tgz = makeTarball(entries); System.out.println("tar content array has length: " + tgz.length()); // We only need one repo server. ExpectationServer server = new ExpectationServer(); server.start();/*from w w w . j a va 2 s . co m*/ String url = server.formatUrl(path); // Register the generated content by writing it to the path within the repo server's dir structure. // This way when the path is requested it can be downloaded instead of returning a 404. server.expect("GET", url, (req, resp) -> { // Content-Length: 47175 // Content-Type: application/x-gzip resp.setHeader("Content-Encoding", "x-gzip"); resp.setHeader("Content-Type", "application/x-gzip"); byte[] raw = FileUtils.readFileToByteArray(tgz); ByteArrayOutputStream baos = new ByteArrayOutputStream(); GzipCompressorOutputStream gzout = new GzipCompressorOutputStream(baos); gzout.write(raw); gzout.finish(); byte[] content = baos.toByteArray(); resp.setHeader("Content-Length", Long.toString(content.length)); OutputStream respStream = resp.getOutputStream(); respStream.write(content); respStream.flush(); System.out.println("Wrote content with length: " + content.length); }); final PoolingHttpClientConnectionManager ccm = new PoolingHttpClientConnectionManager(); ccm.setMaxTotal(1); final HttpClientBuilder builder = HttpClients.custom().setConnectionManager(ccm); CloseableHttpClient client = builder.build(); HttpGet get = new HttpGet(url); // get.setHeader( "Accept-Encoding", "gzip,deflate" ); Boolean result = client.execute(get, (response) -> { Arrays.stream(response.getAllHeaders()).forEach((h) -> 
System.out.println("Header:: " + h)); Header contentEncoding = response.getEntity().getContentEncoding(); if (contentEncoding == null) { contentEncoding = response.getFirstHeader("Content-Encoding"); } System.out.printf("Got content encoding: %s\n", contentEncoding == null ? "None" : contentEncoding.getValue()); byte[] content = IOUtils.toByteArray(response.getEntity().getContent()); try (TarArchiveInputStream tarIn = new TarArchiveInputStream( new GzipCompressorInputStream(new ByteArrayInputStream(content)))) { TarArchiveEntry entry = null; while ((entry = tarIn.getNextTarEntry()) != null) { System.out.printf("Got tar entry: %s\n", entry.getName()); byte[] entryData = new byte[(int) entry.getSize()]; int read = tarIn.read(entryData, 0, entryData.length); } } return false; }); }
From source file:eu.openanalytics.rsb.component.AdminResource.java
private void extractCatalogFiles(final File packageSourceFile) throws IOException { final File tempDirectory = packageSourceFile.getParentFile(); // 1) extract TAR final File packageTarFile = File.createTempFile("rsb-install.", ".tar", tempDirectory); final GzipCompressorInputStream gzIn = new GzipCompressorInputStream( new FileInputStream(packageSourceFile)); FileOutputStream output = new FileOutputStream(packageTarFile); IOUtils.copyLarge(gzIn, output);//from w w w . j av a 2 s.c om IOUtils.closeQuietly(output); IOUtils.closeQuietly(gzIn); // 2) parse TAR and drop files in catalog final TarArchiveInputStream tarIn = new TarArchiveInputStream(new FileInputStream(packageTarFile)); TarArchiveEntry tarEntry = null; while ((tarEntry = tarIn.getNextTarEntry()) != null) { if (!tarEntry.isFile()) { continue; } final Matcher matcher = TAR_CATALOG_FILE_PATTERN.matcher(tarEntry.getName()); if (matcher.matches()) { final byte[] data = IOUtils.toByteArray(tarIn, tarEntry.getSize()); final String catalogFile = matcher.group(1); final File targetCatalogFile = new File(getConfiguration().getCatalogRootDirectory(), catalogFile); output = new FileOutputStream(targetCatalogFile); IOUtils.write(data, output); IOUtils.closeQuietly(output); getLogger().info("Wrote " + data.length + " bytes in catalog file: " + targetCatalogFile); } } IOUtils.closeQuietly(tarIn); }
From source file:heigit.ors.routing.graphhopper.extensions.reader.borders.CountryBordersReader.java
/** * Method to read the geometries from a GeoJSON file that represent the boundaries of different countries. Ideally * it should be written using many small objects split into hierarchies. * * If the file is a .tar.gz format, it will decompress it and then store the reulting data to be read into the * JSON object./*from ww w.j a v a 2 s.co m*/ * * @return A (Geo)JSON object representing the contents of the file */ private JSONObject readBordersData() throws IOException { String data = ""; InputStream is = null; BufferedReader buf = null; try { is = new FileInputStream(BORDER_FILE); if (BORDER_FILE.endsWith(".tar.gz")) { // We are working with a compressed file TarArchiveInputStream tis = new TarArchiveInputStream( new GzipCompressorInputStream(new BufferedInputStream(is))); TarArchiveEntry entry; StringBuilder sb = new StringBuilder(); while ((entry = tis.getNextTarEntry()) != null) { if (!entry.isDirectory()) { byte[] bytes = new byte[(int) entry.getSize()]; tis.read(bytes); String str = new String(bytes); sb.append(str); } } data = sb.toString(); } else { // Assume a normal file so read line by line buf = new BufferedReader(new InputStreamReader(is)); String line = ""; StringBuilder sb = new StringBuilder(); while ((line = buf.readLine()) != null) { sb.append(line); } data = sb.toString(); } } catch (IOException ioe) { LOGGER.warn("Cannot access borders file!"); throw ioe; } finally { try { if (is != null) is.close(); if (buf != null) buf.close(); } catch (IOException ioe) { LOGGER.warn("Error closing file reader buffers!"); } catch (NullPointerException npe) { // This can happen if the file itself wasn't available throw new IOException("Borders file " + BORDER_FILE + " not found!"); } } JSONObject json = new JSONObject(data); return json; }