Example usage for org.apache.commons.compress.archivers.tar TarArchiveInputStream getNextTarEntry

Introduction

This page collects example usages of org.apache.commons.compress.archivers.tar.TarArchiveInputStream.getNextTarEntry().

Prototype

public TarArchiveEntry getNextTarEntry() throws IOException 

Document

Get the next entry in this tar archive.
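
Because getNextTarEntry() returns null once the end of the archive is reached, it is typically called in a loop, as in the examples below. A minimal, self-contained sketch (the class name ListTarEntries is illustrative, not taken from the examples):

import java.io.FileInputStream;
import java.io.IOException;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;

public class ListTarEntries {
    public static void main(String[] args) throws IOException {
        // args[0] is expected to be the path to an uncompressed .tar file
        try (TarArchiveInputStream tarIn = new TarArchiveInputStream(new FileInputStream(args[0]))) {
            TarArchiveEntry entry;
            // getNextTarEntry() returns null when there are no more entries
            while ((entry = tarIn.getNextTarEntry()) != null) {
                System.out.println(entry.getName() + " (" + entry.getSize() + " bytes)");
            }
        }
    }
}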

Usage

From source file:org.jboss.qa.jenkins.test.executor.utils.unpack.GUnZipper.java

private static Set<TarArchiveEntry> getEntries(TarArchiveInputStream tarIn) throws IOException {
    final Set<TarArchiveEntry> entries = new HashSet<>();
    while (true) {
        final TarArchiveEntry entry = tarIn.getNextTarEntry();
        if (entry == null) {
            break;
        }
        entries.add(entry);
    }
    return entries;
}

From source file:org.jboss.tools.openshift.reddeer.utils.FileHelper.java

public static void extractTarGz(File archive, File outputDirectory) {
    InputStream inputStream = null;
    try {
        logger.info("Opening stream to gzip archive");
        inputStream = new GzipCompressorInputStream(new FileInputStream(archive));
    } catch (IOException ex) {
        throw new OpenShiftToolsException(
                "Exception occured while processing tar.gz file.\n" + ex.getMessage());
    }

    logger.info("Opening stream to tar archive");
    BufferedOutputStream outputStream = null;
    TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(inputStream);
    TarArchiveEntry currentEntry = null;
    try {
        while ((currentEntry = tarArchiveInputStream.getNextTarEntry()) != null) {
            if (currentEntry.isDirectory()) {
                logger.info("Creating directory: " + currentEntry.getName());
                createDirectory(new File(outputDirectory, currentEntry.getName()));
            } else {
                File outputFile = new File(outputDirectory, currentEntry.getName());
                if (!outputFile.getParentFile().exists()) {
                    logger.info("Creating directory: " + outputFile.getParentFile());
                    createDirectory(outputFile.getParentFile());
                }

                outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));

                logger.info("Extracting file: " + currentEntry.getName());
                copy(tarArchiveInputStream, outputStream, (int) currentEntry.getSize());
                outputStream.close();

                outputFile.setExecutable(true);
                outputFile.setReadable(true);
                outputFile.setWritable(true);
            }
        }
    } catch (IOException e) {
        throw new OpenShiftToolsException("Exception occured while processing tar.gz file.\n" + e.getMessage());
    } finally {
        try {
            tarArchiveInputStream.close();
        } catch (Exception ex) {
            // ignore failures while closing the tar stream
        }
        try {
            if (outputStream != null) {
                outputStream.close();
            }
        } catch (Exception ex) {
            // ignore failures while closing the output stream
        }
    }
}

From source file:org.jboss.tools.runtime.core.extract.internal.UntarUtility.java

public IStatus extract(File dest, IOverwrite overwriteQuery, IProgressMonitor monitor) throws CoreException {
    String possibleRoot = null;
    try {
        dest.mkdir();
        TarArchiveInputStream tarIn = getTarArchiveInputStream(file);
        TarArchiveEntry tarEntry = tarIn.getNextTarEntry();
        while (tarEntry != null) {
            String name = tarEntry.getName();
            File destPath = new File(dest, name);
            if (tarEntry.isDirectory()) {
                destPath.mkdirs();
            } else {
                destPath.createNewFile();
                byte[] btoRead = new byte[1024];
                try (BufferedOutputStream bout = new BufferedOutputStream(new FileOutputStream(destPath))) {
                    int length = 0;
                    while ((length = tarIn.read(btoRead)) != -1) {
                        bout.write(btoRead, 0, length);
                    }
                }
            }

            // Lets check for a possible root, to avoid scanning the archive again later
            possibleRoot = checkForPossibleRootEntry(possibleRoot, name);

            tarEntry = tarIn.getNextTarEntry();
        }
        tarIn.close();
    } catch (IOException ioe) {
        throw new CoreException(new Status(IStatus.ERROR, RuntimeCoreActivator.PLUGIN_ID, 0,
                NLS.bind("Error extracting runtime {0}", ioe.getLocalizedMessage()), ioe)); //$NON-NLS-1$
    }
    this.discoveredRoot = possibleRoot;
    return Status.OK_STATUS;
}

From source file:org.jenkinsci.plugins.os_ci.utils.CompressUtils.java

public static void untarFile(File file) throws IOException {
    FileInputStream fileInputStream = null;
    String currentDir = file.getParent();
    try {
        fileInputStream = new FileInputStream(file);
        GZIPInputStream gzipInputStream = new GZIPInputStream(fileInputStream);
        TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(gzipInputStream);

        TarArchiveEntry tarArchiveEntry;

        while (null != (tarArchiveEntry = tarArchiveInputStream.getNextTarEntry())) {
            if (tarArchiveEntry.isDirectory()) {
                FileUtils.forceMkdir(new File(currentDir + File.separator + tarArchiveEntry.getName()));
            } else {
                byte[] content = new byte[(int) tarArchiveEntry.getSize()];
                int offset = 0;
                // A single read() may return fewer bytes than requested, so loop until the entry is fully read
                while (offset < content.length) {
                    int read = tarArchiveInputStream.read(content, offset, content.length - offset);
                    if (read < 0) {
                        break;
                    }
                    offset += read;
                }
                FileOutputStream outputFile = new FileOutputStream(
                        currentDir + File.separator + tarArchiveEntry.getName());
                org.apache.commons.io.IOUtils.write(content, outputFile);
                outputFile.close();
            }
        }
    } catch (FileNotFoundException e) {
        throw new IOException(e);
    }
}

From source file:org.kitesdk.cli.commands.TarImportCommand.java

@Override
public int run() throws IOException {
    Preconditions.checkArgument(targets != null && targets.size() == 2,
            "Tar path and target dataset URI are required.");

    Preconditions.checkArgument(SUPPORTED_TAR_COMPRESSION_TYPES.contains(compressionType),
            "Compression type " + compressionType + " is not supported");

    String source = targets.get(0);
    String datasetUri = targets.get(1);

    long blockSize = getConf().getLong("dfs.blocksize", DEFAULT_BLOCK_SIZE);

    int success = 0;

    View<TarFileEntry> targetDataset;
    if (Datasets.exists(datasetUri)) {
        console.debug("Using existing dataset: {}", datasetUri);
        targetDataset = Datasets.load(datasetUri, TarFileEntry.class);
    } else {
        console.info("Creating new dataset: {}", datasetUri);
        DatasetDescriptor.Builder descriptorBuilder = new DatasetDescriptor.Builder();
        descriptorBuilder.format(Formats.AVRO);
        descriptorBuilder.schema(TarFileEntry.class);
        targetDataset = Datasets.create(datasetUri, descriptorBuilder.build(), TarFileEntry.class);
    }

    DatasetWriter<TarFileEntry> writer = targetDataset.newWriter();

    // Create a Tar input stream wrapped in appropriate decompressor
    // TODO: Enhancement would be to use native compression libs
    TarArchiveInputStream tis;
    CompressionType tarCompressionType = CompressionType.NONE;

    if (compressionType.isEmpty()) {
        if (source.endsWith(".tar")) {
            tarCompressionType = CompressionType.NONE;
        } else if (source.endsWith(".tar.gz")) {
            tarCompressionType = CompressionType.GZIP;
        } else if (source.endsWith(".tar.bz2")) {
            tarCompressionType = CompressionType.BZIP2;
        }
    } else if (compressionType.equals("gzip")) {
        tarCompressionType = CompressionType.GZIP;
    } else if (compressionType.equals("bzip2")) {
        tarCompressionType = CompressionType.BZIP2;
    } else {
        tarCompressionType = CompressionType.NONE;
    }

    console.info("Using {} compression", tarCompressionType);

    switch (tarCompressionType) {
    case GZIP:
        tis = new TarArchiveInputStream(new GzipCompressorInputStream(open(source)));
        break;
    case BZIP2:
        tis = new TarArchiveInputStream(new BZip2CompressorInputStream(open(source)));
        break;
    case NONE:
    default:
        tis = new TarArchiveInputStream(open(source));
    }

    TarArchiveEntry entry;

    try {
        int count = 0;
        while ((entry = tis.getNextTarEntry()) != null) {
            if (!entry.isDirectory()) {
                long size = entry.getSize();
                if (size >= blockSize) {
                    console.warn(
                            "Entry \"{}\" (size {}) is larger than the "
                                    + "HDFS block size of {}. This may result in remote block reads",
                            new Object[] { entry.getName(), size, blockSize });
                }

                byte[] buf = new byte[(int) size];
                try {
                    IOUtils.readFully(tis, buf, 0, (int) size);
                } catch (IOException e) {
                    console.error("Did not read entry {} successfully (entry size {})", entry.getName(), size);
                    success = 1;
                    throw e;
                }
                writer.write(TarFileEntry.newBuilder().setFilename(entry.getName())
                        .setFilecontent(ByteBuffer.wrap(buf)).build());
                count++;
            }
        }
        console.info("Added {} records to \"{}\"", count, targetDataset.getDataset().getName());
    } finally {
        IOUtils.closeStream(writer);
        IOUtils.closeStream(tis);
    }

    return success;
}

From source file:org.mskcc.cbio.importer.io.internal.FileUtilsImpl.java

private InputStream readContent(ImportDataRecord importDataRecord, InputStream is) throws Exception {

    InputStream toReturn = null;

    try {
        // decompress .gz file
        if (LOG.isInfoEnabled()) {
            LOG.info("readContent(), decompressing: " + importDataRecord.getCanonicalPathToData());
        }

        InputStream unzippedContent = new GzipCompressorInputStream(is);
        // if tarball, untar
        if (importDataRecord.getCanonicalPathToData().toLowerCase().endsWith("tar.gz")) {
            if (LOG.isInfoEnabled()) {
                LOG.info("readContent(), gzip file is a tarball, untarring");
            }
            TarArchiveInputStream tis = new TarArchiveInputStream(unzippedContent);
            TarArchiveEntry entry = null;
            while ((entry = tis.getNextTarEntry()) != null) {
                String entryName = entry.getName();
                String dataFile = importDataRecord.getDataFilename();
                if (dataFile.contains(DatatypeMetadata.TUMOR_TYPE_TAG)) {
                    dataFile = dataFile.replaceAll(DatatypeMetadata.TUMOR_TYPE_TAG,
                            importDataRecord.getTumorType().toUpperCase());
                }
                if (entryName.contains(dataFile)) {
                    if (LOG.isInfoEnabled()) {
                        LOG.info("Processing tar-archive: " + importDataRecord.getDataFilename());
                    }
                    toReturn = tis;
                    break;
                }
            }
        } else {
            toReturn = unzippedContent;
        }
    } catch (Exception e) {
        throw e;
    }

    // outta here
    return toReturn;
}

From source file:org.mulima.internal.freedb.FreeDbTarDaoImpl.java

/**
 * {@inheritDoc}
 */
@Override
public List<Disc> getAllDiscsFromOffset(int startNum, int numToRead) {
    FileInputStream fin = null;
    BufferedInputStream bfin = null;
    TarArchiveInputStream tin = null;
    List<Disc> discs = new ArrayList<Disc>();
    try {
        fin = new FileInputStream(tarArchive);
        bfin = new BufferedInputStream(fin);
        tin = new TarArchiveInputStream(bfin);

        int currentNum = 0;
        TarArchiveEntry entry = tin.getNextTarEntry();
        ProgressBar progress = new SLF4JProgressBar("TAR getDiscs", numToRead);
        while (entry != null && (numToRead < 0 || currentNum < startNum + numToRead)) {
            if (!entry.isDirectory() && currentNum >= startNum) {
                logger.debug("Loading: " + entry.getName());
                int offset = 0;
                byte[] content = new byte[(int) entry.getSize()];
                while (offset < content.length) {
                    int read = tin.read(content, offset, content.length - offset);
                    if (read < 0) {
                        // Truncated archive; stop reading instead of looping forever
                        break;
                    }
                    offset += read;
                }
                Disc disc = bytesToDisc(content);
                if (disc == null) {
                    logger.warn("Invalid file: " + entry.getName());
                } else {
                    logger.debug(disc.toString());
                    discs.add(disc);
                }
            }

            entry = tin.getNextTarEntry();
            currentNum++;
            progress.next();
        }

        if (entry == null) {
            progress.done();
        }
    } catch (IOException e) {
        logger.error("Problem reading tar archive.", e);
        throw new UncheckedIOException("Problem reading tar archive.", e);
    } finally {
        try {
            if (tin != null) {
                tin.close();
            } else if (bfin != null) {
                bfin.close();
            } else if (fin != null) {
                fin.close();
            }
        } catch (IOException e) {
            logger.error("Problem closing streams.", e);
        }
    }
    return discs;
}

From source file:org.openjump.core.ui.plugin.file.open.OpenFileWizardState.java

public void setupFileLoaders(File[] files, FileLayerLoader fileLayerLoader) {
    Set<File> fileSet = new TreeSet<File>(Arrays.asList(files));
    multiLoaderFiles.clear();
    // explicit loader chosen
    if (fileLayerLoader != null) {
        fileLoaderMap.clear();
        for (File file : fileSet) {
            setFileLoader(file.toURI(), fileLayerLoader);
        }
    } else {
        // Remove old entries in fileloadermap
        fileLoaderMap.clear();
        //      for (Iterator<Entry<URI, FileLayerLoader>> iterator = fileLoaderMap.entrySet()
        //        .iterator(); iterator.hasNext();) {
        //        Entry<URI, FileLayerLoader> entry = iterator.next();
        //        URI fileUri = entry.getKey();
        //        File file;

        //        if (fileUri.getScheme().equals("zip")) {
        //          file = UriUtil.getZipFile(fileUri);
        //        } else {
        //          file = new File(fileUri);
        //        }
        //        
        //        if (!fileSet.contains(file)) {
        //          FileLayerLoader loader = entry.getValue();
        //          fileLoaderFiles.get(loader);
        //          Set<URI> loaderFiles = fileLoaderFiles.get(loader);
        //          if (loaderFiles != null) {
        //            loaderFiles.remove(fileUri);
        //          }
        //          iterator.remove();
        //        }
        //      }

        // manually add compressed files here
        for (File file : files) {
            // zip files
            if (CompressedFile.isZip(file.getName())) {
                try {
                    ZipFile zipFile = new ZipFile(file);
                    URI fileUri = file.toURI();
                    Enumeration entries = zipFile.getEntries();
                    while (entries.hasMoreElements()) {
                        ZipArchiveEntry entry = (ZipArchiveEntry) entries.nextElement();
                        if (!entry.isDirectory()) {
                            URI entryUri = UriUtil.createZipUri(file, entry.getName());
                            String entryExt = UriUtil.getFileExtension(entryUri);
                            //System.out.println(entryUri+"<->"+entryExt);
                            addFile(entryExt, entryUri);
                        }
                    }
                } catch (Exception e) {
                    errorHandler.handleThrowable(e);
                }
            }
            // tar[.gz,.bz...] (un)compressed archive files
            else if (CompressedFile.isTar(file.getName())) {
                try {
                    InputStream is = CompressedFile.openFile(file.getAbsolutePath(), null);
                    TarArchiveEntry entry;
                    TarArchiveInputStream tis = new TarArchiveInputStream(is);
                    while ((entry = tis.getNextTarEntry()) != null) {
                        if (!entry.isDirectory()) {
                            URI entryUri = UriUtil.createZipUri(file, entry.getName());

                            String entryExt = UriUtil.getFileExtension(entryUri);
                            addFile(entryExt, entryUri);
                        }
                    }
                    tis.close();
                } catch (Exception e) {
                    errorHandler.handleThrowable(e);
                }
            }
            // 7zip compressed files
            else if (CompressedFile.isSevenZ(file.getName())) {
                try {
                    //System.out.println(file.getName());
                    SevenZFile sevenZFile = new SevenZFile(file);
                    SevenZArchiveEntry entry;
                    while ((entry = sevenZFile.getNextEntry()) != null) {
                        if (!entry.isDirectory()) {
                            URI entryUri = UriUtil.createZipUri(file, entry.getName());

                            String entryExt = UriUtil.getFileExtension(entryUri);
                            addFile(entryExt, entryUri);
                        }
                    }
                    sevenZFile.close();
                } catch (IOException e) {
                    errorHandler.handleThrowable(e);
                }
            }
            // compressed files
            else if (CompressedFile.hasCompressedFileExtension(file.getName())) {
                String[] parts = file.getName().split("\\.");
                if (parts.length > 2)
                    addFile(parts[parts.length - 2], file.toURI());
            }
            // anything else is a plain data file
            else {
                URI fileUri = file.toURI();
                addFile(FileUtil.getExtension(file), fileUri);
            }
        }
    }
}

From source file:org.openmrs.module.openconceptlab.client.OclClient.java

@SuppressWarnings("resource")
public OclResponse ungzipAndUntarResponse(InputStream response, Date date) throws IOException {
    GZIPInputStream gzipIn = new GZIPInputStream(response);
    TarArchiveInputStream tarIn = new TarArchiveInputStream(gzipIn);
    boolean foundEntry = false;
    try {
        TarArchiveEntry entry = tarIn.getNextTarEntry();
        while (entry != null) {
            if (entry.getName().equals("export.json")) {
                foundEntry = true;
                return new OclResponse(tarIn, entry.getSize(), date);
            }
            entry = tarIn.getNextTarEntry();
        }

        tarIn.close();
    } finally {
        if (!foundEntry) {
            IOUtils.closeQuietly(tarIn);
        }
    }
    throw new IOException("Unsupported format of response. Expected tar.gz archive with export.json.");
}

From source file:org.opentestsystem.delivery.testreg.transformer.TarUnbundler.java

public byte[][] unbundle(File bundledFile) throws IOException {

    TarArchiveInputStream tarInputStream = new TarArchiveInputStream(new FileInputStream(bundledFile));
    List<byte[]> byteArrays = new ArrayList<byte[]>();
    ByteArrayOutputStream bytesOut = null;

    TarArchiveEntry entry = null;

    while ((entry = tarInputStream.getNextTarEntry()) != null) {

        byte[] buffer = new byte[4096];
        int len = 0;
        bytesOut = new ByteArrayOutputStream();

        while ((len = tarInputStream.read(buffer)) > 0) {
            bytesOut.write(buffer, 0, len);
        }

        byteArrays.add(entry.getName().getBytes());
        byteArrays.add(bytesOut.toByteArray());

        bytesOut.close();
    }

    tarInputStream.close();

    return byteArrays.toArray(new byte[0][]);
}