Example usage for org.apache.commons.compress.compressors CompressorInputStream close

List of usage examples for org.apache.commons.compress.compressors CompressorInputStream close

Introduction

In this page you can find the example usage for org.apache.commons.compress.compressors CompressorInputStream close.

Prototype

public void close() throws IOException;

Source Link

Document

Closes this input stream and releases any system resources associated with the stream.

Usage

From source file:deployer.TestUtils.java

/**
 * Builds a sample OpenShift web-app tarball: re-packages the sample app
 * tar.gz produced by {@code createSampleAppTarBall(ArtifactType.WebApp)} and
 * appends a {@code .openshift/<CONFIG_DIRECTORY>/standalone.xml} entry.
 *
 * @return the gzip-compressed tar archive wrapped in a ByteBuffer
 * @throws IOException      on any stream failure
 * @throws ArchiveException declared for parity with the other tarball helpers
 */
public static ByteBuffer createSampleOpenShiftWebAppTarBall() throws IOException, ArchiveException {
    ByteArrayInputStream bis = new ByteArrayInputStream(createSampleAppTarBall(ArtifactType.WebApp).array());
    ByteArrayOutputStream bos = new ByteArrayOutputStream(bis.available() + 2048);

    // try-with-resources guarantees every stream is closed (and the gzip
    // trailer written) even if copying fails part-way; the original leaked
    // all four streams on an exception and never closed aos at all.
    try (CompressorInputStream cis = new GzipCompressorInputStream(bis);
            ArchiveInputStream ais = new TarArchiveInputStream(cis);
            CompressorOutputStream cos = new GzipCompressorOutputStream(bos);
            ArchiveOutputStream aos = new TarArchiveOutputStream(cos)) {

        // Copy every entry of the sample app archive unchanged.
        ArchiveEntry nextEntry;
        while ((nextEntry = ais.getNextEntry()) != null) {
            aos.putArchiveEntry(nextEntry);
            IOUtils.copy(ais, aos);
            aos.closeArchiveEntry();
        }

        // Append the standalone.xml configuration entry.
        TarArchiveEntry entry = new TarArchiveEntry(
                Paths.get(".openshift", CONFIG_DIRECTORY, "/standalone.xml").toFile());
        byte[] xmlData = SAMPLE_STANDALONE_DATA.getBytes();
        entry.setSize(xmlData.length);
        aos.putArchiveEntry(entry);
        IOUtils.write(xmlData, aos);
        aos.closeArchiveEntry();

        aos.finish();
    }
    return ByteBuffer.wrap(bos.toByteArray());
}

From source file:deployer.TestUtils.java

/**
 * Builds a sample OpenShift web-app tarball like
 * {@code createSampleOpenShiftWebAppTarBall()}, then additionally appends one
 * zero-length file entry per path in {@code filepaths}.
 *
 * @param filepaths paths of the empty files to append to the archive
 * @return the gzip-compressed tar archive wrapped in a ByteBuffer
 * @throws IOException      on any stream failure
 * @throws ArchiveException declared for parity with the other tarball helpers
 */
public static ByteBuffer createSampleOpenShiftWebAppTarBallWithEmptyFiles(String[] filepaths)
        throws IOException, ArchiveException {
    ByteArrayInputStream bis = new ByteArrayInputStream(createSampleAppTarBall(ArtifactType.WebApp).array());
    ByteArrayOutputStream bos = new ByteArrayOutputStream(bis.available() + 2048);

    // try-with-resources closes every stream (writing the gzip trailer)
    // even when copying fails part-way through.
    try (CompressorInputStream cis = new GzipCompressorInputStream(bis);
            ArchiveInputStream ais = new TarArchiveInputStream(cis);
            CompressorOutputStream cos = new GzipCompressorOutputStream(bos);
            ArchiveOutputStream aos = new TarArchiveOutputStream(cos)) {

        // Copy every entry of the sample app archive unchanged.
        ArchiveEntry nextEntry;
        while ((nextEntry = ais.getNextEntry()) != null) {
            aos.putArchiveEntry(nextEntry);
            IOUtils.copy(ais, aos);
            aos.closeArchiveEntry();
        }

        // Append the standalone.xml configuration entry.
        TarArchiveEntry entry = new TarArchiveEntry(
                Paths.get(".openshift", CONFIG_DIRECTORY, "/standalone.xml").toFile());
        byte[] xmlData = SAMPLE_STANDALONE_DATA.getBytes();
        entry.setSize(xmlData.length);
        aos.putArchiveEntry(entry);
        IOUtils.write(xmlData, aos);
        // BUG FIX: the original never closed this entry, so the
        // putArchiveEntry() calls below were issued while it was still open.
        aos.closeArchiveEntry();

        // Append a zero-length entry for each requested path.
        for (String filepath : filepaths) {
            TarArchiveEntry emptyEntry = new TarArchiveEntry(Paths.get(filepath).toFile());
            emptyEntry.setSize(0);
            aos.putArchiveEntry(emptyEntry);
            aos.closeArchiveEntry();
        }

        aos.finish();
    }
    return ByteBuffer.wrap(bos.toByteArray());
}

From source file:net.sf.util.zip.analyzer.CompressorAnalyzer.java

@Override
public List<String> analyze(File f) throws Exception {
    //System.out.println("Inside CompressorAnalyzer.analyze(f)");
    FileInputStream fin = new FileInputStream(f);
    String[] res = FileNameUtil.getCompressFileType(f.getName());
    CompressorInputStream in = getCompressedInputStream(res[0], fin);
    File tmpDir = createTmpDir(res[1]);
    IOUtils.copy(in, new FileOutputStream(tmpDir));
    in.close();
    fin.close();/*w ww  . j a  va 2s .com*/

    List<String> entries = new ArrayList<String>();
    addEntry(f.getAbsolutePath(), entries);
    list(tmpDir, f.getAbsolutePath().replace("\\", "/") + "/" + tmpDir.getName(), entries);
    tmpDir.delete();

    return entries;
}

From source file:net.sf.util.zip.analyzer.CompressorAnalyzer.java

@Override
public List<String> analyze(InputStream fin, String path) throws Exception {
    //System.out.println("Inside CompressorAnalyzer.analyze(fin,path): <path>:"+path);
    String nPath = path.replace("\\", "/");
    if (nPath.indexOf("/") != -1)
        nPath = nPath.substring(nPath.lastIndexOf("/") + 1);

    String[] res = FileNameUtil.getCompressFileType(nPath);
    CompressorInputStream in = getCompressedInputStream(res[0], fin);
    File tmpDir = createTmpDir(res[1]);
    IOUtils.copy(in, new FileOutputStream(tmpDir));
    in.close();

    List<String> entries = new ArrayList<String>();
    addEntry(path, entries);//from   w w w . j a va 2  s  .  co m
    list(tmpDir, path + "/" + tmpDir.getName(), entries);
    tmpDir.delete();

    return entries;
}

From source file:ch.cern.dss.teamcity.agent.util.ArchiveExtractor.java

/**
 * @param archivePath/*from w w w  . ja va2 s. c  o  m*/
 * @return
 * @throws ArchiveException
 * @throws IOException
 * @throws CompressorException
 */
/**
 * Decompresses the archive at {@code archivePath} (format auto-detected by
 * {@link CompressorStreamFactory}) into a sibling file whose name is the
 * archive path with its extension removed.
 *
 * @param archivePath path of the compressed archive
 * @return path of the decompressed tar file
 * @throws ArchiveException    declared for parity with sibling methods
 * @throws IOException         if reading or writing fails
 * @throws CompressorException if the compression format is not recognized
 */
public String decompress(String archivePath) throws ArchiveException, IOException, CompressorException {

    Loggers.AGENT.debug("Decompressing: " + archivePath);
    String tarPath = FilenameUtils.removeExtension(archivePath);

    // try-with-resources: the original never closed the FileOutputStream
    // and leaked both input streams when the copy threw.
    try (BufferedInputStream is = new BufferedInputStream(new FileInputStream(archivePath));
            CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream(is);
            FileOutputStream out = new FileOutputStream(tarPath)) {
        org.apache.commons.compress.utils.IOUtils.copy(in, out);
    }

    return tarPath;
}

From source file:com.cloud.storage.template.VhdProcessor.java

/**
 * Checks whether the file looks like a compressed stream by asking
 * {@link CompressorStreamFactory} to recognize its signature.
 *
 * @param fileName path of the file to probe
 * @return {@code true} if a known compression format was detected;
 *         {@code false} if the format is unknown or the file is missing
 * @throws IOException if closing the streams fails
 */
private boolean checkCompressed(String fileName) throws IOException {
    // try-with-resources closes all three streams on every path; the
    // original's manual finally could also mask the real outcome with an
    // IOException thrown from close().
    try (FileInputStream fin = new FileInputStream(fileName);
            // BufferedInputStream supplies the mark support the factory needs.
            BufferedInputStream bin = new BufferedInputStream(fin);
            CompressorInputStream cin = new CompressorStreamFactory().createCompressorInputStream(bin)) {
        return true;
    } catch (CompressorException e) {
        // Not a recognized compression format — report "not compressed".
        s_logger.warn(e.getMessage());
        return false;
    } catch (FileNotFoundException e) {
        s_logger.warn(e.getMessage());
        return false;
    }
}

From source file:mj.ocraptor.extraction.tika.parser.pkg.CompressorParser.java

/**
 * Parses a compressed document: detects the compression format, records the
 * media type, and hands the decompressed bytes to the delegate parser as an
 * embedded document wrapped in a minimal XHTML envelope.
 *
 * @param stream   compressed document stream; shielded so it is NOT closed here
 * @param handler  SAX handler receiving the XHTML output
 * @param metadata document metadata; CONTENT_TYPE is set when the format is
 *                 recognized and the resource name is rewritten for the entry
 * @param context  parse context supplying options and the embedded extractor
 * @throws TikaException if the stream cannot be uncompressed
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context)
        throws IOException, SAXException, TikaException {
    // At the end we want to close the compression stream to release
    // any associated resources, but the underlying document stream
    // should not be closed
    stream = new CloseShieldInputStream(stream);

    // Ensure that the stream supports the mark feature
    stream = new BufferedInputStream(stream);

    CompressorInputStream cis;
    try {
        CompressorStreamFactory factory = new CompressorStreamFactory();
        // Default options: do not decompress concatenated streams unless a
        // CompressorParserOptions instance in the context says otherwise.
        CompressorParserOptions options = context.get(CompressorParserOptions.class,
                new CompressorParserOptions() {
                    public boolean decompressConcatenated(Metadata metadata) {
                        return false;
                    }
                });
        factory.setDecompressConcatenated(options.decompressConcatenated(metadata));
        cis = factory.createCompressorInputStream(stream);
    } catch (CompressorException e) {
        throw new TikaException("Unable to uncompress document stream", e);
    }

    // Record the detected compression format unless it is unknown.
    MediaType type = getMediaType(cis);
    if (!type.equals(MediaType.OCTET_STREAM)) {
        metadata.set(CONTENT_TYPE, type.toString());
    }

    XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
    xhtml.startDocument();

    try {
        Metadata entrydata = new Metadata();
        String name = metadata.get(Metadata.RESOURCE_NAME_KEY);
        if (name != null) {
            // Derive the contained document's name from the compressed file
            // name: *.tbz/*.tbz2 become *.tar, known compression suffixes are
            // stripped, and anything else goes through GzipUtils. Order
            // matters: the longer/tar-producing suffixes are checked first.
            if (name.endsWith(".tbz")) {
                name = name.substring(0, name.length() - 4) + ".tar";
            } else if (name.endsWith(".tbz2")) {
                name = name.substring(0, name.length() - 5) + ".tar";
            } else if (name.endsWith(".bz")) {
                name = name.substring(0, name.length() - 3);
            } else if (name.endsWith(".bz2")) {
                name = name.substring(0, name.length() - 4);
            } else if (name.endsWith(".xz")) {
                name = name.substring(0, name.length() - 3);
            } else if (name.endsWith(".pack")) {
                name = name.substring(0, name.length() - 5);
            } else if (name.length() > 0) {
                name = GzipUtils.getUncompressedFilename(name);
            }
            entrydata.set(Metadata.RESOURCE_NAME_KEY, name);
        }

        // Use the delegate parser to parse the compressed document
        EmbeddedDocumentExtractor extractor = context.get(EmbeddedDocumentExtractor.class,
                new ParsingEmbeddedDocumentExtractor(context));
        if (extractor.shouldParseEmbedded(entrydata)) {
            extractor.parseEmbedded(cis, xhtml, entrydata, true);
        }
    } finally {
        // Close the decompressor; CloseShieldInputStream keeps the caller's
        // underlying stream open.
        cis.close();
    }

    xhtml.endDocument();
}

From source file:org.apache.tika.parser.pkg.CompressorParser.java

/**
 * Parses a compressed document: detects the compression format (subject to a
 * memory limit), records the media type, and hands the decompressed bytes to
 * the delegate parser as an embedded document wrapped in a minimal XHTML
 * envelope.
 *
 * @param stream   compressed document stream; shielded so it is NOT closed here
 * @param handler  SAX handler receiving the XHTML output
 * @param metadata document metadata; CONTENT_TYPE is set when the format is
 *                 recognized and the resource name is rewritten for the entry
 * @param context  parse context supplying options and the embedded extractor
 * @throws TikaException            if the stream cannot be uncompressed
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context)
        throws IOException, SAXException, TikaException {
    // At the end we want to close the compression stream to release
    // any associated resources, but the underlying document stream
    // should not be closed
    if (stream.markSupported()) {
        stream = new CloseShieldInputStream(stream);
    } else {
        // Ensure that the stream supports the mark feature
        stream = new BufferedInputStream(new CloseShieldInputStream(stream));
    }

    CompressorInputStream cis;
    try {
        // Default options: do not decompress concatenated streams unless a
        // CompressorParserOptions instance in the context says otherwise.
        CompressorParserOptions options = context.get(CompressorParserOptions.class,
                new CompressorParserOptions() {
                    public boolean decompressConcatenated(Metadata metadata) {
                        return false;
                    }
                });
        TikaCompressorStreamFactory factory = new TikaCompressorStreamFactory(
                options.decompressConcatenated(metadata), memoryLimitInKb);
        cis = factory.createCompressorInputStream(stream);
    } catch (CompressorException e) {
        // The factory signals a blown memory limit via the message prefix;
        // surface it as the dedicated Tika exception type.
        if (e.getMessage() != null && e.getMessage().startsWith("MemoryLimitException:")) {
            throw new TikaMemoryLimitException(e.getMessage());
        }
        throw new TikaException("Unable to uncompress document stream", e);
    }

    // Record the detected compression format unless it is unknown.
    MediaType type = getMediaType(cis);
    if (!type.equals(MediaType.OCTET_STREAM)) {
        metadata.set(CONTENT_TYPE, type.toString());
    }

    XHTMLContentHandler xhtml = new XHTMLContentHandler(handler, metadata);
    xhtml.startDocument();

    try {
        Metadata entrydata = new Metadata();
        String name = metadata.get(Metadata.RESOURCE_NAME_KEY);
        if (name != null) {
            // Derive the contained document's name from the compressed file
            // name: *.tbz/*.tbz2 become *.tar, known compression suffixes are
            // stripped, and anything else goes through GzipUtils. Order
            // matters: the longer/tar-producing suffixes are checked first.
            if (name.endsWith(".tbz")) {
                name = name.substring(0, name.length() - 4) + ".tar";
            } else if (name.endsWith(".tbz2")) {
                name = name.substring(0, name.length() - 5) + ".tar";
            } else if (name.endsWith(".bz")) {
                name = name.substring(0, name.length() - 3);
            } else if (name.endsWith(".bz2")) {
                name = name.substring(0, name.length() - 4);
            } else if (name.endsWith(".xz")) {
                name = name.substring(0, name.length() - 3);
            } else if (name.endsWith(".zlib")) {
                name = name.substring(0, name.length() - 5);
            } else if (name.endsWith(".pack")) {
                name = name.substring(0, name.length() - 5);
            } else if (name.length() > 0) {
                name = GzipUtils.getUncompressedFilename(name);
            }
            entrydata.set(Metadata.RESOURCE_NAME_KEY, name);
        }

        // Use the delegate parser to parse the compressed document
        EmbeddedDocumentExtractor extractor = EmbeddedDocumentUtil.getEmbeddedDocumentExtractor(context);
        if (extractor.shouldParseEmbedded(entrydata)) {
            extractor.parseEmbedded(cis, xhtml, entrydata, true);
        }
    } finally {
        // Close the decompressor; CloseShieldInputStream keeps the caller's
        // underlying stream open.
        cis.close();
    }

    xhtml.endDocument();
}

From source file:org.crosswire.common.compress.BZip2.java

/**
 * Uncompresses the {@code input} BZip2 stream into an in-memory buffer.
 *
 * @param expectedLength hint for the initial buffer capacity
 * @return the buffer holding the uncompressed bytes
 * @throws IOException if {@code input} is not valid BZip2 data or reading fails
 */
public ByteArrayOutputStream uncompress(int expectedLength) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream(expectedLength);
    // try-with-resources: the original leaked the decompressor when the
    // copy threw.
    try (CompressorInputStream in = new BZip2CompressorInputStream(input)) {
        IOUtils.copy(in, out);
    }
    // flush/close are no-ops on ByteArrayOutputStream; kept for parity with
    // the original contract.
    out.flush();
    out.close();
    return out;
}

From source file:org.crosswire.common.compress.Gzip.java

/**
 * Uncompresses the {@code input} gzip stream into an in-memory buffer.
 *
 * @param expectedLength hint for the initial buffer capacity
 * @return the buffer holding the uncompressed bytes
 * @throws IOException if {@code input} is not valid gzip data or reading fails
 */
public ByteArrayOutputStream uncompress(int expectedLength) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream(expectedLength);
    // try-with-resources: the original leaked the decompressor when the
    // copy threw.
    try (CompressorInputStream in = new GzipCompressorInputStream(input)) {
        IOUtils.copy(in, out);
    }
    // flush/close are no-ops on ByteArrayOutputStream; kept for parity with
    // the original contract.
    out.flush();
    out.close();
    return out;
}