List of usage examples for the constructor org.apache.commons.compress.compressors.xz.XZCompressorInputStream#XZCompressorInputStream
public XZCompressorInputStream(final InputStream inputStream) throws IOException
From source file:de.flapdoodle.embed.process.extract.TxzExtractor.java
/**
 * Opens a .txz (XZ-compressed tar) archive and wraps it for extraction.
 *
 * @param source the .txz archive file on disk
 * @return a {@link TarArchiveWrapper} over the decompressed tar entries
 * @throws IOException if the file cannot be opened or is not valid XZ data
 */
protected ArchiveWrapper archiveStream(File source) throws IOException {
    FileInputStream fileIn = new FileInputStream(source);
    BufferedInputStream bufferedIn = new BufferedInputStream(fileIn);
    // Decompress XZ first, then read the tar container inside it.
    XZCompressorInputStream xzIn = new XZCompressorInputStream(bufferedIn);
    TarArchiveInputStream tarIn = new TarArchiveInputStream(xzIn);
    return new TarArchiveWrapper(tarIn);
}
From source file:de.tudarmstadt.ukp.dkpro.core.api.resources.CompressionUtils.java
/** * Get an uncompressed input stream for a given input stream created for a particular location. * /* w w w .j a va 2s . c o m*/ * @param aLocation a resource location (e.g. a path, url, etc.) * @param aStream a raw stream of potentially compressed data. * @return stream wrapped with a decompressing stream. */ public static InputStream getInputStream(String aLocation, InputStream aStream) throws IOException { String lcLocation = aLocation.toLowerCase(); if (lcLocation.endsWith(GZIP.getExtension())) { return new GZIPInputStream(aStream); } else if (lcLocation.endsWith(BZIP2.getExtension()) || lcLocation.endsWith(".bzip2")) { return new BZip2CompressorInputStream(aStream); } else if (lcLocation.endsWith(XZ.getExtension())) { return new XZCompressorInputStream(aStream); } else { return aStream; } }
From source file:deodex.tools.ZipTools.java
/** * extract an odex file from .xz file/*from w ww . j a v a 2s . c o m*/ * * @returns success only if an odex file was extracted * @param odex * the odex file to decompress * @throws IOException * well we are using IOs Exception might be thrown */ public static boolean extractOdex(File odex) throws IOException { File Decomdex; if (odex.getName().endsWith(S.ODEX_EXT)) { Logger.appendLog("[ZipTools][I]Decompressing " + odex.getName() + " not needed"); return true; } else if (odex.getName().endsWith(S.COMP_GZ_ODEX_EXT)) { Logger.appendLog("[ZipTools][I]Decompressing " + odex.getName() + " gzip detected ..."); return TarGzUtils.unGzipOdex(odex, odex.getParentFile()); } else { Logger.appendLog("[ZipTools][I]Decompressing " + odex.getName() + " xz compression detected ..."); Decomdex = new File(odex.getParentFile().getAbsolutePath() + "/" + StringUtils.getCropString(odex.getName(), odex.getName().length() - 3)); Logger.appendLog("[ZipTools][I]Decompressing " + odex.getAbsolutePath() + " to " + Decomdex.getAbsolutePath()); FileInputStream fin = new FileInputStream(odex); BufferedInputStream in = new BufferedInputStream(fin); FileOutputStream out = new FileOutputStream(Decomdex); XZCompressorInputStream xzIn = new XZCompressorInputStream(in); final byte[] buffer = new byte[32768]; int n = 0; while (-1 != (n = xzIn.read(buffer))) { out.write(buffer, 0, n); } out.close(); xzIn.close(); } Logger.appendLog("[ZipTools][I]Decompressing " + odex.getAbsolutePath() + " to " + Decomdex.getAbsolutePath() + " success ? " + Decomdex.exists()); return Decomdex.exists(); }
From source file:kr.debop4j.core.compress.XZCompressor.java
/**
 * Decompresses an XZ-compressed byte array back into its original bytes.
 *
 * @param compressed XZ-compressed input data
 * @return the decompressed bytes
 * @throws IOException if the input is not valid XZ data
 */
@Override
protected byte[] doDecompress(byte[] compressed) throws IOException {
    // try-with-resources replaces Lombok's @Cleanup: all three streams are
    // closed on every exit path.
    try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
            ByteArrayInputStream bis = new ByteArrayInputStream(compressed);
            XZCompressorInputStream xz = new XZCompressorInputStream(bis)) {
        byte[] chunk = new byte[BUFFER_SIZE];
        int read;
        while ((read = xz.read(chunk, 0, BUFFER_SIZE)) > 0) {
            bos.write(chunk, 0, read);
        }
        return bos.toByteArray();
    }
}
From source file:com.torchmind.upm.bundle.BasicResource.java
/**
 * {@inheritDoc}
 *
 * <p>Wraps the underlying resource stream in the archive reader matching
 * {@code this.type}. Note that {@code createInputStream()} is invoked per
 * case on purpose: the RAW case must throw before any stream is opened.
 */
@Nonnull
@Override
public ArchiveInputStream createArchiveInputStream() throws IllegalStateException, IOException {
    switch (this.type) {
        case RAW:
            throw new IllegalStateException("Cannot convert RAW resource into archive");
        case JAR:
            return new JarArchiveInputStream(this.createInputStream());
        case TAR_ARCHIVE:
            return new TarArchiveInputStream(this.createInputStream());
        case TAR_GZ_ARCHIVE:
            return new TarArchiveInputStream(new GzipCompressorInputStream(this.createInputStream()));
        case TAR_XZ_ARCHIVE:
            return new TarArchiveInputStream(new XZCompressorInputStream(this.createInputStream()));
        case ZIP_ARCHIVE:
            return new ZipArchiveInputStream(this.createInputStream());
    }
    // Defensive: reached only if a new enum constant is added without a case.
    throw new UnsupportedOperationException("No such resource type: " + this.type);
}
From source file:com.espringtran.compressor4j.processor.XzProcessor.java
/** * Read from compressed file/* w w w . j a v a 2s .c o m*/ * * @param srcPath * path of compressed file * @param fileCompressor * FileCompressor object * @throws Exception */ @Override public void read(String srcPath, FileCompressor fileCompressor) throws Exception { long t1 = System.currentTimeMillis(); byte[] data = FileUtil.convertFileToByte(srcPath); ByteArrayInputStream bais = new ByteArrayInputStream(data); XZCompressorInputStream cis = new XZCompressorInputStream(bais); TarArchiveInputStream ais = new TarArchiveInputStream(cis); ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { byte[] buffer = new byte[1024]; int readByte; TarArchiveEntry entry = ais.getNextTarEntry(); while (entry != null && entry.getSize() > 0) { long t2 = System.currentTimeMillis(); baos = new ByteArrayOutputStream(); readByte = ais.read(buffer); while (readByte != -1) { baos.write(buffer, 0, readByte); readByte = ais.read(buffer); } BinaryFile binaryFile = new BinaryFile(entry.getName(), baos.toByteArray()); fileCompressor.addBinaryFile(binaryFile); LogUtil.createAddFileLog(fileCompressor, binaryFile, t2, System.currentTimeMillis()); entry = ais.getNextTarEntry(); } } catch (Exception e) { FileCompressor.LOGGER.error("Error on get compressor file", e); } finally { baos.close(); ais.close(); cis.close(); bais.close(); } LogUtil.createReadLog(fileCompressor, srcPath, data.length, t1, System.currentTimeMillis()); }
From source file:com.playonlinux.core.utils.archive.Tar.java
/**
 * Uncompresses a .tar.xz archive into the given directory, reporting progress
 * through the supplied callback.
 *
 * @param inputFile the .tar.xz archive to extract
 * @param outputDir destination directory
 * @param stateCallback receives progress updates during extraction
 * @return the list of extracted files
 */
List<File> uncompressTarXzFile(File inputFile, File outputDir, Consumer<ProgressEntity> stateCallback) {
    // The counting wrapper tracks raw bytes consumed so progress can be
    // reported against the compressed file size.
    try (CountingInputStream byteCounter = new CountingInputStream(new FileInputStream(inputFile));
            InputStream xzStream = new XZCompressorInputStream(byteCounter)) {
        final long totalSize = FileUtils.sizeOf(inputFile);
        return uncompress(xzStream, byteCounter, outputDir, totalSize, stateCallback);
    } catch (IOException e) {
        throw new ArchiveException(TAR_ERROR_MESSAGE, e);
    }
}
From source file:ilps.hadoop.ThriftRecordReader.java
/** * Boilerplate initialization code for file input streams. * //from w w w . jav a 2 s. c om * Tuan - Add the .xz decompressor here * */ @Override public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { conf = context.getConfiguration(); fileSplit = (FileSplit) split; start = fileSplit.getStart(); length = fileSplit.getLength(); position = 0; Path path = fileSplit.getPath(); FileSystem fs = path.getFileSystem(conf); // Some files are corrupted, report them and move on try { fis = fs.open(path); bis = new BufferedInputStream(fis); xzis = new XZCompressorInputStream(bis); transport = new TIOStreamTransport(xzis); } catch (IOException e) { LOG.error("Bad file: ", path.toString()); e.printStackTrace(); } try { if (transport != null) transport.open(); // Skip this file else { fis = null; return; } } catch (TTransportException e) { e.printStackTrace(); throw new IOException(e); } factory = new TBinaryProtocol.Factory(); tp = factory.getProtocol(transport); value = new StreamItemWritable(factory); }
From source file:de.tudarmstadt.ukp.csniper.webapp.search.xmi.SerializedCasContextProvider.java
/**
 * Builds a display context window around an evaluation item: loads (or
 * reuses) the serialized CAS for the item's document, extracts a text window
 * of aLeftSize/aRightSize characters around the item, and computes the
 * relative unit/match offsets inside that window (adjusted for optional
 * inline "/POS" annotations).
 *
 * @param aItem the item to contextualize
 * @param aLeftSize number of characters of left context
 * @param aRightSize number of characters of right context
 * @return the populated {@code ItemContext}
 * @throws IOException if the CAS cannot be read or deserialized
 */
@Override
public ItemContext getContext(EvaluationItem aItem, int aLeftSize, int aRightSize) throws IOException {
    Timer timer = new Timer();
    File base = new File(new File(corpusService.getRepositoryPath(), aItem.getCollectionId().toUpperCase()),
            BIN);
    String docId = aItem.getDocumentId();
    // Thread-local CAS cache: only re-read the serialized CAS when the
    // requested document/collection differs from the one currently loaded.
    JCasState state = jcasThreadLocal.get();
    if ((state.documentId == null) || (state.collectionId == null)
            || !StringUtils.equals(state.documentId, docId)
            || !StringUtils.equals(state.collectionId, aItem.getCollectionId())) {
        timer.start();
        ObjectInputStream is = null;
        try {
            // No need to reset the CAS
            // CAS files are stored XZ-compressed as "<docId>.ser.xz" under base.
            is = new ObjectInputStream(
                    new XZCompressorInputStream(new FileInputStream(new File(base, docId + ".ser.xz"))));
            CASCompleteSerializer serializer = (CASCompleteSerializer) is.readObject();
            deserializeCASComplete(serializer, (CASImpl) state.cas);
            state.documentId = aItem.getDocumentId();
            state.collectionId = aItem.getCollectionId();
        } catch (IllegalStateException e) {
            throw new IOException(e);
        } catch (ClassNotFoundException e) {
            throw new IOException(e);
        } finally {
            closeQuietly(is);
        }
        timer.stop();
        log.debug("Reading the CAS took " + timer.getTime() + "ms");
    } else {
        log.debug("Reusing CAS");
    }
    timer.reset();
    timer.start();
    // text offset based
    String text = state.cas.getDocumentText();
    // Absolute offsets
    int windowBegin = Math.max(0, (int) aItem.getBeginOffset() - aLeftSize);
    int windowEnd = Math.min(text.length(), (int) aItem.getEndOffset() + aRightSize);
    // Relative offsets
    int unitBegin = (int) aItem.getBeginOffset() - windowBegin;
    int unitEnd = (int) aItem.getEndOffset() - windowBegin;
    StringBuilder windowText = new StringBuilder(text.substring(windowBegin, windowEnd));
    List<Token> tokens;
    try {
        tokens = JCasUtil.selectCovered(state.cas.getJCas(), Token.class, (int) aItem.getBeginOffset(),
                (int) aItem.getEndOffset());
    } catch (CASException e) {
        throw new IOException(e);
    }
    // Displacements track how many characters the inline "/POS" postfixes
    // shift the unit end and the match begin/end within the window.
    int unitEndDisplacement = 0;
    int matchEndDisplacement = 0;
    int matchBeginDisplacement = 0;
    boolean anyMatchSet = false;
    int matchBeginOffset = aItem.getOriginalTextMatchBegin();
    int matchEndOffset = aItem.getOriginalTextMatchEnd();
    if (aItem.isOriginalMatchSet()) {
        matchBeginOffset = aItem.getOriginalTextMatchBegin();
        matchEndOffset = aItem.getOriginalTextMatchEnd();
        anyMatchSet = true;
    } else if (aItem.isTokenMatchSet()) {
        // Token-based match: resolve token indices to character offsets.
        matchBeginOffset = tokens.get(aItem.getTokenMatchBegin()).getBegin();
        matchEndOffset = tokens.get(aItem.getTokenMatchEnd()).getEnd();
        anyMatchSet = true;
    }
    // Reverse so insertions happen right-to-left, keeping earlier token
    // offsets valid while the StringBuilder grows.
    Collections.reverse(tokens);
    // compute actual offsets if token based offsets are set
    if (outputPos) {
        for (Token t : tokens) {
            if (t.getPos() != null && t.getPos().getPosValue() != null) {
                String postfix = "/" + t.getPos().getPosValue();
                windowText.insert(t.getEnd() - windowBegin, postfix);
                unitEndDisplacement += postfix.length();
                if (anyMatchSet) {
                    // Postfixes inside the match shift its end; postfixes
                    // before the match shift its begin.
                    if ((t.getEnd() <= matchEndOffset) && (t.getBegin() >= matchBeginOffset)) {
                        matchEndDisplacement += postfix.length();
                    }
                    if (t.getEnd() <= matchBeginOffset) {
                        matchBeginDisplacement += postfix.length();
                    }
                }
            }
        }
    }
    ItemContext ctx = new ItemContext(windowText.toString(), windowBegin, windowEnd, unitBegin,
            unitEnd + unitEndDisplacement);
    if (anyMatchSet) {
        ctx.setMatch(matchBeginOffset - windowBegin + matchBeginDisplacement,
                matchEndOffset - windowBegin + matchBeginDisplacement + matchEndDisplacement);
    }
    ctx.setTextLength(text.length());
    timer.stop();
    log.debug("Extracting the context took " + timer.getTime() + "ms");
    return ctx;
}
From source file:hudson.plugins.report.jck.parsers.JtregReportParser.java
/**
 * Builds the immutable lookup table from archive file extension to the
 * factory that opens it as an {@code ArchiveInputStream}.
 *
 * @return an unmodifiable extension-to-factory map
 */
private static Map<String, ArchiveFactory> createSupportedArchiveTypesMap() {
    Map<String, ArchiveFactory> factories = new HashMap<>();
    // Plain containers: constructor references suffice.
    factories.put(".zip", ZipArchiveInputStream::new);
    factories.put(".tar", TarArchiveInputStream::new);
    // Compressed tars: decompress first, then read the tar inside.
    factories.put(".tar.gz", in -> new TarArchiveInputStream(new GzipCompressorInputStream(in)));
    factories.put(".tar.bz2", in -> new TarArchiveInputStream(new BZip2CompressorInputStream(in)));
    factories.put(".tar.xz", in -> new TarArchiveInputStream(new XZCompressorInputStream(in)));
    return Collections.unmodifiableMap(factories);
}