List of usage examples for org.apache.commons.compress.compressors CompressorStreamFactory GZIP
The field CompressorStreamFactory.GZIP is declared as: public static final String GZIP
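The GZIP constant names the gzip format when asking CompressorStreamFactory for a compressor stream. Before the examples from real projects below, here is a minimal self-contained round-trip sketch (not taken from any of the files below; the class name GzipRoundTrip is made up for illustration):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.commons.compress.utils.IOUtils;

public class GzipRoundTrip {
    public static void main(String[] args) throws Exception {
        byte[] original = "hello, gzip".getBytes(StandardCharsets.UTF_8);

        // compress: wrap any OutputStream in a gzip compressor stream
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        OutputStream gzipOut = new CompressorStreamFactory()
                .createCompressorOutputStream(CompressorStreamFactory.GZIP, buffer);
        gzipOut.write(original);
        gzipOut.close(); // closing flushes and writes the gzip trailer

        // decompress: wrap any InputStream in a gzip decompressor stream
        InputStream gzipIn = new CompressorStreamFactory()
                .createCompressorInputStream(CompressorStreamFactory.GZIP,
                        new ByteArrayInputStream(buffer.toByteArray()));
        byte[] restored = IOUtils.toByteArray(gzipIn);
        gzipIn.close();

        System.out.println(new String(restored, StandardCharsets.UTF_8)); // prints "hello, gzip"
    }
}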
From source file:co.turnus.trace.io.XmlTraceStreamWriter.java
public XmlTraceStreamWriter(File file) {
    switch (TurnusUtils.getExtension(file)) {
    case TurnusExtension.TRACE_COMPRESSED:
        compressedXml = true;
        break;
    case TurnusExtension.TRACE:
        compressedXml = false;
        break;
    default:
        throw new TurnusRuntimeException("Trace file writer: unsupported file extension");
    }
    try {
        stream = new FileOutputStream(file);
        if (compressedXml) {
            stream = new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.GZIP,
                    stream);
        }
        stream = new BufferedOutputStream(stream);
    } catch (Exception e) {
        throw new TurnusRuntimeException("Error opening the trace file output stream", e.getCause());
    }
}
From source file:com.msd.gin.halyard.tools.HalyardExportTest.java
@Test
public void testExport_CSV_GZ() throws Exception {
    HalyardExport.main(
            new String[] { "-s", TABLE, "-q", TUPLE_QUERY, "-t", ROOT + name.getMethodName() + ".csv.gz" });
    assertEquals(1001, getLinesCount(ROOT + name.getMethodName() + ".csv.gz", CompressorStreamFactory.GZIP));
}
From source file:com.gitblit.utils.CompressionUtils.java
/**
 * tar.gz the contents of the tree at the (optionally) specified revision and
 * the (optionally) specified basepath to the supplied outputstream.
 *
 * @param repository
 * @param basePath
 *            if unspecified, entire repository is assumed.
 * @param objectId
 *            if unspecified, HEAD is assumed.
 * @param os
 * @return true if repository was successfully zipped to supplied output
 *         stream
 */
public static boolean gz(Repository repository, String basePath, String objectId, OutputStream os) {
    return tar(CompressorStreamFactory.GZIP, repository, basePath, objectId, os);
}
From source file:com.gitblit.servlet.PtServlet.java
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    try {
        response.setContentType("application/octet-stream");
        response.setDateHeader("Last-Modified", lastModified);
        response.setHeader("Cache-Control", "none");
        response.setHeader("Pragma", "no-cache");
        response.setDateHeader("Expires", 0);

        boolean windows = false;
        try {
            String useragent = request.getHeader("user-agent").toString();
            windows = useragent.toLowerCase().contains("windows");
        } catch (Exception e) {
        }

        byte[] pyBytes;
        File file = runtimeManager.getFileOrFolder("tickets.pt", "${baseFolder}/pt.py");
        if (file.exists()) {
            // custom script
            pyBytes = readAll(new FileInputStream(file));
        } else {
            // default script
            pyBytes = readAll(getClass().getResourceAsStream("/pt.py"));
        }

        if (windows) {
            // windows: download zip file with pt.py and pt.cmd
            response.setHeader("Content-Disposition", "attachment; filename=\"pt.zip\"");
            OutputStream os = response.getOutputStream();
            ZipArchiveOutputStream zos = new ZipArchiveOutputStream(os);

            // add the Python script
            ZipArchiveEntry pyEntry = new ZipArchiveEntry("pt.py");
            pyEntry.setSize(pyBytes.length);
            pyEntry.setUnixMode(FileMode.EXECUTABLE_FILE.getBits());
            pyEntry.setTime(lastModified);
            zos.putArchiveEntry(pyEntry);
            zos.write(pyBytes);
            zos.closeArchiveEntry();

            // add a Python launch cmd file
            byte[] cmdBytes = readAll(getClass().getResourceAsStream("/pt.cmd"));
            ZipArchiveEntry cmdEntry = new ZipArchiveEntry("pt.cmd");
            cmdEntry.setSize(cmdBytes.length);
            cmdEntry.setUnixMode(FileMode.REGULAR_FILE.getBits());
            cmdEntry.setTime(lastModified);
            zos.putArchiveEntry(cmdEntry);
            zos.write(cmdBytes);
            zos.closeArchiveEntry();

            // add a brief readme
            byte[] txtBytes = readAll(getClass().getResourceAsStream("/pt.txt"));
            ZipArchiveEntry txtEntry = new ZipArchiveEntry("readme.txt");
            txtEntry.setSize(txtBytes.length);
            txtEntry.setUnixMode(FileMode.REGULAR_FILE.getBits());
            txtEntry.setTime(lastModified);
            zos.putArchiveEntry(txtEntry);
            zos.write(txtBytes);
            zos.closeArchiveEntry();

            // cleanup
            zos.finish();
            zos.close();
            os.flush();
        } else {
            // unix: download a tar.gz file with pt.py set with execute permissions
            response.setHeader("Content-Disposition", "attachment; filename=\"pt.tar.gz\"");
            OutputStream os = response.getOutputStream();
            CompressorOutputStream cos = new CompressorStreamFactory()
                    .createCompressorOutputStream(CompressorStreamFactory.GZIP, os);
            TarArchiveOutputStream tos = new TarArchiveOutputStream(cos);
            tos.setAddPaxHeadersForNonAsciiNames(true);
            tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);

            // add the Python script
            TarArchiveEntry pyEntry = new TarArchiveEntry("pt");
            pyEntry.setMode(FileMode.EXECUTABLE_FILE.getBits());
            pyEntry.setModTime(lastModified);
            pyEntry.setSize(pyBytes.length);
            tos.putArchiveEntry(pyEntry);
            tos.write(pyBytes);
            tos.closeArchiveEntry();

            // add a brief readme
            byte[] txtBytes = readAll(getClass().getResourceAsStream("/pt.txt"));
            TarArchiveEntry txtEntry = new TarArchiveEntry("README");
            txtEntry.setMode(FileMode.REGULAR_FILE.getBits());
            txtEntry.setModTime(lastModified);
            txtEntry.setSize(txtBytes.length);
            tos.putArchiveEntry(txtEntry);
            tos.write(txtBytes);
            tos.closeArchiveEntry();

            // cleanup
            tos.finish();
            tos.close();
            cos.close();
            os.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:com.msd.gin.halyard.tools.HalyardExportTest.java
@Test
public void testExport_NT_GZ() throws Exception {
    HalyardExport.main(
            new String[] { "-s", TABLE, "-q", GRAPH_QUERY, "-t", ROOT + name.getMethodName() + ".nt.gz" });
    assertEquals(1000, getTriplesCount(ROOT + name.getMethodName() + ".nt.gz", CompressorStreamFactory.GZIP,
            RDFFormat.NTRIPLES));
}
From source file:co.turnus.trace.io.XmlTraceReader.java
public XmlTraceReader(File file, TraceFactory factory) {
    try {
        XMLInputFactory inputFactory = XMLInputFactory.newInstance();
        String extension = TurnusUtils.getExtension(file);
        if (!extension.equals(TurnusExtension.TRACE) && !extension.equals(TurnusExtension.TRACE_COMPRESSED)) {
            throw new TurnusRuntimeException("Trace file reader: unsupported extension");
        }
        InputStream stream = new BufferedInputStream(new FileInputStream(file));
        if (extension.equals(TurnusExtension.TRACE_COMPRESSED)) {
            stream = new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.GZIP,
                    stream);
        }
        reader = inputFactory.createXMLStreamReader(stream);
    } catch (Exception e) {
        throw new TurnusRuntimeException("Error initializing the trace reader", e.getCause());
    }
    this.factory = factory;
    tempDep = new TempDependency();
    tempStep = new TempStep();
}
From source file:adams.core.io.GzipUtils.java
/**
 * Compresses the specified file into a gzip archive.
 *
 * @param inputFile the file to compress
 * @param buffer the buffer size to use
 * @param outputFile the destination file (the archive)
 * @param removeInput whether to remove the input file
 * @return the error message, null if everything OK
 */
@MixedCopyright(copyright = "Apache compress commons", license = License.APACHE2, url = "http://commons.apache.org/compress/apidocs/org/apache/commons/compress/compressors/CompressorStreamFactory.html")
public static String compress(File inputFile, int buffer, File outputFile, boolean removeInput) {
    String result;
    FileInputStream in;
    FileOutputStream fos;
    CompressorOutputStream out;
    String msg;

    in = null;
    out = null;
    fos = null;
    result = null;
    try {
        // does file already exist?
        if (outputFile.exists())
            System.err.println("WARNING: overwriting '" + outputFile + "'!");

        in = new FileInputStream(inputFile.getAbsolutePath());
        fos = new FileOutputStream(outputFile.getAbsolutePath());
        out = new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.GZIP, fos);
        IOUtils.copy(in, out, buffer);
        FileUtils.closeQuietly(in);
        FileUtils.closeQuietly(out);
        FileUtils.closeQuietly(fos);
        in = null;
        out = null;
        fos = null;

        // remove input file?
        if (removeInput) {
            if (!inputFile.delete())
                result = "Failed to delete input file '" + inputFile + "' after successful compression!";
        }
    } catch (Exception e) {
        msg = "Failed to compress '" + inputFile + "': ";
        System.err.println(msg);
        e.printStackTrace();
        result = msg + e;
    } finally {
        FileUtils.closeQuietly(in);
        FileUtils.closeQuietly(out);
        FileUtils.closeQuietly(fos);
    }

    return result;
}
From source file:com.continuuity.weave.kafka.client.KafkaTest.java
private static File extractKafka() throws IOException, ArchiveException, CompressorException {
    File kafkaExtract = TMP_FOLDER.newFolder();
    InputStream kafkaResource = KafkaTest.class.getClassLoader().getResourceAsStream("kafka-0.7.2.tgz");
    ArchiveInputStream archiveInput = new ArchiveStreamFactory()
            .createArchiveInputStream(ArchiveStreamFactory.TAR, new CompressorStreamFactory()
                    .createCompressorInputStream(CompressorStreamFactory.GZIP, kafkaResource));
    try {
        ArchiveEntry entry = archiveInput.getNextEntry();
        while (entry != null) {
            File file = new File(kafkaExtract, entry.getName());
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                ByteStreams.copy(archiveInput, Files.newOutputStreamSupplier(file));
            }
            entry = archiveInput.getNextEntry();
        }
    } finally {
        archiveInput.close();
    }
    return kafkaExtract;
}
From source file:ezbake.protect.ezca.EzCABootstrap.java
public static void createAndWriteTarball(String name, AppCerts certs, String filePath) {
    TarArchiveOutputStream os = null;
    try {
        File outputFile = new File(filePath, name + ".tar.gz");
        outputFile.createNewFile();
        outputFile.setWritable(false, false);
        outputFile.setWritable(true, true);
        outputFile.setReadable(false, false);
        outputFile.setReadable(true, true);

        FileOutputStream fos = new FileOutputStream(outputFile);
        CompressorOutputStream cos = new CompressorStreamFactory()
                .createCompressorOutputStream(CompressorStreamFactory.GZIP, fos);
        os = new TarArchiveOutputStream(cos);

        // For each field in the app certs, create an entry in the tar archive
        for (AppCerts._Fields field : AppCerts._Fields.values()) {
            Object o = certs.getFieldValue(field);
            if (o instanceof byte[]) {
                String fieldName = field.getFieldName().replace("_", ".");
                addTarArchiveEntry(os, fieldName, (byte[]) o);
            }
        }
    } catch (FileNotFoundException e) {
        logger.error("Unable to write tarball", e);
    } catch (CompressorException e) {
        logger.error("Error compressing tarball", e);
    } catch (IOException e) {
        logger.error("Error creating output file for tarball", e);
    } finally {
        if (os != null) {
            try {
                os.finish();
                os.close();
            } catch (IOException e) {
                logger.warn("Unable to close output stream", e);
            }
        }
    }
}
From source file:com.msd.gin.halyard.tools.HalyardExport.java
/**
 * Export function is called for the export execution with given arguments.
 * @param conf Hadoop Configuration instance
 * @param log StatusLog notification service implementation for back-calls
 * @param query String SPARQL Graph query
 * @param source String source HTable name
 * @param targetUrl String URL of the target system (+folder or schema, +table or file name)
 * @param driverClass String JDBC Driver class name (for JDBC export only)
 * @param driverClasspath Array of URLs with JDBC Driver classpath (for JDBC export only)
 * @param jdbcProperties Array of String JDBC connection properties (for JDBC export only)
 * @param trimTable boolean option to trim target JDBC table before export (for JDBC export only)
 * @throws ExportException in case of an export problem
 */
public static void export(Configuration conf, StatusLog log, String source, String query, String targetUrl,
        String driverClass, URL[] driverClasspath, String[] jdbcProperties, boolean trimTable)
        throws ExportException {
    try {
        QueryResultWriter writer = null;
        if (targetUrl.startsWith("file:") || targetUrl.startsWith("hdfs:")) {
            OutputStream out = FileSystem.get(URI.create(targetUrl), conf).create(new Path(targetUrl));
            try {
                if (targetUrl.endsWith(".bz2")) {
                    out = new CompressorStreamFactory()
                            .createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
                    targetUrl = targetUrl.substring(0, targetUrl.length() - 4);
                } else if (targetUrl.endsWith(".gz")) {
                    out = new CompressorStreamFactory()
                            .createCompressorOutputStream(CompressorStreamFactory.GZIP, out);
                    targetUrl = targetUrl.substring(0, targetUrl.length() - 3);
                }
            } catch (CompressorException e) {
                IOUtils.closeQuietly(out);
                throw new ExportException(e);
            }
            if (targetUrl.endsWith(".csv")) {
                writer = new CSVResultWriter(log, out);
            } else {
                Optional<RDFFormat> form = Rio.getWriterFormatForFileName(targetUrl);
                if (!form.isPresent())
                    throw new ExportException("Unsupported target file format extension: " + targetUrl);
                writer = new RIOResultWriter(log, form.get(), out);
            }
        } else if (targetUrl.startsWith("jdbc:")) {
            int i = targetUrl.lastIndexOf('/');
            if (i < 0)
                throw new ExportException("Target URL does not end with /<table_name>");
            if (driverClass == null)
                throw new ExportException("Missing mandatory JDBC driver class name argument -c <driver_class>");
            writer = new JDBCResultWriter(log, targetUrl.substring(0, i), targetUrl.substring(i + 1),
                    jdbcProperties, driverClass, driverClasspath, trimTable);
        } else {
            throw new ExportException("Unsupported target URL protocol " + targetUrl);
        }
        new HalyardExport(source, query, writer, log).run(conf);
    } catch (IOException e) {
        throw new ExportException(e);
    }
}