Example usage for java.util.zip GZIPOutputStream GZIPOutputStream

Introduction

On this page you can find usage examples for the java.util.zip GZIPOutputStream constructor GZIPOutputStream(OutputStream).

Prototype

public GZIPOutputStream(OutputStream out) throws IOException 

Document

Creates a new output stream with a default buffer size.

Usage
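
Before the project examples below, here is a minimal, self-contained sketch of the constructor in action. The class name, payload, and printed message are illustrative assumptions, not taken from any of the projects listed.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPOutputStream;

public class GzipConstructorSketch {
    public static void main(String[] args) throws IOException {
        byte[] payload = "hello, gzip".getBytes(StandardCharsets.UTF_8); // illustrative data

        // Wrap an in-memory buffer; the single-argument constructor uses the default buffer size.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (GZIPOutputStream gzip = new GZIPOutputStream(buffer)) {
            gzip.write(payload);
        } // closing the stream finishes compression and writes the GZIP trailer

        System.out.println("Compressed " + payload.length + " bytes to " + buffer.size() + " bytes");
    }
}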

From source file: com.ctriposs.r2.filter.compression.GzipCompressor.java

This example compresses the contents of an InputStream into a GZIP-encoded byte array.

@Override
public byte[] deflate(InputStream data) throws CompressionException {
    ByteArrayOutputStream out;
    GZIPOutputStream gzip = null;

    try {
        out = new ByteArrayOutputStream();
        gzip = new GZIPOutputStream(out);

        IOUtils.copy(data, gzip);
    } catch (IOException e) {
        throw new CompressionException(CompressionConstants.DECODING_ERROR + getContentEncodingName(), e);
    } finally {
        if (gzip != null) {
            IOUtils.closeQuietly(gzip);
        }
    }

    return out.toByteArray();
}

From source file: de.undercouch.gradle.tasks.download.CompressionTest.java

This test handler gzips the HTTP response body when the client sends an Accept-Encoding: gzip header.

@Override
protected Handler[] makeHandlers() throws IOException {
    ContextHandler compressionHandler = new ContextHandler("/" + COMPRESSED) {
        @Override
        public void handle(String target, HttpServletRequest request, HttpServletResponse response,
                int dispatch) throws IOException, ServletException {
            String acceptEncoding = request.getHeader("Accept-Encoding");
            boolean acceptGzip = "gzip".equals(acceptEncoding);

            response.setStatus(200);
            OutputStream os = response.getOutputStream();
            if (acceptGzip) {
                response.setHeader("Content-Encoding", "gzip");
                GZIPOutputStream gos = new GZIPOutputStream(os);
                OutputStreamWriter osw = new OutputStreamWriter(gos);
                osw.write("Compressed");
                osw.close();
                gos.flush();
                gos.close();
            } else {
                OutputStreamWriter osw = new OutputStreamWriter(os);
                osw.write("Uncompressed");
                osw.close();
            }
            os.close();
        }
    };
    return new Handler[] { compressionHandler };
}

From source file: ch.ledcom.jpreseed.InitrdRepacker.java

This example repacks a gzipped cpio initrd archive, copying the original entries and appending additional files to a new GZIP-compressed archive.

public final void repack(OutputStream out) throws IOException {
    // start new archive
    try (CpioArchiveInputStream cpioIn = new CpioArchiveInputStream(new GZIPInputStream(initrdGz));
            CpioArchiveOutputStream cpioOut = new CpioArchiveOutputStream(new GZIPOutputStream(out))) {
        CpioArchiveEntry cpioEntry;

        // add files from base archive
        while ((cpioEntry = cpioIn.getNextCPIOEntry()) != null) {
            if (!additionalFiles.keySet().contains(cpioEntry.getName())) {
                logger.info("Repacking [{}]", cpioEntry.getName());
                cpioOut.putArchiveEntry(cpioEntry);
                long bytesCopied = copy(cpioIn, cpioOut);
                cpioOut.closeArchiveEntry();
                logger.debug("Copied [{}] bytes", bytesCopied);
            }
        }

        // additional files
        for (Map.Entry<String, File> entry : additionalFiles.entrySet()) {
            logger.info("Packing new file [{}]", entry.getKey());
            ArchiveEntry additionalEntry = cpioOut.createArchiveEntry(entry.getValue(), entry.getKey());
            cpioOut.putArchiveEntry(additionalEntry);
            try (InputStream in = new FileInputStream(entry.getValue())) {
                copy(in, cpioOut);
            }
            cpioOut.closeArchiveEntry();
        }
    }
}

From source file: mzmatch.ipeak.normalisation.VanDeSompele.java

This command-line tool writes its PeakML output through a GZIPOutputStream wrapped around a FileOutputStream.

public static void main(String args[]) {
    try {
        Tool.init();

        // parse the commandline options
        Options options = new Options();
        CmdLineParser cmdline = new CmdLineParser(options);

        // check whether we need to show the help
        cmdline.parse(args);
        if (options.help) {
            Tool.printHeader(System.out, application, version);
            cmdline.printUsage(System.out, "");
            return;
        }

        if (options.verbose) {
            Tool.printHeader(System.out, application, version);
            cmdline.printOptions();
        }

        // check the command-line parameters
        {
            // if the output directories do not exist, create them
            if (options.output != null)
                Tool.createFilePath(options.output, true);
        }

        // load the data
        if (options.verbose)
            System.out.println("Loading data");
        ParseResult result = PeakMLParser.parse(new FileInputStream(options.input), true);

        Header header = result.header;
        IPeakSet<IPeakSet<? extends IPeak>> peaksets = (IPeakSet<IPeakSet<? extends IPeak>>) result.measurement;

        int nrmeasurements = header.getNrMeasurementInfos();

        // remove the stability factor annotation
        for (IPeak peak : peaksets)
            peak.removeAnnotation("stability factor");

        // load the database
        if (options.verbose)
            System.out.println("Loading the molecule database");
        HashMap<String, Molecule> database = MoleculeIO.parseXml(new FileInputStream(options.database));

        // filter the set to include only identifiable metabolites
        if (options.verbose)
            System.out.println("Creating selection");
        Vector<IPeakSet<? extends IPeak>> selection = new Vector<IPeakSet<? extends IPeak>>();
        for (Molecule molecule : database.values()) {
            double mass = molecule.getMass(Mass.MONOISOTOPIC);
            double delta = PeriodicTable.PPM(mass, options.ppm);

            // get the most intense peak containing all the measurements
            Vector<IPeakSet<? extends IPeak>> neighbourhoud = peaksets.getPeaksInMassRange(mass - delta,
                    mass + delta);
            Collections.sort(neighbourhoud, IPeak.sort_intensity_descending);
            for (IPeakSet<? extends IPeak> neighbour : neighbourhoud)
                if (count(neighbour) == nrmeasurements) {
                    selection.add(neighbour);
                    break;
                }
        }

        // calculate the stability factor for each peak in the selection
        if (options.verbose)
            System.out.println("Calculating stability factors");
        for (int peakid1 = 0; peakid1 < selection.size(); ++peakid1) {
            double stddeviations[] = new double[selection.size()];

            IPeakSet<? extends IPeak> peakset1 = selection.get(peakid1);
            for (int peakid2 = 0; peakid2 < selection.size(); ++peakid2) {
                IPeakSet<? extends IPeak> peakset2 = selection.get(peakid2);

                double values[] = new double[nrmeasurements];
                for (int measurementid = 0; measurementid < nrmeasurements; ++measurementid) {
                    int measurementid1 = peakset1.get(measurementid).getMeasurementID();
                    int setid1 = header.indexOfSetInfo(header.getSetInfoForMeasurementID(measurementid1));
                    int measurementid2 = peakset2.get(measurementid).getMeasurementID();
                    int setid2 = header.indexOfSetInfo(header.getSetInfoForMeasurementID(measurementid2));
                    if (setid1 != setid2 || measurementid1 != measurementid2)
                        System.err.println("[WARNING]: differing setid or spectrumid for comparison");

                    values[measurementid] = Math.log(peakset1.get(measurementid).getIntensity()
                            / peakset2.get(measurementid).getIntensity()) / Math.log(2);
                }
                stddeviations[peakid2] = Statistical.stddev(values);
            }

            peakset1.addAnnotation("stability factor", Statistical.mean(stddeviations));
        }

        // sort on the stability factor
        Collections.sort(selection, new IPeak.AnnotationAscending("stability factor"));

        // take the top 10% and calculate the geometric mean
        if (options.verbose)
            System.out.println("Calculating normalisation factors");
        int nrselected = (int) (0.1 * selection.size());
        if (nrselected < 10)
            nrselected = (10 < selection.size() ? 10 : selection.size());
        double normalization_factors[] = new double[nrmeasurements];
        for (int measurementid = 0; measurementid < nrmeasurements; ++measurementid) {
            double values[] = new double[nrselected];
            for (int i = 0; i < nrselected; ++i) {
                IPeak peak = selection.get(i).get(measurementid);
                values[i] = peak.getIntensity();
            }
            normalization_factors[measurementid] = Statistical.geomean(values);
        }

        // scale the found normalization factors
        double maxnf = Statistical.max(normalization_factors);
        for (int sampleid = 0; sampleid < nrmeasurements; ++sampleid)
            normalization_factors[sampleid] /= maxnf;

        // write the selection if needed
        if (options.selection != null) {
            if (options.verbose)
                System.out.println("Writing original selection data");

            PeakMLWriter.write(result.header, selection, null,
                    new GZIPOutputStream(new FileOutputStream(options.selection)), null);
        }

        // normalize all the peaks
        if (options.verbose)
            System.out.println("Normalizing all the entries");
        for (IPeakSet<? extends IPeak> peakset : peaksets) {
            for (int measurementid = 0; measurementid < nrmeasurements; ++measurementid) {
                // TODO why did I do this again ?
                int id = 0;
                int setid = 0;
                int spectrumid = 0;
                for (int i = 0; i < header.getNrSetInfos(); ++i) {
                    SetInfo set = header.getSetInfos().get(i);

                    if (id + set.getNrMeasurementIDs() > measurementid) {
                        setid = i;
                        spectrumid = measurementid - id;
                        break;
                    } else
                        id += set.getNrMeasurementIDs();
                }

                MassChromatogram<Peak> masschromatogram = null;
                for (IPeak p : peakset) {
                    int mymeasurementid = p.getMeasurementID();
                    int mysetid = header.indexOfSetInfo(header.getSetInfoForMeasurementID(mymeasurementid));
                    if (mysetid == setid && mymeasurementid == spectrumid) {
                        masschromatogram = (MassChromatogram<Peak>) p;
                        break;
                    }
                }
                if (masschromatogram == null)
                    continue;

                for (IPeak peak : masschromatogram.getPeaks())
                    peak.setIntensity(peak.getIntensity() / normalization_factors[measurementid]);
            }
        }

        // write the selection if needed
        if (options.selection_normalized != null) {
            if (options.verbose)
                System.out.println("Writing the normalized selection data");

            PeakMLWriter.write(result.header, selection, null,
                    new GZIPOutputStream(new FileOutputStream(options.selection_normalized)), null);
        }

        // write the factors if needed
        if (options.factors != null) {
            if (options.verbose)
                System.out.println("Writing the normalization factors");

            PrintStream out = new PrintStream(options.factors);
            for (int measurementid = 0; measurementid < nrmeasurements; ++measurementid)
                out.println(header.getMeasurementInfo(measurementid).getLabel() + "\t"
                        + normalization_factors[measurementid]);
        }

        // write the plot if needed
        if (options.img != null) {
            if (options.verbose)
                System.out.println("Writing the graph");

            DefaultCategoryDataset dataset = new DefaultCategoryDataset();
            JFreeChart linechart = ChartFactory.createLineChart(null, "measurement", "normalization factor",
                    dataset, PlotOrientation.VERTICAL, false, // legend
                    false, // tooltips
                    false // urls
            );

            CategoryPlot plot = (CategoryPlot) linechart.getPlot();
            CategoryAxis axis = (CategoryAxis) plot.getDomainAxis();
            axis.setCategoryLabelPositions(CategoryLabelPositions.UP_45);
            LineAndShapeRenderer renderer = (LineAndShapeRenderer) plot.getRenderer();

            renderer.setSeriesShapesFilled(0, true);
            renderer.setSeriesShapesVisible(0, true);

            linechart.setBackgroundPaint(Color.WHITE);
            linechart.setBorderVisible(false);
            linechart.setAntiAlias(true);

            plot.setBackgroundPaint(Color.WHITE);
            plot.setDomainGridlinesVisible(true);
            plot.setRangeGridlinesVisible(true);

            // create the datasets
            for (int measurementid = 0; measurementid < nrmeasurements; ++measurementid)
                dataset.addValue(normalization_factors[measurementid], "",
                        header.getMeasurementInfo(measurementid).getLabel());
            JFreeChartTools.writeAsPDF(new FileOutputStream(options.img), linechart, 800, 500);
        }

        // write the normalized values
        if (options.verbose)
            System.out.println("Writing the normalized data");
        PeakMLWriter.write(result.header, peaksets.getPeaks(), null,
                new GZIPOutputStream(new FileOutputStream(options.output)), null);
    } catch (Exception e) {
        Tool.unexpectedError(e, application);
    }
}

From source file: com.iisigroup.cap.utils.CapSerialization.java

This utility method compresses a byte array with GZIP and returns the compressed bytes.

/**
 * Compress byte array data with GZIP.
 * 
 * @param input
 *            the input data
 * @return the compressed data
 * @throws java.io.IOException
 */
public byte[] compress(byte[] input) throws java.io.IOException {
    byte[] result = null;
    java.io.ByteArrayOutputStream baout = null;
    GZIPOutputStream gzipout = null;
    try {
        baout = new java.io.ByteArrayOutputStream();
        gzipout = new GZIPOutputStream(baout);
        gzipout.write(input);
        gzipout.finish();
        result = baout.toByteArray();
        return result;
    } finally {
        IOUtils.closeQuietly(baout);
        IOUtils.closeQuietly(gzipout);
    }
}

From source file: com.streamsets.datacollector.cluster.TarFileCreator.java

This example builds a .tar.gz file by layering a TarOutputStream over a buffered GZIPOutputStream.

public static void createTarGz(File dir, File outputFile) throws IOException {
    Utils.checkState(dir.isDirectory(), Utils.formatL("Path {} is not a directory", dir));
    Utils.checkState(dir.canRead(), Utils.formatL("Directory {} cannot be read", dir));
    FileOutputStream dest = new FileOutputStream(outputFile);
    TarOutputStream out = new TarOutputStream(new BufferedOutputStream(new GZIPOutputStream(dest), 65536));
    File[] files = dir.listFiles();
    Utils.checkState(files != null, Utils.formatL("Directory {} could not be read", dir));
    Utils.checkState(files.length > 0, Utils.formatL("Directory {} is empty", dir));
    tarFolder(null, dir.getAbsolutePath(), out);
    out.close();
}

From source file: com.useekm.types.AbstractGeo.java

This helper gzips a byte array in memory via a BufferedOutputStream wrapped around a GZIPOutputStream.

public static byte[] gzip(byte[] bytes) {
    try {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        BufferedOutputStream bufos = new BufferedOutputStream(new GZIPOutputStream(bos));
        bufos.write(bytes);
        bufos.close();
        byte[] retval = bos.toByteArray();
        bos.close();
        return retval;
    } catch (IOException e) {
        throw new IllegalStateException("Unexpected IOException on inmemory gzip", e);
    }
}

From source file: com.wattzap.model.social.SelfLoopsAPI.java

This example gzips a TCX file into memory and uploads it as part of a multipart HTTP POST request.

public static int uploadActivity(String email, String passWord, String fileName, String note)
        throws IOException {
    JSONObject jsonObj = null;

    FileInputStream in = null;
    GZIPOutputStream out = null;
    CloseableHttpClient httpClient = HttpClients.createDefault();
    try {
        HttpPost httpPost = new HttpPost(url);
        httpPost.setHeader("enctype", "multipart/mixed");

        in = new FileInputStream(fileName);
        // Create stream to compress data and write it to the to file.
        ByteArrayOutputStream obj = new ByteArrayOutputStream();
        out = new GZIPOutputStream(obj);

        // Copy bytes from one stream to the other
        byte[] buffer = new byte[4096];
        int bytes_read;
        while ((bytes_read = in.read(buffer)) != -1) {
            out.write(buffer, 0, bytes_read);
        }
        out.close();
        in.close();

        ByteArrayBody bin = new ByteArrayBody(obj.toByteArray(), ContentType.create("application/x-gzip"),
                fileName);
        HttpEntity reqEntity = MultipartEntityBuilder.create()
                .addPart("email", new StringBody(email, ContentType.TEXT_PLAIN))
                .addPart("pw", new StringBody(passWord, ContentType.TEXT_PLAIN)).addPart("tcxfile", bin)
                .addPart("note", new StringBody(note, ContentType.TEXT_PLAIN)).build();

        httpPost.setEntity(reqEntity);

        CloseableHttpResponse response = null;
        try {
            response = httpClient.execute(httpPost);
            int code = response.getStatusLine().getStatusCode();
            switch (code) {
            case 200:

                HttpEntity respEntity = response.getEntity();

                if (respEntity != null) {
                    // EntityUtils to get the response content
                    String content = EntityUtils.toString(respEntity);
                    //System.out.println(content);
                    JSONParser jsonParser = new JSONParser();
                    jsonObj = (JSONObject) jsonParser.parse(content);
                }

                break;
            case 403:
                throw new RuntimeException(
                        "Authentification failure " + email + " " + response.getStatusLine());
            default:
                throw new RuntimeException("Error " + code + " " + response.getStatusLine());
            }
        } catch (ParseException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        } finally {
            if (response != null) {
                response.close();
            }
        }

        int activityId = ((Long) jsonObj.get("activity_id")).intValue();

        // parse error code
        int error = ((Long) jsonObj.get("error_code")).intValue();
        if (activityId == -1) {
            String message = (String) jsonObj.get("message");
            switch (error) {
            case 102:
                throw new RuntimeException("Empty TCX file " + fileName);
            case 103:
                throw new RuntimeException("Invalide TCX Format " + fileName);
            case 104:
                throw new RuntimeException("TCX Already Present " + fileName);
            case 105:
                throw new RuntimeException("Invalid XML " + fileName);
            case 106:
                throw new RuntimeException("invalid compression algorithm");
            case 107:
                throw new RuntimeException("Invalid file mime types");
            default:
                throw new RuntimeException(message + " " + error);
            }
        }

        return activityId;
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            out.close();
        }
        httpClient.close();
    }
}

From source file: net.sf.ehcache.constructs.web.PageInfoTest.java

This JUnit setUp method writes a test gzip file to the system temporary directory.

/**
 * Create a gzip file in tmp.
 *
 * @throws Exception
 */
protected void setUp() throws Exception {
    String testGzipFile = System.getProperty("java.io.tmpdir") + File.separator + "test.gzip";
    testFile = new File(testGzipFile);
    FileOutputStream fout = new FileOutputStream(testFile);
    GZIPOutputStream gzipOutputStream = new GZIPOutputStream(fout);
    for (int j = 0; j < 100; j++) {
        for (int i = 0; i < 1000; i++) {
            gzipOutputStream.write(i);
        }
    }
    gzipOutputStream.close();
}

From source file: ca.uhn.fhir.rest.client.apache.GZipContentInterceptor.java

This interceptor gzips the body of an outgoing Apache HTTP request and adds a Content-Encoding: gzip header.

@Override
public void interceptRequest(IHttpRequest theRequestInterface) {
    HttpRequestBase theRequest = ((ApacheHttpRequest) theRequestInterface).getApacheRequest();
    if (theRequest instanceof HttpEntityEnclosingRequest) {
        Header[] encodingHeaders = theRequest.getHeaders(Constants.HEADER_CONTENT_ENCODING);
        if (encodingHeaders == null || encodingHeaders.length == 0) {
            HttpEntityEnclosingRequest req = (HttpEntityEnclosingRequest) theRequest;

            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            GZIPOutputStream gos;
            try {
                gos = new GZIPOutputStream(bos);
                req.getEntity().writeTo(gos);
                gos.finish();
            } catch (IOException e) {
                ourLog.warn("Failed to GZip outgoing content", e);
                return;
            }

            byte[] byteArray = bos.toByteArray();
            ByteArrayEntity newEntity = new ByteArrayEntity(byteArray);
            req.setEntity(newEntity);
            req.addHeader(Constants.HEADER_CONTENT_ENCODING, "gzip");
        }
    }

}