Example usage for java.util.zip GZIPOutputStream close

Introduction

This page collects example usages of java.util.zip GZIPOutputStream close, drawn from open source projects.

Prototype

public void close() throws IOException 

Document

Writes remaining compressed data to the output stream and closes the underlying stream.
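
A minimal sketch of that contract (not taken from the examples below): because close() finishes the compressed stream before closing the underlying one, a try-with-resources block alone is enough to produce a complete gzip member.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;

public class GzipCloseSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // try-with-resources invokes close(), which flushes the remaining
        // compressed data (including the gzip trailer) and then closes baos.
        try (GZIPOutputStream gzos = new GZIPOutputStream(baos)) {
            gzos.write("hello".getBytes());
        }
        System.out.println("compressed size: " + baos.size() + " bytes");
    }
}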

Usage

From source file:com.microsoft.tfs.core.clients.versioncontrol.engines.internal.workers.CheckinWorker.java

/**
 * Compress the source file to a new temporary file, and return the absolute
 * path to the new temporary file. The algorithm used is gzip.
 *
 * @param sourceFile
 *        the source file to compress (must not be <code>null</code> or
 *        empty).
 * @return the full path to the new compressed temp file that was created.
 * @throws CoreCancelException
 *         if the compression was cancelled by the user via Core's
 *         TaskMonitor. The output file is removed before this exception is
 *         thrown.
 */
private File compressToTempFile(final String sourceFile) throws CoreCancelException {
    Check.notNullOrEmpty(sourceFile, "sourceFile"); //$NON-NLS-1$

    FileInputStream is = null;
    FileOutputStream os = null;
    GZIPOutputStream gzos = null;

    final String messageFormat = MessageFormat.format(
            Messages.getString("CheckinWorker.CompressFIleProgressFormat_SKIPVALIDATE"), //$NON-NLS-1$
            change.getServerItem());
    final FileProcessingProgressMonitorAdapter monitor = new FileProcessingProgressMonitorAdapter(
            userCancellationMonitor, new File(sourceFile).length(), messageFormat);

    TaskMonitorService.pushTaskMonitor(monitor);

    try {
        final File temp = File.createTempFile("teamexplorer", ".tmp"); //$NON-NLS-1$ //$NON-NLS-2$

        final String tempFileName = temp.getAbsolutePath();

        is = new FileInputStream(sourceFile);
        os = new FileOutputStream(tempFileName);
        gzos = new GZIPOutputStream(os);

        final byte[] buffer = new byte[GZIP_COMPRESS_READ_BUFFER];
        int read = 0;
        while ((read = is.read(buffer)) != -1) {
            if (TaskMonitorService.getTaskMonitor().isCanceled()) {
                temp.delete();
                throw new CoreCancelException();
            }

            gzos.write(buffer, 0, read);
            TaskMonitorService.getTaskMonitor().worked(read);
        }

        return temp;
    } catch (final IOException e) {
        throw new VersionControlException(e);
    } finally {
        try {
            if (is != null) {
                is.close();
            }
        } catch (final IOException e) {
        }

        try {
            if (gzos != null) {
                gzos.close();
            }
        } catch (final IOException e) {
        }

        TaskMonitorService.popTaskMonitor();
    }
}
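
Note that os is never closed directly in the finally block above; by the close() contract quoted in the introduction, gzos.close() also closes the underlying FileOutputStream. A compact try-with-resources sketch of the same copy loop (a hypothetical simplification that drops the progress-monitor and cancellation plumbing):

// Sketch only; assumes imports java.io.* and java.util.zip.GZIPOutputStream.
private static File compressToTempFileSketch(String sourceFile) throws IOException {
    File temp = File.createTempFile("teamexplorer", ".tmp");
    try (FileInputStream in = new FileInputStream(sourceFile);
         GZIPOutputStream out = new GZIPOutputStream(new FileOutputStream(temp))) {
        byte[] buffer = new byte[8192]; // stand-in for GZIP_COMPRESS_READ_BUFFER
        int read;
        while ((read = in.read(buffer)) != -1) {
            out.write(buffer, 0, read);
        }
    } // close() writes the gzip trailer and closes the file stream
    return temp;
}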

From source file:de.innovationgate.utils.WGUtils.java

/**
 * Compresses the given bytes with gzip.
 * @param uncompressedBytes the uncompressed input bytes
 * @return the gzip-compressed bytes
 * @throws IOException
 */
public static byte[] zip(byte[] uncompressedBytes) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    GZIPOutputStream zos = new GZIPOutputStream(baos);
    zos.write(uncompressedBytes, 0, uncompressedBytes.length);
    zos.close();

    return baos.toByteArray();
}
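
A hypothetical decompression counterpart (a sketch, not shown here as part of WGUtils) reverses the call with GZIPInputStream:

// Sketch only; assumes imports java.io.* and java.util.zip.GZIPInputStream.
public static byte[] unzip(byte[] compressedBytes) throws IOException {
    GZIPInputStream zis = new GZIPInputStream(new ByteArrayInputStream(compressedBytes));
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] buffer = new byte[4096];
    int read;
    while ((read = zis.read(buffer)) != -1) {
        baos.write(buffer, 0, read);
    }
    zis.close();
    return baos.toByteArray();
}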

From source file:au.org.ala.biocache.web.WMSController.java

/**
 * Get occurrences by query as gzipped csv.
 *
 * @param requestParams
 * @param response
 * @throws Exception
 */
@RequestMapping(value = { "/webportal/occurrences.gz", "/mapping/occurrences.gz" }, method = RequestMethod.GET)
public void occurrenceGz(SpatialSearchRequestParams requestParams, HttpServletResponse response)
        throws Exception {

    response.setContentType("text/plain");
    // gzip is a content encoding, not a character encoding
    response.setHeader("Content-Encoding", "gzip");

    ServletOutputStream outStream = response.getOutputStream();
    java.util.zip.GZIPOutputStream gzip = new java.util.zip.GZIPOutputStream(outStream);

    writeOccurrencesCsvToStream(requestParams, gzip);

    gzip.flush();
    gzip.close();
}
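
One caveat with the example above: if writeOccurrencesCsvToStream throws, gzip.close() is never reached. A try-with-resources variant (a sketch against the same assumed method signatures) guarantees close() on the error path too:

try (java.util.zip.GZIPOutputStream gzip =
        new java.util.zip.GZIPOutputStream(response.getOutputStream())) {
    writeOccurrencesCsvToStream(requestParams, gzip);
    gzip.finish(); // writes the gzip trailer; close() alone would also do this
}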

From source file:com.panet.imeta.trans.steps.sort.SortRows.java

private boolean addBuffer(RowMetaInterface rowMeta, Object[] r) throws KettleException {
    if (r != null) {
        // Do we need to convert binary string keys?
        //
        for (int i = 0; i < data.fieldnrs.length; i++) {
            if (data.convertKeysToNative[i]) {
                int index = data.fieldnrs[i];
                r[index] = rowMeta.getValueMeta(index).convertBinaryStringToNativeType((byte[]) r[index]);
            }
        }

        // Save row
        // 
        data.buffer.add(r);
    }
    if (data.files.size() == 0 && r == null) // No more records: sort buffer
    {
        quickSort(data.buffer);
    }

    // Check the free memory every 1000 rows...
    //
    data.freeCounter++;
    if (data.sortSize <= 0 && data.freeCounter >= 1000) {
        data.freeMemoryPct = Const.getPercentageFreeMemory();
        data.freeCounter = 0;

        if (log.isDetailed()) {
            data.memoryReporting++;
            if (data.memoryReporting >= 10) {
                logDetailed("Available memory : " + data.freeMemoryPct + "%");
                data.memoryReporting = 0;
            }
        }
    }

    boolean doSort = data.buffer.size() == data.sortSize; // Buffer is full: sort & dump to disk
    doSort |= data.files.size() > 0 && r == null && data.buffer.size() > 0; // No more records: join from disk 
    doSort |= data.freeMemoryPctLimit > 0 && data.freeMemoryPct < data.freeMemoryPctLimit
            && data.buffer.size() >= data.minSortSize;

    // time to sort the buffer and write the data to disk...
    //
    if (doSort) {
        // First sort the rows in buffer[]
        quickSort(data.buffer);

        // Then write them to disk...
        DataOutputStream dos;
        GZIPOutputStream gzos;
        int p;
        Object[] previousRow = null;

        try {
            FileObject fileObject = KettleVFS.createTempFile(meta.getPrefix(), ".tmp",
                    environmentSubstitute(meta.getDirectory()));

            data.files.add(fileObject); // Remember the files!
            OutputStream outputStream = KettleVFS.getOutputStream(fileObject, false);
            if (data.compressFiles) {
                gzos = new GZIPOutputStream(new BufferedOutputStream(outputStream));
                dos = new DataOutputStream(gzos);
            } else {
                dos = new DataOutputStream(new BufferedOutputStream(outputStream, 500000));
                gzos = null;
            }

            // Just write the data, nothing else
            if (meta.isOnlyPassingUniqueRows()) {
                int index = 0;
                while (index < data.buffer.size()) {
                    Object[] row = data.buffer.get(index);
                    if (previousRow != null) {
                        int result = data.outputRowMeta.compare(row, previousRow, data.fieldnrs);
                        if (result == 0) {
                            data.buffer.remove(index); // remove this duplicate element as requested
                            if (log.isRowLevel())
                                logRowlevel("Duplicate row removed: " + data.outputRowMeta.getString(row));
                        } else {
                            index++;
                        }
                    } else {
                        index++;
                    }
                    previousRow = row;
                }
            }

            // How many records do we have left?
            data.bufferSizes.add(data.buffer.size());

            for (p = 0; p < data.buffer.size(); p++) {
                data.outputRowMeta.writeData(dos, data.buffer.get(p));
            }

            if (data.sortSize < 0) {
                if (data.buffer.size() > data.minSortSize) {
                    data.minSortSize = data.buffer.size(); // if we did it once, we can do it again.

                    // Memory usage goes up over time, even with garbage collection
                    // We need pointers, file handles, etc.
                    // As such, we're going to lower the min sort size a bit
                    //
                    data.minSortSize = (int) Math.round((double) data.minSortSize * 0.90);
                }
            }

            // Clear the list
            data.buffer.clear();

            // Close temp-file
            dos.close(); // close data stream
            if (gzos != null) {
                gzos.close(); // close gzip stream
            }
            outputStream.close(); // close file stream

            // How much memory do we have left?
            //
            data.freeMemoryPct = Const.getPercentageFreeMemory();
            data.freeCounter = 0;
            if (data.sortSize <= 0) {
                if (log.isDetailed())
                    logDetailed("Available memory : " + data.freeMemoryPct + "%");
            }

        } catch (Exception e) {
            throw new KettleException("Error processing temp-file!", e);
        }

        data.getBufferIndex = 0;
    }

    return true;
}
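
A detail worth noting in the cleanup above: dos wraps gzos, which wraps the buffered outputStream, so dos.close() already cascades through the entire chain; the later gzos.close() and outputStream.close() calls are defensive. A minimal JDK-only sketch of that cascade:

import java.io.*;
import java.util.zip.GZIPOutputStream;

public class CloseCascadeSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(
                new GZIPOutputStream(new BufferedOutputStream(sink)));
        dos.writeUTF("row");
        // Closing the outermost stream cascades down: the GZIPOutputStream
        // writes its trailer, then the buffer and sink are closed in turn.
        dos.close();
        System.out.println("compressed bytes: " + sink.size());
    }
}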

From source file:org.nxtgenutils.bsseq.impl.MethylationProfiler.java

/**
 * @param mPileupSample
 * @param mPileupControl
 * @param outputFile
 * @param estimateBisulfiteConversionRateFrom
 * @param sampleNames
 */
public MethylationProfiler(File mPileupSample, File mPileupControl, File outputFile,
        String estimateBisulfiteConversionRateFrom, List<String> sampleNames) {

    if (estimateBisulfiteConversionRateFrom != null && estimateBisulfiteConversionRateFrom.equals("")) {
        estimateBisulfiteConversionRateFrom = NON_CPG_CONTEXT;
    }

    int sampleCount = getSampleCount(mPileupSample);

    if (sampleNames != null) {
        this.sampleNames = sampleNames;
    } else {
        this.sampleNames = generateSampleNames(sampleCount);
    }

    nonCpGContexts.add("CT");
    nonCpGContexts.add("CA");
    nonCpGContexts.add("CC");
    nonCpGContexts.add("total");

    initializeNonCpGCountMaps(this.sampleNames);

    Map<String, Double> bisulfiteConversionRate = null;
    if (estimateBisulfiteConversionRateFrom != null) {

        logger.info("Estimating bisulfite conversionr rate...");
        File mPileupConversionRate = mPileupSample;
        if (mPileupControl != null) {
            mPileupConversionRate = mPileupControl;
        }

        bisulfiteConversionRate = estimateBisulfiteConversionRate(mPileupConversionRate,
                estimateBisulfiteConversionRateFrom);

        NumberFormat formatter = new DecimalFormat("#.#");
        for (String name : this.sampleNames) {
            double rate = bisulfiteConversionRate.get(name);
            if (rate < 0) {
                logger.info(name + ": -");
            } else {
                logger.info(name + ": " + formatter.format(rate * 100.0) + "%");
            }
        }

        //print conversion rates to file
        String conversionRateOutputPath = outputFile.getAbsolutePath() + "." + NON_CPG_CONTEXT
                + ".conversion.rate.tsv";
        if (!estimateBisulfiteConversionRateFrom.equals(NON_CPG_CONTEXT)) {
            conversionRateOutputPath = outputFile.getAbsolutePath() + "." + estimateBisulfiteConversionRateFrom
                    + ".conversion.rate.tsv";
        }

        logger.info("Writing bisulfite conversionr rate to " + conversionRateOutputPath + "...");
        writeConversionRate(bisulfiteConversionRate, conversionRateOutputPath);
        logger.info("--------------------------------------------------------");

    }

    MultiPileupParser pileupParser = new SimpleMultiPileupParser(mPileupSample);
    Iterator<MultiPileup> pileupIterator = pileupParser.iterator();

    GZIPOutputStream outputStream = null;

    try {
        outputStream = new GZIPOutputStream(new FileOutputStream(outputFile.getAbsolutePath() + ".gz"));
    } catch (IOException e) {
        logger.error("Exception while opening output file: " + outputFile.getAbsolutePath(), e);
        System.exit(1);
    }

    PrintWriter pw = new PrintWriter(outputStream);

    logger.info("Generating methylation profiles...");

    MultiPileup previousMultiPileup = null;
    int lineCounter = 0;
    while (pileupIterator.hasNext()) {

        lineCounter++;

        if (lineCounter % 1000000 == 0) {
            logger.info(lineCounter + " lines processed");
        }

        MultiPileup currentMultiPileup = pileupIterator.next();

        if (previousMultiPileup != null) {

            MethylationProfileRecord profileRecordForwardStrand = generateProfileRecord(previousMultiPileup,
                    currentMultiPileup, Strand.FORWARD, this.sampleNames, estimateBisulfiteConversionRateFrom);
            MethylationProfileRecord profileRecordReverseStrand = generateProfileRecord(previousMultiPileup,
                    currentMultiPileup, Strand.REVERSE, this.sampleNames, estimateBisulfiteConversionRateFrom);

            if (profileRecordForwardStrand != null) {

                if (estimateBisulfiteConversionRateFrom != null) {
                    scoreProfileRecord(profileRecordForwardStrand, bisulfiteConversionRate);
                }

                pw.println(profileRecordForwardStrand.formatProfileRecord());
                pw.flush();

            }

            if (profileRecordReverseStrand != null) {

                if (estimateBisulfiteConversionRateFrom != null) {
                    scoreProfileRecord(profileRecordReverseStrand, bisulfiteConversionRate);
                }

                pw.println(profileRecordReverseStrand.formatProfileRecord());
                pw.flush();

            }

        } else {

            pw.print(profileColumnLabels);
            for (int i = 0; i < sampleCount; i++) {
                for (String label : sampleColumnLabels) {
                    pw.print("\t" + this.sampleNames.get(i) + "_" + label);
                }
            }
            pw.println();

        }

        previousMultiPileup = currentMultiPileup;

    }

    pw.close();

    try {
        outputStream.close();
    } catch (IOException e) {
        logger.error("Exception while closing output file: " + outputFile.getAbsolutePath(), e);
    }

    logger.info(lineCounter + " lines processed");
    logger.info("--------------------------------------------------------");
    logger.info("non-CpG conversion rate");

    printNonCpGConversionRate();

    String conversionRateOutputPath = outputFile.getAbsolutePath() + "." + NON_CPG_CONTEXT
            + ".conversion.rate.detailed.tsv";
    writeNonCpGConversionRate(conversionRateOutputPath);

}

From source file:org.pentaho.di.trans.steps.sort.SortRows.java

void sortExternalRows() throws KettleException {
    // The buffer was just dumped and no new rows have arrived since.
    if (data.buffer.isEmpty()) {
        return;
    }

    // First sort the rows in buffer[]
    quickSort(data.buffer);

    // Then write them to disk...
    DataOutputStream dos;
    GZIPOutputStream gzos;
    int p;

    try {
        FileObject fileObject = KettleVFS.createTempFile(meta.getPrefix(), ".tmp",
                environmentSubstitute(meta.getDirectory()), getTransMeta());

        data.files.add(fileObject); // Remember the files!
        OutputStream outputStream = KettleVFS.getOutputStream(fileObject, false);
        if (data.compressFiles) {
            gzos = new GZIPOutputStream(new BufferedOutputStream(outputStream));
            dos = new DataOutputStream(gzos);
        } else {
            dos = new DataOutputStream(new BufferedOutputStream(outputStream, 500000));
            gzos = null;
        }

        // Just write the data, nothing else
        List<Integer> duplicates = new ArrayList<Integer>();
        Object[] previousRow = null;
        if (meta.isOnlyPassingUniqueRows()) {
            int index = 0;
            while (index < data.buffer.size()) {
                Object[] row = data.buffer.get(index);
                if (previousRow != null) {
                    int result = data.outputRowMeta.compare(row, previousRow, data.fieldnrs);
                    if (result == 0) {
                        duplicates.add(index);
                        if (log.isRowLevel()) {
                            logRowlevel(BaseMessages.getString(PKG, "SortRows.RowLevel.DuplicateRowRemoved",
                                    data.outputRowMeta.getString(row)));
                        }
                    }
                }
                index++;
                previousRow = row;
            }
        }

        // How many records do we have left?
        data.bufferSizes.add(data.buffer.size() - duplicates.size());

        int duplicatesIndex = 0;
        for (p = 0; p < data.buffer.size(); p++) {
            boolean skip = false;
            if (duplicatesIndex < duplicates.size()) {
                if (p == duplicates.get(duplicatesIndex)) {
                    skip = true;
                    duplicatesIndex++;
                }
            }
            if (!skip) {
                data.outputRowMeta.writeData(dos, data.buffer.get(p));
            }
        }

        if (data.sortSize < 0) {
            if (data.buffer.size() > data.minSortSize) {
                data.minSortSize = data.buffer.size(); // if we did it once, we can do
                                                       // it again.

                // Memory usage goes up over time, even with garbage collection
                // We need pointers, file handles, etc.
                // As such, we're going to lower the min sort size a bit
                //
                data.minSortSize = (int) Math.round(data.minSortSize * 0.90);
            }
        }

        // Clear the list
        data.buffer.clear();

        // Close temp-file
        dos.close(); // close data stream
        if (gzos != null) {
            gzos.close(); // close gzip stream
        }
        outputStream.close(); // close file stream

        // How much memory do we have left?
        //
        data.freeMemoryPct = Const.getPercentageFreeMemory();
        data.freeCounter = 0;
        if (data.sortSize <= 0) {
            if (log.isDetailed()) {
                logDetailed(
                        BaseMessages.getString(PKG, "SortRows.Detailed.AvailableMemory", data.freeMemoryPct));
            }
        }

    } catch (Exception e) {
        throw new KettleException("Error processing temp-file!", e);
    }

    data.getBufferIndex = 0;
}

From source file:hudson.plugins.project_inheritance.projects.InheritanceProject.java

/**
 * This method dumps the version store as serialized,
 * GZIP compressed, Base64 encoded XML.
 * @return the version store as Base64 encoded, gzip compressed XML, or an
 *         empty string if there is no version store or serialization fails.
 */
public String doGetVersionsAsCompressedXML() {
    if (this.versionStore == null) {
        return "";
    }
    String xml = this.versionStore.toXML();
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream(512);
        BASE64EncoderStream b64s = new BASE64EncoderStream(baos);
        GZIPOutputStream gos = new GZIPOutputStream(b64s);
        gos.write(xml.getBytes());
        gos.finish();
        gos.close();
        return baos.toString();
    } catch (IOException ex) {
        return "";
    }

}
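
Reading such a dump back is the mirror image. A sketch of the reverse path, assuming java.util.Base64 for decoding (BASE64EncoderStream, from the JavaMail utilities, only encodes; the MIME decoder tolerates the line breaks it may insert) and a hypothetical compressedXml input string:

// Sketch only; assumes imports java.io.* and java.util.zip.GZIPInputStream.
byte[] gz = java.util.Base64.getMimeDecoder().decode(compressedXml);
try (GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(gz));
     ByteArrayOutputStream out = new ByteArrayOutputStream()) {
    byte[] buf = new byte[4096];
    int n;
    while ((n = gis.read(buf)) != -1) {
        out.write(buf, 0, n);
    }
    String xml = out.toString();
}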