Example usage for java.util.zip ZipOutputStream write

List of usage examples for java.util.zip ZipOutputStream write

Introduction

In this page you can find the example usage for java.util.zip ZipOutputStream write.

Prototype

public synchronized void write(byte[] b, int off, int len) throws IOException 

Source Link

Document

Writes an array of bytes to the current ZIP entry data.

Usage

From source file:nl.nn.adapterframework.pipes.CompressPipe.java

/**
 * Compresses or decompresses the pipe input.
 *
 * Behavior is driven by instance configuration fields (set elsewhere in this
 * class — presumably via the pipe's configuration; confirm):
 * {@code messageIsContent} / {@code resultIsContent} decide whether the
 * input/output are in-memory data or file names, {@code compress} selects
 * direction, and {@code fileFormat} ("gz" vs other/null) selects GZIP vs ZIP.
 *
 * @param input   the message: raw content (byte[]/String) when
 *                messageIsContent, otherwise a file name; when compressing,
 *                a ";"-separated list of file names zips them all into one archive
 * @param session pipeline session, used for filename-pattern resolution and logging
 * @return result forward with either the produced content or the absolute
 *         path of the produced file (see getResultMsg)
 * @throws PipeRunException when an error occurs and no exception-forward is configured
 */
public PipeRunResult doPipe(Object input, IPipeLineSession session) throws PipeRunException {
    try {
        Object result;
        InputStream in;
        OutputStream out;
        boolean zipMultipleFiles = false;
        if (messageIsContent) {
            // Input is the data itself: wrap it in an in-memory stream.
            if (input instanceof byte[]) {
                in = new ByteArrayInputStream((byte[]) input);
            } else {
                // NOTE(review): uses the platform default charset — confirm intended.
                in = new ByteArrayInputStream(input.toString().getBytes());
            }
        } else {
            // Input is a file name; a ";"-separated list (compress mode only)
            // means several files are zipped into a single archive below.
            if (compress && StringUtils.contains((String) input, ";")) {
                zipMultipleFiles = true;
                in = null; // opened per file inside the multi-file zip loop
            } else {
                in = new FileInputStream((String) input);
            }
        }
        if (resultIsContent) {
            // 'result' keeps a reference to the raw ByteArrayOutputStream even
            // after 'out' is wrapped in a compressing stream below, so the
            // collected bytes can be read back via getResultMsg(result).
            out = new ByteArrayOutputStream();
            result = out;
        } else {
            String outFilename = null;
            if (messageIsContent) {
                outFilename = FileUtils.getFilename(getParameterList(), session, (File) null, filenamePattern);
            } else {
                outFilename = FileUtils.getFilename(getParameterList(), session, new File((String) input),
                        filenamePattern);
            }
            File outFile = new File(outputDirectory, outFilename);
            result = outFile.getAbsolutePath();
            // NOTE(review): streams opened here are not closed if a later step
            // throws — resource leak on error paths.
            out = new FileOutputStream(outFile);
        }
        if (zipMultipleFiles) {
            // Zip every ";"-separated file name into one archive.
            ZipOutputStream zipper = new ZipOutputStream(out);
            StringTokenizer st = new StringTokenizer((String) input, ";");
            while (st.hasMoreElements()) {
                String fn = st.nextToken();
                String zipEntryName = getZipEntryName(fn, session);
                zipper.putNextEntry(new ZipEntry(zipEntryName));
                in = new FileInputStream(fn);
                try {
                    int readLength = 0;
                    byte[] block = new byte[4096];
                    while ((readLength = in.read(block)) > 0) {
                        zipper.write(block, 0, readLength);
                    }
                } finally {
                    in.close();
                    zipper.closeEntry();
                }
            }
            zipper.close();
            // 'out' is reassigned for symmetry only; the archive is already
            // finished and closed at this point.
            out = zipper;
        } else {
            if (compress) {
                // Wrap the output in a compressing stream; the copy loop below
                // does the actual work.
                if ("gz".equals(fileFormat) || fileFormat == null && resultIsContent) {
                    out = new GZIPOutputStream(out);
                } else {
                    ZipOutputStream zipper = new ZipOutputStream(out);
                    String zipEntryName = getZipEntryName(input, session);
                    zipper.putNextEntry(new ZipEntry(zipEntryName));
                    out = zipper;
                }
            } else {
                // Decompress: wrap the input instead.
                if ("gz".equals(fileFormat) || fileFormat == null && messageIsContent) {
                    in = new GZIPInputStream(in);
                } else {
                    ZipInputStream zipper = new ZipInputStream(in);
                    String zipEntryName = getZipEntryName(input, session);
                    if (zipEntryName.equals("")) {
                        // Use first entry found
                        zipper.getNextEntry();
                    } else {
                        // Position the stream at the specified entry
                        // NOTE(review): if the entry is absent the stream ends
                        // up positioned at EOF and the copy below writes nothing.
                        ZipEntry zipEntry = zipper.getNextEntry();
                        while (zipEntry != null && !zipEntry.getName().equals(zipEntryName)) {
                            zipEntry = zipper.getNextEntry();
                        }
                    }
                    in = zipper;
                }
            }
            // Pump the (possibly wrapped) input into the (possibly wrapped) output.
            try {
                int readLength = 0;
                byte[] block = new byte[4096];
                while ((readLength = in.read(block)) > 0) {
                    out.write(block, 0, readLength);
                }
            } finally {
                out.close();
                in.close();
            }
        }
        return new PipeRunResult(getForward(), getResultMsg(result));
    } catch (Exception e) {
        // Route the failure to the configured exception forward if present,
        // otherwise escalate as a PipeRunException.
        PipeForward exceptionForward = findForward(EXCEPTIONFORWARD);
        if (exceptionForward != null) {
            log.warn(getLogPrefix(session) + "exception occured, forwarded to [" + exceptionForward.getPath()
                    + "]", e);
            String originalMessage;
            if (input instanceof String) {
                originalMessage = (String) input;
            } else {
                originalMessage = "Object of type " + input.getClass().getName();
            }
            String resultmsg = new ErrorMessageFormatter().format(getLogPrefix(session), e, this,
                    originalMessage, session.getMessageId(), 0);
            return new PipeRunResult(exceptionForward, resultmsg);
        }
        throw new PipeRunException(this, "Unexpected exception during compression", e);
    }
}

From source file:org.bimserver.collada.OpenGLTransmissionFormatSerializer.java

/**
 * Copies a single file into the ZIP stream as a new entry named after the
 * file's simple name (no directory component).
 *
 * @param file         path of the file to embed; must exist and be readable
 * @param outputStream open ZIP stream to append to; left open so the caller
 *                     can add further entries
 * @throws FileNotFoundException if the file cannot be opened
 * @throws IOException           on any read or write failure
 */
public void addToZipFile(Path file, ZipOutputStream outputStream) throws FileNotFoundException, IOException {
    // The entry is keyed by the file name only, e.g. "example.file".
    String fileName = file.getFileName().toString();
    outputStream.putNextEntry(new ZipEntry(fileName));
    // try-with-resources guarantees the source stream is closed even when a
    // read or write throws (the original leaked it on exception paths).
    try (InputStream inputStream = Files.newInputStream(file)) {
        byte[] bytes = new byte[4096];
        int length;
        while ((length = inputStream.read(bytes)) != -1) {
            outputStream.write(bytes, 0, length);
        }
    }
    // Finish this entry so the next putNextEntry starts cleanly.
    outputStream.closeEntry();
}

From source file:org.apache.felix.deploymentadmin.itest.util.DPSigner.java

/**
 * Writes a signed manifest to the ZIP stream: the MANIFEST.MF itself, its
 * signature file (.SF), and the PKCS#7 signature block entry.
 *
 * @param manifest the JAR manifest to sign and write
 * @param zos      destination ZIP stream; left open for further entries
 * @param privKey  key used to produce the PKCS#7 signature block
 * @param cert     certificate embedded in the signature block
 * @throws Exception if writing or signing fails
 */
public void writeSignedManifest(Manifest manifest, ZipOutputStream zos, PrivateKey privKey,
        X509Certificate cert) throws Exception {
    // First entry: the manifest itself, under its canonical JAR name.
    zos.putNextEntry(new ZipEntry(JarFile.MANIFEST_NAME));
    manifest.write(zos);
    zos.closeEntry();

    long timestamp = System.currentTimeMillis();

    // Build the signature-file (.SF) companion of the manifest and capture
    // its serialized bytes — they are both written out and signed below.
    Manifest signatureFile = createSignatureFile(manifest);
    byte[] signatureFileBytes;
    try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
        signatureFile.write(buffer);
        signatureFileBytes = buffer.toByteArray();
    }

    // Second entry: the .SF file, stamped with the signing time.
    ZipEntry sfEntry = new ZipEntry(m_baseName.concat(".SF"));
    sfEntry.setTime(timestamp);
    zos.putNextEntry(sfEntry);
    zos.write(signatureFileBytes, 0, signatureFileBytes.length);
    zos.closeEntry();

    // Third entry: the PKCS#7 signature block over the .SF bytes, with an
    // extension derived from the key algorithm.
    byte[] signatureBlock = calculateSignatureBlock(privKey, cert, signatureFileBytes);
    ZipEntry blockEntry = new ZipEntry(m_baseName.concat(getBlockFileExtension(privKey)));
    blockEntry.setTime(timestamp);
    zos.putNextEntry(blockEntry);
    zos.write(signatureBlock);
    zos.closeEntry();
}

From source file:edu.ncsu.asbransc.mouflon.recorder.UploadFile.java

/**
 * Zips the given files into {@code out} using DEFLATE compression; each
 * input file becomes one top-level entry named after its simple name.
 * I/O failures are logged to stderr and swallowed, preserving the original
 * best-effort contract of this uploader.
 *
 * @param out destination ZIP file (overwritten if it already exists)
 * @param in  files to add to the archive
 */
private void zipUp(File out, File[] in) {
    byte[] buffer = new byte[4096];
    // try-with-resources closes both streams (in reverse order) even on
    // error, fixing the original's leak of 'fin' when a read/write failed
    // mid-loop and its NPE-prone finally block; the explicit fout.close()
    // was redundant because zout.close() already closes the wrapped stream.
    try (FileOutputStream fout = new FileOutputStream(out);
            ZipOutputStream zout = new ZipOutputStream(fout)) {
        zout.setMethod(ZipOutputStream.DEFLATED);
        for (File currFile : in) {
            try (FileInputStream fin = new FileInputStream(currFile)) {
                zout.putNextEntry(new ZipEntry(currFile.getName()));
                int bytesRead;
                while ((bytesRead = fin.read(buffer)) > 0) {
                    zout.write(buffer, 0, bytesRead);
                }
                zout.closeEntry();
            }
        }
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so this one handler
        // covers both of the original catch blocks with identical behavior.
        e.printStackTrace();
    }
}

From source file:fr.cirad.mgdb.exporting.markeroriented.VcfExportHandler.java

/**
 * Exports the variants selected by {@code markerCursor} as a zipped VCF
 * written to {@code outputStream}, optionally preceded by pre-built
 * companion files.
 *
 * @param progress  reports step progress; export stops early if aborted
 * @param markerSynonyms optional variant-id → synonym mapping (may be null)
 * @param readyToExportFiles extra files to copy into the archive verbatim (may be null)
 * @throws Exception on conversion failures; Mongo/IO errors are logged and
 *         recorded on {@code progress} instead of being rethrown
 */
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    // Determine whether all samples belong to one project; if not, no VCF
    // header can be associated (projectId becomes 0).
    Integer projectId = null;
    for (SampleId spId : sampleIDs) {
        if (projectId == null)
            projectId = spId.getProject();
        // NOTE(review): if getProject() returns Integer (not int) this '!='
        // compares references, not values — confirm the return type.
        else if (projectId != spId.getProject()) {
            projectId = 0;
            break; // more than one project are involved: no header will be written
        }
    }

    // Warnings emitted during genotype conversion are buffered in a temp
    // file and appended to the archive as a REMARKS entry at the end.
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    int markerCount = markerCursor.count();
    // NOTE(review): zos is never explicitly finished/closed in this method —
    // presumably the CustomVCFWriter below takes ownership and writer.close()
    // finishes the archive; confirm.
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    // Copy any pre-built companion files into the archive first. putNextEntry
    // implicitly closes the previous entry, so no closeEntry calls are needed.
    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    // Map each sample to its individual and build the de-duplicated,
    // order-preserving list of individuals for the VCF header.
    LinkedHashMap<SampleId, String> sampleIDToIndividualIdMap = new LinkedHashMap<SampleId, String>();
    ArrayList<String> individualList = new ArrayList<String>();
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    for (int i = 0; i < sampleIDs.size(); i++) {
        String individualId = individuals.get(i).getId();
        sampleIDToIndividualIdMap.put(sampleIDs.get(i), individualId);
        if (!individualList.contains(individualId)) {
            individualList.add(individualId);
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".vcf"));

    // Size query chunks so each batch stays under nMaxChunkSizeInMb of
    // average-sized documents.
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nQueryChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;

    VariantContextWriter writer = null;
    try {
        // First pass (on a cursor copy): collect the distinct sequence
        // (chromosome) names to build the SAM sequence dictionary.
        List<String> distinctSequenceNames = new ArrayList<String>();

        String sequenceSeqCollName = MongoTemplateManager.getMongoCollectionName(Sequence.class);
        if (mongoTemplate.collectionExists(sequenceSeqCollName)) {
            DBCursor markerCursorCopy = markerCursor.copy();
            markerCursorCopy.batchSize(nQueryChunkSize);
            while (markerCursorCopy.hasNext()) {
                int nLoadedMarkerCountInLoop = 0;
                boolean fStartingNewChunk = true;
                while (markerCursorCopy.hasNext()
                        && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                    DBObject exportVariant = markerCursorCopy.next();
                    String chr = (String) ((DBObject) exportVariant
                            .get(VariantData.FIELDNAME_REFERENCE_POSITION))
                                    .get(ReferencePosition.FIELDNAME_SEQUENCE);
                    if (!distinctSequenceNames.contains(chr))
                        distinctSequenceNames.add(chr);
                }
            }
            markerCursorCopy.close();
        }

        Collections.sort(distinctSequenceNames, new AlphaNumericStringComparator());
        SAMSequenceDictionary dict = createSAMSequenceDictionary(sModule, distinctSequenceNames);
        // The writer wraps zos directly, so VCF output lands in the current
        // zip entry.
        writer = new CustomVCFWriter(null, zos, dict, false, false, true);
        //         VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
        //         vcwb.unsetOption(Options.INDEX_ON_THE_FLY);
        //         vcwb.unsetOption(Options.DO_NOT_WRITE_GENOTYPES);
        //         vcwb.setOption(Options.USE_ASYNC_IOINDEX_ON_THE_FLY);
        //         vcwb.setOption(Options.ALLOW_MISSING_FIELDS_IN_HEADER);
        //         vcwb.setReferenceDictionary(dict);
        //         writer = vcwb.build();
        //         writer = new AsyncVariantContextWriter(writer, 3000);

        progress.moveToNextStep(); // done with dictionary

        // Merge stored VCF header lines (for the single project, if any)
        // with contig lines derived from the sequence-stats collection.
        DBCursor headerCursor = mongoTemplate
                .getCollection(MongoTemplateManager.getMongoCollectionName(DBVCFHeader.class))
                .find(new BasicDBObject("_id." + VcfHeaderId.FIELDNAME_PROJECT, projectId));
        Set<VCFHeaderLine> headerLines = new HashSet<VCFHeaderLine>();
        boolean fWriteCommandLine = true, fWriteEngineHeaders = true; // default values

        while (headerCursor.hasNext()) {
            DBVCFHeader dbVcfHeader = DBVCFHeader.fromDBObject(headerCursor.next());
            headerLines.addAll(dbVcfHeader.getHeaderLines());

            // Add sequence header lines (not stored in our vcf header collection)
            BasicDBObject projection = new BasicDBObject(SequenceStats.FIELDNAME_SEQUENCE_LENGTH, true);
            int nSequenceIndex = 0;
            for (String sequenceName : distinctSequenceNames) {
                String sequenceInfoCollName = MongoTemplateManager.getMongoCollectionName(SequenceStats.class);
                boolean fCollectionExists = mongoTemplate.collectionExists(sequenceInfoCollName);
                if (fCollectionExists) {
                    DBObject record = mongoTemplate.getCollection(sequenceInfoCollName).findOne(
                            new Query(Criteria.where("_id").is(sequenceName)).getQueryObject(), projection);
                    if (record == null) {
                        LOG.warn("Sequence '" + sequenceName + "' not found in collection "
                                + sequenceInfoCollName);
                        continue;
                    }

                    Map<String, String> sequenceLineData = new LinkedHashMap<String, String>();
                    sequenceLineData.put("ID", (String) record.get("_id"));
                    sequenceLineData.put("length",
                            ((Number) record.get(SequenceStats.FIELDNAME_SEQUENCE_LENGTH)).toString());
                    headerLines.add(new VCFContigHeaderLine(sequenceLineData, nSequenceIndex++));
                }
            }
            fWriteCommandLine = headerCursor.size() == 1 && dbVcfHeader.getWriteCommandLine(); // wouldn't make sense to include command lines for several runs
            if (!dbVcfHeader.getWriteEngineHeaders())
                fWriteEngineHeaders = false;
        }
        headerCursor.close();

        VCFHeader header = new VCFHeader(headerLines, individualList);
        header.setWriteCommandLine(fWriteCommandLine);
        header.setWriteEngineHeaders(fWriteEngineHeaders);
        writer.writeHeader(header);

        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;
        HashMap<SampleId, Comparable /*phID*/> phasingIDsBySample = new HashMap<SampleId, Comparable>();

        // Main pass: stream markers in chunks, fetch matching genotypes,
        // convert each variant to a VariantContext and write it.
        while (markerCursor.hasNext()) {
            if (progress.hasAborted())
                return;

            int nLoadedMarkerCountInLoop = 0;
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nQueryChunkSize);
            List<Comparable> currentMarkers = new ArrayList<Comparable>();
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nQueryChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                currentMarkers.add((Comparable) exportVariant.get("_id"));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) {
                VariantContext vc = variant.toVariantContext(variantsAndRuns.get(variant),
                        !ObjectId.isValid(variant.getId().toString()), sampleIDToIndividualIdMap,
                        phasingIDsBySample, nMinimumGenotypeQuality, nMinimumReadDepth, warningFileWriter,
                        markerSynonyms == null ? variant.getId() : markerSynonyms.get(variant.getId()));
                try {
                    writer.add(vc);
                } catch (Throwable t) {
                    Exception e = new Exception("Unable to convert to VariantContext: " + variant.getId(), t);
                    LOG.debug("error", e);
                    throw e;
                }

                if (nLoadedMarkerCountInLoop > currentMarkers.size())
                    LOG.error("Bug: writing variant number " + nLoadedMarkerCountInLoop + " (only "
                            + currentMarkers.size() + " variants expected)");
            }

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }
        progress.setCurrentStepProgress((short) 100);

    } catch (Exception e) {
        // Errors are surfaced via the progress indicator rather than rethrown.
        LOG.error("Error exporting", e);
        progress.setError(e.getMessage());
        return;
    } finally {
        // Append buffered conversion warnings (if any) as a REMARKS entry,
        // then discard the temp file and close the writer.
        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            BufferedReader in = new BufferedReader(new FileReader(warningFile));
            String sLine;
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();
        if (writer != null)
            try {
                writer.close();
            } catch (Throwable ignored) {
            }
    }
}

From source file:fr.cirad.mgdb.exporting.markeroriented.GFFExportHandler.java

/**
 * Exports the variants selected by {@code markerCursor} as a zipped GFF3
 * file written to {@code outputStream}, optionally preceded by pre-built
 * companion files; conversion warnings are appended as a REMARKS entry.
 *
 * Fix: the warning-copy loop at the end contained a stray extra
 * {@code in.readLine()} that silently discarded every other warning line
 * and undercounted warnings (the sibling VCF export handler has no such
 * skip); the stray read has been removed.
 *
 * @param progress  reports step progress; export stops early if aborted
 * @param markerSynonyms optional variant-id → synonym mapping (may be null)
 * @param readyToExportFiles extra files to copy into the archive verbatim (may be null)
 * @throws Exception on any unrecoverable database or I/O failure
 */
@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
    ZipOutputStream zos = new ZipOutputStream(outputStream);

    // Copy any pre-built companion files into the archive first.
    if (readyToExportFiles != null)
        for (String readyToExportFile : readyToExportFiles.keySet()) {
            zos.putNextEntry(new ZipEntry(readyToExportFile));
            InputStream inputStream = readyToExportFiles.get(readyToExportFile);
            byte[] dataBlock = new byte[1024];
            int count = inputStream.read(dataBlock, 0, 1024);
            while (count != -1) {
                zos.write(dataBlock, 0, count);
                count = inputStream.read(dataBlock, 0, 1024);
            }
        }

    // Warnings raised while picking genotypes are buffered in a temp file
    // and appended to the archive at the end.
    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);

    int markerCount = markerCursor.count();

    // De-duplicated, order-preserving list of individuals for the export.
    List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);
    ArrayList<String> individualList = new ArrayList<String>();
    for (int i = 0; i < sampleIDs.size(); i++) {
        Individual individual = individuals.get(i);
        if (!individualList.contains(individual.getId())) {
            individualList.add(individual.getId());
        }
    }

    String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
    zos.putNextEntry(new ZipEntry(exportName + ".gff3"));
    String header = "##gff-version 3" + LINE_SEPARATOR;
    zos.write(header.getBytes());

    // Sequence-ontology accession per variant type, for GFF column 3.
    TreeMap<String, String> typeToOntology = new TreeMap<String, String>();
    typeToOntology.put(Type.SNP.toString(), "SO:0000694");
    typeToOntology.put(Type.INDEL.toString(), "SO:1000032");
    typeToOntology.put(Type.MIXED.toString(), "SO:0001059");
    typeToOntology.put(Type.SYMBOLIC.toString(), "SO:0000109");
    typeToOntology.put(Type.MNP.toString(), "SO:0001059");

    // Size query chunks so each batch stays under nMaxChunkSizeInMb of
    // average-sized documents.
    int avgObjSize = (Integer) mongoTemplate
            .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats().get("avgObjSize");
    int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
    short nProgress = 0, nPreviousProgress = 0;
    long nLoadedMarkerCount = 0;

    while (markerCursor.hasNext()) {
        // Collect one chunk of markers with their chromosomal positions.
        int nLoadedMarkerCountInLoop = 0;
        Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
        boolean fStartingNewChunk = true;
        markerCursor.batchSize(nChunkSize);
        while (markerCursor.hasNext() && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
            DBObject exportVariant = markerCursor.next();
            DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
            markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                    refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                            + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
            nLoadedMarkerCountInLoop++;
            fStartingNewChunk = false;
        }

        List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
        LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                mongoTemplate, sampleIDs, currentMarkers, true,
                null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
        for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
        {
            Comparable variantId = variant.getId();
            List<String> variantDataOrigin = new ArrayList<String>();

            Map<String, Integer> gqValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, Integer> dpValueForSampleId = new LinkedHashMap<String, Integer>();
            Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
            List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
            if (chromAndPos.size() == 0)
                LOG.warn("Chromosomal position not found for marker " + variantId);
            // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
            if (markerSynonyms != null) {
                Comparable syn = markerSynonyms.get(variantId);
                if (syn != null)
                    variantId = syn;
            }

            // Gather every genotype per individual, filtering out calls below
            // the quality/depth thresholds.
            Collection<VariantRunData> runs = variantsAndRuns.get(variant);
            if (runs != null)
                for (VariantRunData run : runs)
                    for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                        SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                        String individualId = individuals
                                .get(sampleIDs.indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                .getId();

                        Integer gq = null;
                        try {
                            gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                        } catch (Exception ignored) {
                        }
                        if (gq != null && gq < nMinimumGenotypeQuality)
                            continue;

                        Integer dp = null;
                        try {
                            dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                        } catch (Exception ignored) {
                        }
                        if (dp != null && dp < nMinimumReadDepth)
                            continue;

                        String gtCode = sampleGenotype.getCode();
                        List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                        if (storedIndividualGenotypes == null) {
                            storedIndividualGenotypes = new ArrayList<String>();
                            individualGenotypes.put(individualId, storedIndividualGenotypes);
                        }
                        storedIndividualGenotypes.add(gtCode);
                    }

            // GFF3 fixed columns: seqid, source, type, start, end, score, strand, phase.
            zos.write((chromAndPos.get(0) + "\t" + StringUtils.join(variantDataOrigin, ";") /*source*/ + "\t"
                    + typeToOntology.get(variant.getType()) + "\t" + Long.parseLong(chromAndPos.get(1)) + "\t"
                    + Long.parseLong(chromAndPos.get(1)) + "\t" + "." + "\t" + "+" + "\t" + "." + "\t")
                            .getBytes());
            Comparable syn = markerSynonyms == null ? null : markerSynonyms.get(variant.getId());
            zos.write(("ID=" + variant.getId() + ";" + (syn != null ? "Name=" + syn + ";" : "") + "alleles="
                    + StringUtils.join(variant.getKnownAlleleList(), "/") + ";" + "refallele="
                    + variant.getKnownAlleleList().get(0) + ";").getBytes());

            for (int j = 0; j < individualList
                    .size(); j++ /* we use this list because it has the proper ordering*/) {

                NumberFormat nf = NumberFormat.getInstance(Locale.US);
                nf.setMaximumFractionDigits(4);
                HashMap<String, Integer> compt1 = new HashMap<String, Integer>();
                int highestGenotypeCount = 0;
                int sum = 0;

                String individualId = individualList.get(j);
                List<String> genotypes = individualGenotypes.get(individualId);
                HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes

                // Pick the most frequent genotype for this individual and
                // accumulate per-allele counts along the way.
                String mostFrequentGenotype = null;
                if (genotypes != null)
                    for (String genotype : genotypes) {
                        if (genotype.length() == 0)
                            continue; /* skip missing genotypes */

                        int count = 0;
                        for (String t : variant.getAllelesFromGenotypeCode(genotype)) {
                            for (String t1 : variant.getKnownAlleleList()) {
                                if (t.equals(t1) && !(compt1.containsKey(t1))) {
                                    count++;
                                    compt1.put(t1, count);
                                } else if (t.equals(t1) && compt1.containsKey(t1)) {
                                    if (compt1.get(t1) != 0) {
                                        count++;
                                        compt1.put(t1, count);
                                    } else
                                        compt1.put(t1, count);
                                } else if (!(compt1.containsKey(t1))) {
                                    compt1.put(t1, 0);
                                }
                            }
                        }
                        for (int countValue : compt1.values()) {
                            sum += countValue;
                        }

                        int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                        if (gtCount > highestGenotypeCount) {
                            highestGenotypeCount = gtCount;
                            mostFrequentGenotype = genotype;
                        }
                        genotypeCounts.put(genotype, gtCount);
                    }

                List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                        : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                if (alleles.size() != 0) {
                    zos.write(("acounts=" + individualId + ":").getBytes());

                    for (String knowAllelesCompt : compt1.keySet()) {
                        zos.write(
                                (knowAllelesCompt + " " + nf.format(compt1.get(knowAllelesCompt) / (float) sum)
                                        + " " + compt1.get(knowAllelesCompt) + " ").getBytes());
                    }
                    zos.write((alleles.size() + ";").getBytes());
                }
                if (genotypeCounts.size() > 1) {
                    // Dissimilar genotypes for one individual: record which one
                    // was kept so the user can audit the export.
                    Comparable sVariantId = markerSynonyms != null ? markerSynonyms.get(variant.getId())
                            : variant.getId();
                    warningFileWriter.write("- Dissimilar genotypes found for variant "
                            + (sVariantId == null ? variant.getId() : sVariantId) + ", individual "
                            + individualId + ". Exporting most frequent: " + StringUtils.join(alleles, ",")
                            + "\n");
                }
            }
            zos.write((LINE_SEPARATOR).getBytes());
        }

        if (progress.hasAborted())
            return;

        nLoadedMarkerCount += nLoadedMarkerCountInLoop;
        nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
        if (nProgress > nPreviousProgress) {
            //            if (nProgress%5 == 0)
            //               LOG.info("========================= exportData: " + nProgress + "% =========================" + (System.currentTimeMillis() - before)/1000 + "s");
            progress.setCurrentStepProgress(nProgress);
            nPreviousProgress = nProgress;
        }
    }

    // Append buffered warnings as a REMARKS entry. (Bug fix: the original
    // loop called in.readLine() a second time per iteration, dropping every
    // other warning line.)
    warningFileWriter.close();
    if (warningFile.length() > 0) {
        zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
        int nWarningCount = 0;
        BufferedReader in = new BufferedReader(new FileReader(warningFile));
        String sLine;
        while ((sLine = in.readLine()) != null) {
            zos.write((sLine + "\n").getBytes());
            nWarningCount++;
        }
        LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
        in.close();
    }
    warningFile.delete();

    zos.close();
    progress.setCurrentStepProgress((short) 100);
}

From source file:com.liferay.portal.deploy.hot.ExtHotDeployListener.java

/**
 * Builds a ZIP archive at {@code zipName} containing each regular file from
 * {@code files} under a {@code WEB-INF/} entry named after the file.
 * Directories in the array are skipped (their contents are not recursed into).
 *
 * @param zipName path of the archive to create (overwritten if present)
 * @param files   files to pack; directory entries are ignored
 * @throws Exception if the archive cannot be written or a file cannot be read
 */
private void zipWebInfJar(String zipName, File[] files) throws Exception {
    byte[] copyBuffer = new byte[4096];

    ZipOutputStream archive = new ZipOutputStream(new FileOutputStream(zipName));
    try {
        for (File candidate : files) {
            if (candidate.isDirectory()) {
                continue; // only plain files go into the archive
            }

            FileInputStream source = new FileInputStream(candidate);
            try {
                archive.putNextEntry(new ZipEntry("WEB-INF/" + candidate.getName()));
                int count;
                while ((count = source.read(copyBuffer)) != -1) {
                    archive.write(copyBuffer, 0, count);
                }
            } finally {
                source.close(); // close even if the copy fails mid-stream
            }
        }
    } finally {
        archive.close(); // also flushes the ZIP central directory
    }
}

From source file:org.egov.stms.notice.service.SewerageNoticeService.java

/**
 * Writes the content of {@code inputStream} into {@code out} as a single ZIP
 * entry named after the notice number. Slashes in the notice number are
 * replaced with underscores so the entry does not become a nested directory
 * path inside the archive.
 *
 * @param inputStream source data for the entry; always closed before this
 *                    method returns, even when an exception is thrown
 * @param noticeNo    notice number used to derive the entry name
 * @param out         target ZIP stream; the entry is written but the stream
 *                    itself is left open so further entries can be added
 * @return the same {@code out} stream, for chaining
 * @throws ValidationException wrapping any argument or I/O failure
 */
public ZipOutputStream addFilesToZip(final InputStream inputStream, final String noticeNo,
        final ZipOutputStream out) {
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("Entered into addFilesToZip method");
    final byte[] buffer = new byte[1024];
    try {
        out.setLevel(Deflater.DEFAULT_COMPRESSION);
        out.putNextEntry(new ZipEntry(noticeNo.replaceAll("/", "_")));
        int len;
        while ((len = inputStream.read(buffer)) > 0)
            out.write(buffer, 0, len);
    } catch (final IllegalArgumentException iae) {
        LOGGER.error(EXCEPTION_IN_ADD_FILES_TO_ZIP, iae);
        throw new ValidationException(Arrays.asList(new ValidationError("error", iae.getMessage())));
    } catch (final FileNotFoundException fnfe) {
        LOGGER.error(EXCEPTION_IN_ADD_FILES_TO_ZIP, fnfe);
        throw new ValidationException(Arrays.asList(new ValidationError("error", fnfe.getMessage())));
    } catch (final IOException ioe) {
        LOGGER.error(EXCEPTION_IN_ADD_FILES_TO_ZIP, ioe);
        throw new ValidationException(Arrays.asList(new ValidationError("error", ioe.getMessage())));
    } finally {
        // The original closed the stream only on the success path, leaking it
        // whenever an exception was thrown; close it unconditionally here.
        try {
            inputStream.close();
        } catch (final IOException closeFailure) {
            LOGGER.error(EXCEPTION_IN_ADD_FILES_TO_ZIP, closeFailure);
        }
    }
    if (LOGGER.isDebugEnabled())
        LOGGER.debug("Exit from addFilesToZip method");
    return out;
}

From source file:net.sourceforge.jweb.maven.mojo.InWarMinifyMojo.java

/**
 * Rewrites the packaged artifact in place, running every entry accepted by
 * {@link MinifyFileFilter} through the YUI compressor and copying all other
 * entries through unchanged. The original archive is first renamed to a temp
 * file, then a new archive is written back to the original path.
 *
 * @throws MojoExecutionException if the artifact cannot be renamed or rewritten
 *                                (the original merely printed the stack trace
 *                                and let the build succeed with a possibly
 *                                corrupt artifact)
 * @throws MojoFailureException   declared for the Mojo contract; not thrown here
 */
public void execute() throws MojoExecutionException, MojoFailureException {
    if (disabled)
        return;
    processConfiguration();
    String name = this.getBuilddir().getAbsolutePath() + File.separator + this.getFinalName() + "."
            + this.getPacking();
    this.getLog().info(name);
    MinifyFileFilter fileFilter = new MinifyFileFilter();
    int counter = 0;
    try {
        File finalWarFile = new File(name);
        // Move the artifact aside so its original path can be rewritten.
        File tempFile = File.createTempFile(finalWarFile.getName(), null);
        tempFile.delete(); // createTempFile created it; renameTo needs the target absent
        if (!finalWarFile.renameTo(tempFile)) {
            getLog().error("Can not rename file, please check.");
            // Continuing would rewrite the artifact from a missing temp file.
            throw new MojoExecutionException("Cannot rename " + name + " to temporary file " + tempFile);
        }

        ZipOutputStream out = new ZipOutputStream(new FileOutputStream(finalWarFile));
        try {
            ZipFile zipFile = new ZipFile(tempFile);
            try {
                Enumeration<? extends ZipEntry> entries = zipFile.entries();
                while (entries.hasMoreElements()) {
                    ZipEntry entry = entries.nextElement();
                    // Entries the filter rejects are streamed through unchanged.
                    if (!fileFilter.accept(entry)) {
                        getLog().debug("nocompress entry: " + entry.getName());
                        out.putNextEntry(entry);
                        InputStream inputStream = zipFile.getInputStream(entry);
                        try {
                            byte[] buf = new byte[512];
                            int len;
                            while ((len = inputStream.read(buf)) > 0) {
                                out.write(buf, 0, len);
                            }
                        } finally {
                            inputStream.close();
                        }
                        continue;
                    }

                    // Scratch files for the compressor: <counter>.tmp -> <counter>.min.tmp
                    File sourceTmp = new File(FileUtils.getUserDirectoryPath() + File.separator + ".mvntmp"
                            + File.separator + counter + ".tmp");
                    File destTmp = new File(FileUtils.getUserDirectoryPath() + File.separator + ".mvntmp"
                            + File.separator + counter + ".min.tmp");
                    FileUtils.writeStringToFile(sourceTmp, "");
                    FileUtils.writeStringToFile(destTmp, "");

                    // Assemble YUI arguments: --type css|js [user args] <in> -o <out>
                    String[] provided = getYuiArguments();
                    int length = (provided == null ? 0 : provided.length) + 5;
                    int i = 0;
                    String[] ret = new String[length];
                    ret[i++] = "--type";
                    ret[i++] = (entry.getName().toLowerCase().endsWith(".css") ? "css" : "js");
                    if (provided != null) {
                        for (String s : provided) {
                            ret[i++] = s;
                        }
                    }
                    ret[i++] = sourceTmp.getAbsolutePath();
                    ret[i++] = "-o";
                    ret[i++] = destTmp.getAbsolutePath();

                    try {
                        InputStream in = zipFile.getInputStream(entry);
                        try {
                            FileUtils.copyInputStreamToFile(in, sourceTmp);
                        } finally {
                            in.close();
                        }
                        YUICompressorNoExit.main(ret);
                    } catch (Exception e) {
                        // Best effort: ship the original content when compression fails.
                        this.getLog().warn("compress error, this file will not be compressed:" + buildStack(e));
                        FileUtils.copyFile(sourceTmp, destTmp);
                    }

                    out.putNextEntry(new ZipEntry(entry.getName()));
                    InputStream compressedIn = new FileInputStream(destTmp);
                    try {
                        byte[] buf = new byte[512];
                        int len;
                        while ((len = compressedIn.read(buf)) > 0) {
                            out.write(buf, 0, len);
                        }
                    } finally {
                        compressedIn.close();
                    }

                    String sourceSize = decimalFormat.format(sourceTmp.length() * 1.0d / 1024) + " KB";
                    String destSize = decimalFormat.format(destTmp.length() * 1.0d / 1024) + " KB";
                    getLog().info("compressed entry:" + entry.getName() + " [" + sourceSize + " ->" + destSize + "/"
                            + numberFormat.format(1 - destTmp.length() * 1.0d / sourceTmp.length()) + "]");

                    counter++;
                }
            } finally {
                zipFile.close();
            }
        } finally {
            // Always close the rewritten archive; the original leaked both
            // streams (and left a truncated artifact) on any exception.
            out.close();
        }

        FileUtils.cleanDirectory(new File(FileUtils.getUserDirectoryPath() + File.separator + ".mvntmp"));
        FileUtils.forceDelete(new File(FileUtils.getUserDirectoryPath() + File.separator + ".mvntmp"));
    } catch (MojoExecutionException e) {
        throw e;
    } catch (Exception e) {
        // Fail the build instead of swallowing the error — the original only
        // called printStackTrace(), letting a broken artifact pass silently.
        throw new MojoExecutionException("Minification of " + name + " failed", e);
    }
}

From source file:com.asual.summer.bundle.BundleDescriptorMojo.java

/**
 * Recursively adds the contents of {@code directory} to the ZIP stream,
 * naming each entry by its path relative to {@code base} (always with '/'
 * separators, as the ZIP format requires). Directory entries are emitted
 * with a trailing '/' before their children are descended into.
 *
 * @param directory directory whose children are archived
 * @param base      root used to relativize entry names; must be an ancestor
 *                  of {@code directory}
 * @param zos       target stream; entries are added but the stream is left
 *                  open for the caller to close
 * @throws IOException if a file cannot be read or an entry written
 */
private void zip(File directory, File base, ZipOutputStream zos) throws IOException {
    File[] files = directory.listFiles();
    if (files == null) {
        // listFiles() returns null on I/O error or for non-directories;
        // the original dereferenced it unconditionally and threw NPE.
        return;
    }
    byte[] buffer = new byte[8192];
    int read = 0;
    for (int i = 0, n = files.length; i < n; i++) {
        String name = files[i].getPath().replace(File.separatorChar, '/')
                .substring(base.getPath().length() + 1);
        if (files[i].isDirectory()) {
            if (!name.endsWith("/")) {
                name = name + "/";
            }
            zos.putNextEntry(new ZipEntry(name));
            zip(files[i], base, zos);
        } else {
            FileInputStream in = new FileInputStream(files[i]);
            try {
                zos.putNextEntry(new ZipEntry(name));
                while (-1 != (read = in.read(buffer))) {
                    zos.write(buffer, 0, read);
                }
            } finally {
                in.close(); // the original leaked the stream if the copy threw
            }
        }
    }
}