Example usage for java.io OutputStream getClass

Introduction

On this page you can find example usage for java.io OutputStream getClass.

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Document

Returns the runtime class of this Object.
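
For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) showing what getClass() reports for a wrapped OutputStream: the concrete runtime class of the outermost wrapper, which is exactly what the usage examples below key off of.

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;

public class GetClassDemo {
    public static void main(String[] args) {
        OutputStream raw = new ByteArrayOutputStream();
        OutputStream buffered = new BufferedOutputStream(raw);

        // getClass() reports the concrete runtime type, not the declared type
        System.out.println(raw.getClass());      // class java.io.ByteArrayOutputStream
        System.out.println(buffered.getClass()); // class java.io.BufferedOutputStream
    }
}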

Usage

From source file:com.joyent.manta.client.multipart.EncryptionStateRecorder.java

/**
 * Make sure the wrapping stream performs an HMAC digest and cast it to the needed type.
 *
 * @param cipherStream the encrypting stream which we are verifying is wrapped in an HMac digest
 * @return the HmacOutputStream
 */
private static HmacOutputStream ensureHmacWrapsCipherStream(final OutputStream cipherStream) {
    if (!cipherStream.getClass().equals(HmacOutputStream.class)) {
        final String message = "Cipher lacks authentication but OutputStream is not HmacOutputStream";
        throw new IllegalStateException(message);
    }

    return (HmacOutputStream) cipherStream;
}
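
Worth noting about the guard above: getClass().equals(HmacOutputStream.class) is an exact-class comparison, so even a subclass of HmacOutputStream would fail the check, whereas an instanceof test would accept it. A minimal sketch of the distinction, using standard classes:

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;

public class ExactClassCheckDemo {
    public static void main(String[] args) {
        OutputStream out = new ByteArrayOutputStream();

        // exact-class check: matches only ByteArrayOutputStream itself
        System.out.println(out.getClass().equals(ByteArrayOutputStream.class)); // true
        System.out.println(out.getClass().equals(OutputStream.class));          // false

        // instanceof: matches anywhere in the hierarchy
        System.out.println(out instanceof OutputStream); // true
    }
}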

From source file:com.joyent.manta.client.MantaObjectOutputStream.java

/**
 * Finds the innermost stream if the embedded stream is stored on the passed
 * stream as a field named <code>out</code>. This holds true for all classes
 * that extend {@link java.io.FilterOutputStream}.
 *
 * @param stream stream to search for inner stream
 * @return reference to inner stream class
 */
protected static OutputStream findMostInnerOutputStream(final OutputStream stream) {
    Field f = FieldUtils.getField(stream.getClass(), "out", true);

    if (f == null) {
        return stream;
    } else {
        try {
            Object result = f.get(stream);

            if (result instanceof OutputStream) {
                return findMostInnerOutputStream((OutputStream) result);
            } else {
                return stream;
            }
        } catch (IllegalAccessException e) {
            // If we can't access the field, then we just return back the original stream
            return stream;
        }
    }
}
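
A quick illustration of the unwrapping, sketched as if the caller lived in the same package (the method is protected static): BufferedOutputStream and GZIPOutputStream both extend FilterOutputStream, so each stores its delegate in the protected field named out.

import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;

public class UnwrapDemo {
    public static void main(String[] args) throws IOException {
        OutputStream innermost = new ByteArrayOutputStream();
        OutputStream wrapped = new BufferedOutputStream(new GZIPOutputStream(innermost));

        // unwraps both FilterOutputStream layers and bottoms out at the
        // ByteArrayOutputStream
        OutputStream found = MantaObjectOutputStream.findMostInnerOutputStream(wrapped);
        System.out.println(found.getClass()); // class java.io.ByteArrayOutputStream
    }
}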

From source file:com.joyent.manta.client.MantaObjectOutputStream.java

/**
 * Uses reflection to look into the specified {@link OutputStream} instance to
 * see if there is a boolean field called "closed". If it is set and accessible
 * via reflection, we return its value; otherwise, we return null.
 *
 * @param stream instance to reflect on for closed property
 * @return reference to closed property or null if unavailable
 */
protected static Boolean isInnerStreamClosed(final OutputStream stream) {
    OutputStream inner = findMostInnerOutputStream(stream);

    // If the innermost stream is a ClosedOutputStream instance, then we can
    // assume the stream is closed.
    if (inner.getClass().equals(ClosedOutputStream.class)) {
        return true;
    }

    try {
        Field f = FieldUtils.getField(inner.getClass(), "closed", true);

        if (f == null) {
            throw new IllegalArgumentException("FieldUtils.getField(inner.getClass()) " + "returned null");
        }

        Object result = f.get(inner);
        return (boolean) result;
    } catch (IllegalArgumentException | IllegalAccessException | ClassCastException e) {
        String msg = String.format("Error finding [closed] field on class: %s", inner.getClass());
        LOGGER.warn(msg, e);
        /* If there is no inner field called closed, it is inaccessible, or
         * the field isn't a boolean, return null because we are now dealing
         * with undefined behavior. */
        return null;
    }
}
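
A portability caveat for both reflective helpers above: forcing access to fields of JDK classes (the protected out of FilterOutputStream, or a private closed flag) relies on setAccessible(true). On JDK 9+ this produces illegal-reflective-access warnings, and under the strong encapsulation enforced in recent JDKs it can fail with an InaccessibleObjectException unless the JVM is started with --add-opens java.base/java.io=ALL-UNNAMED. Note that InaccessibleObjectException is a RuntimeException, so the catch blocks above would not swallow it.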

From source file:com.ghgande.j2mod.modbus.utils.TestUtils.java

/**
 * Convenient way of sending data from an input stream to an output stream
 * in the most efficient way possible.
 * If the closeOutput flag is false, then the output stream remains open
 * so that further writes can be made to the stream.
 *
 * @param in           Input stream to read from
 * @param out          Output stream to write to
 * @param closeOutput  True if the output stream should be closed on exit
 * @param ignoreErrors True if this method must not throw any socket errors
 *
 * @throws IOException if an error occurs
 */
public static void pipeInputToOutputStream(InputStream in, OutputStream out, boolean closeOutput,
        boolean ignoreErrors) throws IOException {

    OutputStream bufferedOut = out;
    InputStream bufferedIn = in;

    if (in != null && out != null) {
        try {
            // Buffer the streams if they aren't already

            if (!bufferedOut.getClass().equals(BufferedOutputStream.class)) {
                bufferedOut = new BufferedOutputStream(bufferedOut, DEFAULT_BUFFER_SIZE);
            }
            if (!bufferedIn.getClass().equals(BufferedInputStream.class)) {
                bufferedIn = new BufferedInputStream(bufferedIn, DEFAULT_BUFFER_SIZE);
            }

            // Push the data

            int iTmp;
            while ((iTmp = bufferedIn.read()) != -1) {
                bufferedOut.write((byte) iTmp);
            }
            bufferedOut.flush();
            out.flush();
        } catch (IOException e) {
            if (!ignoreErrors && !(e instanceof java.net.SocketException)) {
                logger.error(e.getMessage());
                throw e;
            } else {
                logger.debug(e.getMessage());
            }
        } finally {
            bufferedIn.close();
            if (closeOutput) {
                bufferedOut.close();
            }
        }
    }
}
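
A minimal usage sketch, assuming TestUtils is on the classpath, piping an in-memory source into an in-memory sink. Note the exact-class checks above: a subclass of BufferedOutputStream would be wrapped a second time, which is redundant but harmless.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class PipeDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayInputStream in = new ByteArrayInputStream("hello".getBytes(StandardCharsets.UTF_8));
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        // closeOutput=true closes the sink when done; ignoreErrors=false rethrows IOExceptions
        TestUtils.pipeInputToOutputStream(in, out, true, false);

        System.out.println(out.toString("UTF-8")); // hello
    }
}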

From source file:com.joyent.manta.client.crypto.EncryptingEntityHelper.java

/**
 * Creates a new {@link OutputStream} implementation that is backed directly
 * by a {@link CipherOutputStream} or a {@link HmacOutputStream} that wraps
 * a {@link CipherOutputStream} depending on the encryption cipher/mode being
 * used. This allows us to support EtM authentication for ciphers/modes that
 * do not natively support authenticating encryption.
 *
 * NOTE: The design of com.joyent.manta.client.multipart.EncryptionStateRecorder
 * is heavily coupled to this implementation! Changing how these streams are
 * wrapped requires changes to EncryptionStateRecorder!
 *
 * @param httpOut       output stream for writing to the HTTP network socket
 * @param cipherDetails information about the method of encryption in use
 * @param cipher        cipher to utilize for encrypting stream
 * @param hmac          current HMAC object with the current checksum state
 * @return a new stream configured based on the parameters
 */
public static OutputStream makeCipherOutputForStream(final OutputStream httpOut,
        final SupportedCipherDetails cipherDetails, final Cipher cipher, final HMac hmac) {
    /* We have to use a "close shield" here because of what happens when
     * close() is called on a CipherOutputStream, for two reasons:
     *
     * 1. CipherOutputStream.close() writes additional bytes that an HMAC
     *    would need to read.
     * 2. Since we are going to append an HMAC to the end of the OutputStream
     *    httpOut, we have to pretend to close it so that the HMAC bytes are
     *    not written in the middle of the CipherOutputStream output, thereby
     *    corrupting the ciphertext. */

    final CloseShieldOutputStream noCloseOut = new CloseShieldOutputStream(httpOut);
    final CipherOutputStream cipherOut = new CipherOutputStream(noCloseOut, cipher);
    final OutputStream out;

    Validate.notNull(cipherDetails, "Cipher details must not be null");
    Validate.notNull(cipher, "Cipher must not be null");

    // Things are a lot simpler if we are using AEAD
    if (cipherDetails.isAEADCipher()) {
        out = cipherOut;
    } else {
        out = new HmacOutputStream(hmac, cipherOut);
    }

    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Creating new OutputStream for multipart [{}]", out.getClass());
    }

    return out;
}
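
The "close shield" trick is reusable on its own. A small sketch, assuming commons-io is on the classpath: closing the shield leaves the underlying stream open for further writes (here, a trailing marker stands in for the appended HMAC).

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.io.output.CloseShieldOutputStream;

public class CloseShieldDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream underlying = new ByteArrayOutputStream();

        OutputStream shielded = new CloseShieldOutputStream(underlying);
        shielded.write("ciphertext".getBytes());
        shielded.close(); // closes only the shield, not the underlying stream

        // the underlying stream is still writable, e.g. for a trailing MAC
        underlying.write("+hmac".getBytes());
        System.out.println(underlying.toString()); // ciphertext+hmac
    }
}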

From source file:org.springmodules.cache.impl.Element.java

private void close(OutputStream closeable) {
    if (closeable == null) {
        return;
    }

    try {
        closeable.close();
    } catch (Exception exception) {
        String clazz = closeable.getClass().getName();
        logger.error("Unable to close " + clazz, exception);
    }
}

From source file:com.streamsets.pipeline.lib.remote.TestChrootSFTPClient.java

@Test
public void testOpenForWriting() throws Exception {
    String text = "hello";
    File file = testFolder.newFile("file.txt");

    path = testFolder.getRoot().getAbsolutePath();
    setupSSHD(path);
    SSHClient sshClient = createSSHClient();

    for (ChrootSFTPClient sftpClient : getClientsWithEquivalentRoots(sshClient)) {
        // We can specify a file as either a relative path "file" or an absolute path "/file" and they should be
        // equivalent
        for (String p : new String[] { file.getName(), "/" + file.getName(), }) {
            file.delete();
            Assert.assertFalse(file.exists());

            OutputStream os = sftpClient.openForWriting(p);
            Assert.assertNotNull(os);
            Assert.assertTrue(os.getClass().getName().startsWith(SFTPStreamFactory.class.getCanonicalName()));
            IOUtils.write(text, os, Charset.forName("UTF-8"));
            os.close();

            Assert.assertEquals(text, Files.readFirstLine(file, Charset.forName("UTF-8")));
        }
    }
}

From source file:fr.cirad.mgdb.exporting.markeroriented.EigenstratExportHandler.java

@Override
public void exportData(OutputStream outputStream, String sModule, List<SampleId> sampleIDs,
        ProgressIndicator progress, DBCursor markerCursor, Map<Comparable, Comparable> markerSynonyms,
        int nMinimumGenotypeQuality, int nMinimumReadDepth, Map<String, InputStream> readyToExportFiles)
        throws Exception {
    // long before = System.currentTimeMillis();

    File warningFile = File.createTempFile("export_warnings_", "");
    FileWriter warningFileWriter = new FileWriter(warningFile);
    File snpFile = null;

    try {
        snpFile = File.createTempFile("snpFile", "");
        FileWriter snpFileWriter = new FileWriter(snpFile);

        ZipOutputStream zos = new ZipOutputStream(outputStream);
        if (ByteArrayOutputStream.class.isAssignableFrom(outputStream.getClass()))
            zos.setLevel(ZipOutputStream.STORED);

        if (readyToExportFiles != null)
            for (String readyToExportFile : readyToExportFiles.keySet()) {
                zos.putNextEntry(new ZipEntry(readyToExportFile));
                InputStream inputStream = readyToExportFiles.get(readyToExportFile);
                byte[] dataBlock = new byte[1024];
                int count = inputStream.read(dataBlock, 0, 1024);
                while (count != -1) {
                    zos.write(dataBlock, 0, count);
                    count = inputStream.read(dataBlock, 0, 1024);
                }
            }

        MongoTemplate mongoTemplate = MongoTemplateManager.get(sModule);
        int markerCount = markerCursor.count();

        List<Individual> individuals = getIndividualsFromSamples(sModule, sampleIDs);

        ArrayList<String> individualList = new ArrayList<String>();
        StringBuffer indFileContents = new StringBuffer();

        for (int i = 0; i < sampleIDs.size(); i++) {
            Individual individual = individuals.get(i);
            if (!individualList.contains(individual.getId())) {
                individualList.add(individual.getId());
                indFileContents
                        .append(individual.getId() + "\t" + getIndividualGenderCode(sModule, individual.getId())
                                + "\t" + (individual.getPopulation() == null ? "." : individual.getPopulation())
                                + LINE_SEPARATOR);
            }
        }

        String exportName = sModule + "_" + markerCount + "variants_" + individualList.size() + "individuals";
        zos.putNextEntry(new ZipEntry(exportName + ".ind"));
        zos.write(indFileContents.toString().getBytes());

        zos.putNextEntry(new ZipEntry(exportName + ".eigenstratgeno"));

        int avgObjSize = (Integer) mongoTemplate
                .getCollection(mongoTemplate.getCollectionName(VariantRunData.class)).getStats()
                .get("avgObjSize");
        int nChunkSize = nMaxChunkSizeInMb * 1024 * 1024 / avgObjSize;
        short nProgress = 0, nPreviousProgress = 0;
        long nLoadedMarkerCount = 0;

        while (markerCursor.hasNext()) {
            int nLoadedMarkerCountInLoop = 0;
            Map<Comparable, String> markerChromosomalPositions = new LinkedHashMap<Comparable, String>();
            boolean fStartingNewChunk = true;
            markerCursor.batchSize(nChunkSize);
            while (markerCursor.hasNext()
                    && (fStartingNewChunk || nLoadedMarkerCountInLoop % nChunkSize != 0)) {
                DBObject exportVariant = markerCursor.next();
                DBObject refPos = (DBObject) exportVariant.get(VariantData.FIELDNAME_REFERENCE_POSITION);
                markerChromosomalPositions.put((Comparable) exportVariant.get("_id"),
                        refPos.get(ReferencePosition.FIELDNAME_SEQUENCE) + ":"
                                + refPos.get(ReferencePosition.FIELDNAME_START_SITE));
                nLoadedMarkerCountInLoop++;
                fStartingNewChunk = false;
            }

            List<Comparable> currentMarkers = new ArrayList<Comparable>(markerChromosomalPositions.keySet());
            LinkedHashMap<VariantData, Collection<VariantRunData>> variantsAndRuns = MgdbDao.getSampleGenotypes(
                    mongoTemplate, sampleIDs, currentMarkers, true,
                    null /*new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_SEQUENCE).and(new Sort(VariantData.FIELDNAME_REFERENCE_POSITION + "." + ChromosomalPosition.FIELDNAME_START_SITE))*/); // query mongo db for matching genotypes
            for (VariantData variant : variantsAndRuns.keySet()) // read data and write results into temporary files (one per sample)
            {
                Comparable variantId = variant.getId();

                List<String> chromAndPos = Helper.split(markerChromosomalPositions.get(variantId), ":");
                if (chromAndPos.size() == 0)
                    LOG.warn("Chromosomal position not found for marker " + variantId);
                // LOG.debug(marker + "\t" + (chromAndPos.length == 0 ? "0" : chromAndPos[0]) + "\t" + 0 + "\t" + (chromAndPos.length == 0 ? 0l : Long.parseLong(chromAndPos[1])) + LINE_SEPARATOR);
                if (markerSynonyms != null) {
                    Comparable syn = markerSynonyms.get(variantId);
                    if (syn != null)
                        variantId = syn;
                }
                snpFileWriter.write(variantId + "\t" + (chromAndPos.size() == 0 ? "0" : chromAndPos.get(0))
                        + "\t" + 0 + "\t" + (chromAndPos.size() == 0 ? 0l : Long.parseLong(chromAndPos.get(1)))
                        + LINE_SEPARATOR);

                Map<String, List<String>> individualGenotypes = new LinkedHashMap<String, List<String>>();
                Collection<VariantRunData> runs = variantsAndRuns.get(variant);
                if (runs != null)
                    for (VariantRunData run : runs)
                        for (Integer sampleIndex : run.getSampleGenotypes().keySet()) {
                            SampleGenotype sampleGenotype = run.getSampleGenotypes().get(sampleIndex);
                            String individualId = individuals
                                    .get(sampleIDs
                                            .indexOf(new SampleId(run.getId().getProjectId(), sampleIndex)))
                                    .getId();

                            Integer gq = null;
                            try {
                                gq = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_GQ);
                            } catch (Exception ignored) {
                            }
                            if (gq != null && gq < nMinimumGenotypeQuality)
                                continue;

                            Integer dp = null;
                            try {
                                dp = (Integer) sampleGenotype.getAdditionalInfo().get(VariantData.GT_FIELD_DP);
                            } catch (Exception ignored) {
                            }
                            if (dp != null && dp < nMinimumReadDepth)
                                continue;

                            String gtCode = sampleGenotype.getCode();
                            List<String> storedIndividualGenotypes = individualGenotypes.get(individualId);
                            if (storedIndividualGenotypes == null) {
                                storedIndividualGenotypes = new ArrayList<String>();
                                individualGenotypes.put(individualId, storedIndividualGenotypes);
                            }
                            storedIndividualGenotypes.add(gtCode);
                        }

                for (int j = 0; j < individualList
                        .size(); j++ /* we use this list because it has the proper ordering*/) {
                    String individualId = individualList.get(j);
                    List<String> genotypes = individualGenotypes.get(individualId);
                    HashMap<Object, Integer> genotypeCounts = new HashMap<Object, Integer>(); // will help us to keep track of missing genotypes
                    int highestGenotypeCount = 0;
                    String mostFrequentGenotype = null;
                    if (genotypes != null)
                        for (String genotype : genotypes) {
                            if (genotype.length() == 0)
                                continue; /* skip missing genotypes */

                            int gtCount = 1 + MgdbDao.getCountForKey(genotypeCounts, genotype);
                            if (gtCount > highestGenotypeCount) {
                                highestGenotypeCount = gtCount;
                                mostFrequentGenotype = genotype;
                            }
                            genotypeCounts.put(genotype, gtCount);
                        }

                    List<String> alleles = mostFrequentGenotype == null ? new ArrayList<String>()
                            : variant.getAllelesFromGenotypeCode(mostFrequentGenotype);

                    int nOutputCode = 0;
                    if (mostFrequentGenotype == null)
                        nOutputCode = 9;
                    else
                        for (String all : Helper.split(mostFrequentGenotype, "/"))
                            if ("0".equals(all))
                                nOutputCode++;
                    if (j == 0 && variant.getKnownAlleleList().size() > 2)
                        warningFileWriter.write("- Variant " + variant.getId()
                                + " is multi-allelic. Make sure Eigenstrat genotype encoding specifications are suitable for you.\n");
                    zos.write(("" + nOutputCode).getBytes());

                    if (genotypeCounts.size() > 1 || alleles.size() > 2) {
                        if (genotypeCounts.size() > 1)
                            warningFileWriter.write("- Dissimilar genotypes found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting most frequent: " + nOutputCode + "\n");
                        if (alleles.size() > 2)
                            warningFileWriter.write("- More than 2 alleles found for variant "
                                    + (variantId == null ? variant.getId() : variantId) + ", individual "
                                    + individualId + ". Exporting only the first 2 alleles.\n");
                    }
                }
                zos.write((LINE_SEPARATOR).getBytes());
            }

            if (progress.hasAborted())
                return;

            nLoadedMarkerCount += nLoadedMarkerCountInLoop;
            nProgress = (short) (nLoadedMarkerCount * 100 / markerCount);
            if (nProgress > nPreviousProgress) {
                // if (nProgress%5 == 0)
                //    LOG.info("============= exportData: " + nProgress + "% =============" + (System.currentTimeMillis() - before)/1000 + "s");
                progress.setCurrentStepProgress(nProgress);
                nPreviousProgress = nProgress;
            }
        }

        snpFileWriter.close();
        zos.putNextEntry(new ZipEntry(exportName + ".snp"));
        BufferedReader in = new BufferedReader(new FileReader(snpFile));
        String sLine;
        while ((sLine = in.readLine()) != null)
            zos.write((sLine + "\n").getBytes());
        in.close();

        warningFileWriter.close();
        if (warningFile.length() > 0) {
            zos.putNextEntry(new ZipEntry(exportName + "-REMARKS.txt"));
            int nWarningCount = 0;
            in = new BufferedReader(new FileReader(warningFile));
            while ((sLine = in.readLine()) != null) {
                zos.write((sLine + "\n").getBytes());
                nWarningCount++;
            }
            LOG.info("Number of Warnings for export (" + exportName + "): " + nWarningCount);
            in.close();
        }
        warningFile.delete();

        zos.close();
        progress.setCurrentStepProgress((short) 100);
    } finally {
        if (snpFile != null && snpFile.exists())
            snpFile.delete();
    }
}

From source file:com.ikanow.aleph2.storage_service_hdfs.services.TestHdfsDataWriteService.java

/** Get some easy testing out of the way
 *       HfdsDataWriteService.getCanonicalCodec
 *       HfdsDataWriteService.wrapOutputInCodec
 */
@Test
public void test_utilityMethods_codecs() {

    // No codec
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                        .with(DataSchemaBean::storage_schema, BeanTemplateUtils.build(StorageSchemaBean.class)
                                .with(StorageSchemaBean::processed,
                                        BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                                .with(StorageSchemaBean.StorageSubSchemaBean::codec, "gzip")
                                                .done().get())
                                .done().get())
                        .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.raw);
        assertEquals(Optional.empty(), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertEquals(out_in, out_out);
    }
    // Malformed codec
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::storage_schema,
                                BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                        BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                                .with(StorageSchemaBean.StorageSubSchemaBean::codec, "banana")
                                                .done().get())
                                        .done().get())
                                .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.json);
        assertEquals(Optional.of("banana"), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertEquals(out_in, out_out);
    }
    // gz
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::storage_schema,
                                BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                        BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                                .with(StorageSchemaBean.StorageSubSchemaBean::codec, "gzip")
                                                .done().get())
                                        .done().get())
                                .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.json);
        assertEquals(Optional.of("gz"), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertTrue("Stream is gzip: " + out_out.getClass().getSimpleName(),
                out_out instanceof java.util.zip.GZIPOutputStream);
    }
    //gzip
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class).with(
                        DataSchemaBean::storage_schema,
                        BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                        .with(StorageSchemaBean.StorageSubSchemaBean::codec, "gz").done().get())
                                .done().get())
                        .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.json);
        assertEquals(Optional.of("gz"), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertTrue("Stream is gzip: " + out_out.getClass().getSimpleName(),
                out_out instanceof java.util.zip.GZIPOutputStream);
    }
    //fr.sz
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::storage_schema,
                                BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                        BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                                .with(StorageSchemaBean.StorageSubSchemaBean::codec, "fr.sz")
                                                .done().get())
                                        .done().get())
                                .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.json);
        assertEquals(Optional.of("fr.sz"), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertTrue("Stream is snappy framed: " + out_out.getClass().getSimpleName(),
                out_out instanceof org.xerial.snappy.SnappyFramedOutputStream);
    }
    //snappy_framed
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::storage_schema,
                                BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                        BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                                .with(StorageSchemaBean.StorageSubSchemaBean::codec,
                                                        "snappy_framed")
                                                .done().get())
                                        .done().get())
                                .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.json);
        assertEquals(Optional.of("fr.sz"), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertTrue("Stream is snappy framed: " + out_out.getClass().getSimpleName(),
                out_out instanceof org.xerial.snappy.SnappyFramedOutputStream);
    }
    //sz
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class).with(
                        DataSchemaBean::storage_schema,
                        BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                        .with(StorageSchemaBean.StorageSubSchemaBean::codec, "sz").done().get())
                                .done().get())
                        .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.json);
        assertEquals(Optional.of("sz"), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertTrue("Stream is snappy: " + out_out.getClass().getSimpleName(),
                out_out instanceof org.xerial.snappy.SnappyOutputStream);
    }
    //snappy
    {
        final DataBucketBean test_bucket = BeanTemplateUtils.build(DataBucketBean.class)
                .with(DataBucketBean::full_name, "/test/static")
                .with(DataBucketBean::data_schema,
                        BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::storage_schema,
                                BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                        BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                                .with(StorageSchemaBean.StorageSubSchemaBean::codec, "snappy")
                                                .done().get())
                                        .done().get())
                                .done().get())
                .done().get();

        OutputStream out_in = new ByteArrayOutputStream();

        Optional<String> test = HfdsDataWriteService.getCanonicalCodec(
                test_bucket.data_schema().storage_schema(), IStorageService.StorageStage.json);
        assertEquals(Optional.of("sz"), test);

        final OutputStream out_out = HfdsDataWriteService.wrapOutputInCodec(test, out_in);
        assertTrue("Stream is snappy: " + out_out.getClass().getSimpleName(),
                out_out instanceof org.xerial.snappy.SnappyOutputStream);
    }

}

From source file:de.mpg.mpdl.inge.exportmanager.Export.java

/**
 * Walk through the itemList XML, fetch all files from components via URIs and put them into the
 * archive {@link OutputStream} aos
 * 
 * @param aos - archive {@link OutputStream}
 * @param itemList - XML with the files to be fetched, see NS:
 *        http://www.escidoc.de/schemas/components/0.7
 * @throws ExportManagerException
 */
private void fetchComponentsDo(OutputStream aos, String itemList) throws ExportManagerException {
    Document doc = parseDocument(itemList);
    NodeIterator ni = getFilteredNodes(new ComponentNodeFilter(), doc);

    // login only once
    String userHandle;
    try {
        userHandle = AdminHelper.loginUser(USER_ID, PASSWORD);
    } catch (Exception e) {
        throw new ExportManagerException("Cannot login", e);
    }

    String fileName;
    Node n;
    while ((n = ni.nextNode()) != null) {

        Element componentElement = (Element) n;
        NodeList nl = componentElement.getElementsByTagNameNS(COMPONENTS_NS, "content");
        Element contentElement = (Element) nl.item(0);
        if (contentElement == null) {
            throw new ExportManagerException(
                    "Wrong item XML: {" + COMPONENTS_NS + "}component element doesn't contain content element. "
                            + "Component id: " + componentElement.getAttributeNS(XLINK_NS, "href"));
        }
        String href = contentElement.getAttributeNS(XLINK_NS, "href");
        String storageStatus = contentElement.getAttribute("storage");

        // get file name
        if ("internal-managed".equals(storageStatus)) {
            NodeIterator nif = ((DocumentTraversal) doc).createNodeIterator(componentElement,
                    NodeFilter.SHOW_ELEMENT, new FileNameNodeFilter(), true);
            Node nf;

            if ((nf = nif.nextNode()) != null) {
                fileName = ((Element) nf).getTextContent();

                // normalize the file name: keep only the first whitespace-delimited token
                Matcher m = Pattern.compile("^([\\w.]+?)(\\s+|$)", Pattern.CASE_INSENSITIVE | Pattern.DOTALL)
                        .matcher(fileName);
                m.find();
                fileName = m.group(1);
            } else {
                throw new ExportManagerException("Missed file property: {" + COMPONENTS_NS
                        + "}component element doesn't contain file-name element (md-records/md-record/file:file/dc:title). "
                        + "Component id: " + componentElement.getAttributeNS(XLINK_NS, "href"));
            }
        }
        // TODO: the external-managed will be processed later
        else {
            throw new ExportManagerException("Missed internal-managed file in {" + COMPONENTS_NS
                    + "}component: components/component/content[@storage=\"internal-managed\"]"
                    + "Component id: " + componentElement.getAttributeNS(XLINK_NS, "href"));
        }

        logger.info("link to the content: " + href);
        logger.info("storage status: " + storageStatus);
        logger.info("fileName: " + fileName);

        // get file via URI
        String url;
        try {
            url = PropertyReader.getFrameworkUrl() + href;
        } catch (Exception e) {
            throw new ExportManagerException("Cannot get framework url", e);
        }

        logger.info("url=" + url);
        GetMethod method = new GetMethod(url);

        method.setFollowRedirects(false);
        method.setRequestHeader("Cookie", "escidocCookie=" + userHandle);

        // Execute the method with HttpClient.
        HttpClient client = new HttpClient();
        try {
            ProxyHelper.executeMethod(client, method);
        } catch (Exception e) {
            throw new ExportManagerException("Cannot execute HttpMethod", e);
        }

        int status = method.getStatusCode();
        logger.info("Status=" + status);

        if (status != 200)
            fileName += ".error" + status;

        byte[] responseBody;
        try {
            responseBody = method.getResponseBody();
        } catch (Exception e) {

            throw new ExportManagerException("Cannot get Response Body", e);

        }
        InputStream bis = new BufferedInputStream(new ByteArrayInputStream(responseBody));

        if (aos instanceof ZipOutputStream) {
            ZipEntry ze = new ZipEntry(fileName);
            ze.setSize(responseBody.length);
            try {
                ((ZipOutputStream) aos).putNextEntry(ze);
                writeFromStreamToStream(bis, aos);
                ((ZipOutputStream) aos).closeEntry();
            } catch (Exception e) {
                throw new ExportManagerException("zip2stream generation problem", e);
            }

        } else if (aos instanceof TarOutputStream) {
            TarEntry te = new TarEntry(fileName);
            te.setSize(responseBody.length);
            try {
                ((TarOutputStream) aos).putNextEntry(te);
                writeFromStreamToStream(bis, aos);
                ((TarOutputStream) aos).closeEntry();
            } catch (Exception e) {
                throw new ExportManagerException("tar2stream generation problem", e);
            }
        } else {
            throw new ExportManagerException("Unsupported archive output stream: " + aos.getClass());
        }
        try {
            bis.close();
        } catch (Exception e) {
            throw new ExportManagerException("Cannot close InputStream", e);
        }
    }

}