Example usage for org.apache.hadoop.io.compress CodecPool returnCompressor


Introduction

On this page you can find example usages of org.apache.hadoop.io.compress.CodecPool.returnCompressor.

Prototype

public static void returnCompressor(Compressor compressor) 

Document

Return the Compressor to the pool.
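
The usual pattern pairs CodecPool.getCompressor(CompressionCodec) with returnCompressor in a finally block, so the pooled instance is recycled even if the write fails. Below is a minimal sketch of that pattern; the GzipCodec and the sample payload are chosen purely for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class ReturnCompressorSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);

        Compressor compressor = null;
        try {
            // Borrow a Compressor from the shared pool (a new one is created if none is cached)
            compressor = CodecPool.getCompressor(codec);
            CompressionOutputStream out = codec.createOutputStream(System.out, compressor);
            out.write("hello pooled compression\n".getBytes("UTF-8"));
            out.finish();
        } finally {
            // Return the Compressor to the pool; a null argument is ignored
            CodecPool.returnCompressor(compressor);
        }
    }
}

returnCompressor resets the instance before pooling it, so the caller must not use the Compressor after handing it back.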

Usage

From source file:io.aos.hdfs.PooledStreamCompressor.java

License:Apache License

public static void main(String... args) throws Exception {
    String codecClassname = args[0];
    Class<?> codecClass = Class.forName(codecClassname);
    Configuration conf = new Configuration();
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
    Compressor compressor = null;
    try {
        // Borrow a pooled Compressor for this codec
        compressor = CodecPool.getCompressor(codec);
        CompressionOutputStream out = codec.createOutputStream(System.out, compressor);
        IOUtils.copyBytes(System.in, out, 4096, false);
        out.finish();
    } finally {
        // Hand the Compressor back to the pool so it can be reused
        CodecPool.returnCompressor(compressor);
    }
}
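
Note that returnCompressor is invoked in a finally block, so the Compressor is recycled even if the copy fails; since CodecPool.returnCompressor ignores a null argument, this also covers the case where getCompressor itself threw.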

From source file:io.transwarp.flume.sink.HDFSCompressedDataStream.java

License:Apache License

@Override
public void close() throws IOException {
    serializer.flush();
    serializer.beforeClose();
    if (!isFinished) {
        cmpOut.finish();
        isFinished = true;
    }
    fsOut.flush();
    hflushOrSync(fsOut);
    cmpOut.close();
    if (compressor != null) {
        CodecPool.returnCompressor(compressor);
        compressor = null;
    }
    unregisterCurrentStream();
}
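
Here the compressor field is nulled out immediately after it is returned, which guards against handing the same instance back to the pool twice if close() is invoked again.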

From source file:nl.basjes.hadoop.io.compress.TestSplittableCodecSeams.java

License:Apache License

/**
 * Write the specified number of records to a file in the test dir using the codec.
 * Records are simply lines of random ASCII.
 */
private static Path writeSplitTestFile(final Configuration conf,
        final Class<? extends SplittableCompressionCodec> codecClass, final long records,
        final int recordLength, final int trailingSizeJitter, final int randomizeEveryNChars)
        throws IOException {

    RAND.setSeed(1); // Make the tests reproducible

    final FileSystem fs = FileSystem.getLocal(conf);
    final SplittableCompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);

    final Path wd = new Path(new Path(System.getProperty("test.build.data", "/tmp")).makeQualified(fs.getUri(),
            fs.getWorkingDirectory()), codec.getClass().getSimpleName());

    final Path file = new Path(wd,
            "test-" + records + "-" + recordLength + "-" + trailingSizeJitter + codec.getDefaultExtension());
    DataOutputStream out = null;
    final Compressor cmp = CodecPool.getCompressor(codec);
    try {
        out = new DataOutputStream(codec.createOutputStream(fs.create(file, true), cmp));

        for (long seq = 1; seq <= records; ++seq) {
            final String line = randomGibberish(
                    recordLength + (trailingSizeJitter > 0 ? RAND.nextInt(trailingSizeJitter) : 0),
                    randomizeEveryNChars) + "\n";
            // There must be a simpler way to output ASCII instead of 2-byte Unicode
            out.writeBytes(new String(line.getBytes("UTF-8"), "US-ASCII"));
        }
    } finally {
        IOUtils.cleanup(LOG, out);
        CodecPool.returnCompressor(cmp);
    }
    return file;
}
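
The pooled Compressor is returned in the same finally block that cleans up the output stream, so a failed write cannot leak the instance.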

From source file:org.springframework.data.hadoop.serialization.ResourceSerializationFormat.java

License:Apache License

/**
 * Writes the content of Spring {@link Resource}s to a single HDFS location.
 */
@Override
protected SerializationWriterSupport createWriter(final OutputStream output) {
    // Extend and customize Serialization Writer template
    return new SerializationWriterSupport() {

        private OutputStream outputStream = output;

        private InputStream resourceSeparatorInputStream;

        @Override
        protected Closeable doOpen() throws IOException {

            resourceSeparatorInputStream = null;

            CompressionCodec codec = CompressionUtils.getHadoopCompression(getConfiguration(),
                    getCompressionAlias());

            // If no compression is specified, or the passed stream already has compression capabilities...
            if (codec == null || CompressionOutputStream.class.isInstance(outputStream)) {
                // ...just return original stream untouched
                return outputStream;
            }

            // Re-use a Compressor from the underlying CodecPool where possible
            final Compressor compressor = CodecPool.getCompressor(codec);

            // Create compression stream wrapping passed stream
            outputStream = codec.createOutputStream(outputStream, compressor);

            return new Closeable() {

                @Override
                public void close() throws IOException {
                    resourceSeparatorInputStream = null;
                    IOUtils.closeStream(outputStream);
                    CodecPool.returnCompressor(compressor);
                }
            };
        }

        @Override
        protected void doWrite(Resource source) throws IOException {
            InputStream inputStream = null;
            try {
                writeSeparator();

                inputStream = source.getInputStream();

                // Write source to HDFS destination
                copyBytes(inputStream, outputStream, getConfiguration(), /* close */false);

            } finally {
                closeStream(inputStream);
            }
        }

        protected void writeSeparator() throws IOException {
            if (getResourceSeparator() == null) {
                return;
            }

            if (resourceSeparatorInputStream == null) {

                // The first call initializes 'resourceSeparatorInputStream' and does not write anything

                resourceSeparatorInputStream = toInputStream(getResourceSeparator(), "UTF-8");

                return;
            }

            resourceSeparatorInputStream.reset();

            // Write resource separator to HDFS destination
            copyBytes(resourceSeparatorInputStream, outputStream, getConfiguration(), /* close */false);
        }
    };
}
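
In this variant the Closeable returned by doOpen() owns the cleanup: closing it closes the wrapping compression stream and then returns the borrowed Compressor to the pool.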