Example usage for org.apache.hadoop.io.compress DirectDecompressor decompress

List of usage examples for org.apache.hadoop.io.compress DirectDecompressor decompress

Introduction

On this page you can find an example usage for org.apache.hadoop.io.compress DirectDecompressor decompress.

Prototype

public void decompress(ByteBuffer src, ByteBuffer dst) throws IOException;

Source Link

Usage

From source file: parquet.hadoop.CodecFactoryExposer.java

License: Apache License

/**
 * Decompresses {@code compressedSize} bytes from {@code compressedByteBuf} into
 * {@code uncompressedByteBuf} using the codec identified by {@code codecName}.
 *
 * <p>When the resolved codec implements {@link DirectDecompressionCodec}, the
 * zero-copy direct decompression path is used. Otherwise this falls back to the
 * heap-based {@code decompress(BytesInput, int, CompressionCodecName)} overload
 * and copies the result back into the direct output buffer (DrillBufs can only
 * refer to direct memory, so a byte-array-backed BytesInput cannot be returned
 * as-is).
 *
 * @param codecName           name of the compression codec to use
 * @param compressedByteBuf   buffer holding the compressed input bytes
 * @param uncompressedByteBuf buffer that receives the decompressed output
 * @param compressedSize      number of compressed bytes to read
 * @param uncompressedSize    expected number of decompressed bytes
 * @return a {@code BytesInput} view over the decompressed output buffer
 * @throws IOException if decompression fails
 */
public BytesInput decompress(CompressionCodecName codecName, ByteBuf compressedByteBuf,
        ByteBuf uncompressedByteBuf, int compressedSize, int uncompressedSize) throws IOException {
    ByteBuffer inpBuffer = compressedByteBuf.nioBuffer(0, compressedSize);
    ByteBuffer outBuffer = uncompressedByteBuf.nioBuffer(0, uncompressedSize);
    CompressionCodec c = getCodec(codecName);
    //TODO: Create the decompressor only once at init time.

    // Prefer the zero-copy direct path when the codec supports it; instanceof
    // replaces the original getClass()/isAssignableFrom dance.
    DirectDecompressor decompr = null;
    if (c instanceof DirectDecompressionCodec) {
        decompr = ((DirectDecompressionCodec) c).createDirectDecompressor();
    }

    if (decompr != null) {
        decompr.decompress(inpBuffer, outBuffer);
    } else {
        logger.warn("This Hadoop implementation does not support a " + codecName
                + " direct decompression codec interface. "
                + "Direct decompression is available only on *nix systems with Hadoop 2.3 or greater. "
                + "Read operations will be a little slower. ");
        BytesInput outBytesInp = this.decompress(new HadoopByteBufBytesInput(inpBuffer, 0, inpBuffer.limit()),
                uncompressedSize, codecName);
        // COPY the data back into the output buffer.
        // (DrillBufs can only refer to direct memory, so we cannot pass back a BytesInput backed
        // by a byte array).
        outBuffer.put(outBytesInp.toByteArray());
    }
    return new HadoopByteBufBytesInput(outBuffer, 0, outBuffer.limit());
}