Example usage for org.apache.hadoop.io.compress CompressionInputStream subclasses

List of usage examples for subclasses of org.apache.hadoop.io.compress CompressionInputStream

Introduction

On this page you can find example usages of subclasses of org.apache.hadoop.io.compress CompressionInputStream.

Usage

From source file com.netflix.bdp.inviso.fs.WrappedCompressionInputStream.java

/**
 * Wrapping CompressionInputStream so the job history loader can read a compressed stream.
 *
 * @author dweeks
 */
public class WrappedCompressionInputStream extends CompressionInputStream implements PositionedReadable {

From source file data.intelligence.platform.yarn.etl.io.SchemaAwareCompressionInputStream.java

/**
 *
 * SchemaAwareCompressionInputStream adds the ability to inform the compression
 * stream what column is being read.
 *
 */

From source file io.airlift.compress.gzip.HadoopJdkGzipInputStream.java

class HadoopJdkGzipInputStream extends CompressionInputStream {
    private final byte[] oneByte = new byte[1];
    private final GZIPInputStream input;

    public HadoopJdkGzipInputStream(InputStream input, int bufferSize) throws IOException {
        super(input);

From source file io.airlift.compress.lz4.HadoopLz4InputStream.java

class HadoopLz4InputStream extends CompressionInputStream {
    private final Lz4Decompressor decompressor = new Lz4Decompressor();
    private final InputStream in;
    private final byte[] uncompressedChunk;

    private int uncompressedBlockLength;

From source file io.airlift.compress.lzo.HadoopLzoInputStream.java

class HadoopLzoInputStream extends CompressionInputStream {
    private final LzoDecompressor decompressor = new LzoDecompressor();
    private final InputStream in;
    private final byte[] uncompressedChunk;

    private int uncompressedBlockLength;

From source file io.airlift.compress.lzo.HadoopLzopInputStream.java

class HadoopLzopInputStream extends CompressionInputStream {
    private static final int LZO_IMPLEMENTATION_VERSION = 0x2060;

    private final LzoDecompressor decompressor = new LzoDecompressor();
    private final InputStream in;
    private final byte[] uncompressedChunk;

From source file io.airlift.compress.snappy.HadoopSnappyInputStream.java

class HadoopSnappyInputStream extends CompressionInputStream {
    private final SnappyDecompressor decompressor = new SnappyDecompressor();
    private final InputStream in;

    private int uncompressedBlockLength;
    private byte[] uncompressedChunk = new byte[0];

From source file org.apache.asterix.hivecompat.io.SchemaAwareCompressionInputStream.java

/**
 *
 * SchemaAwareCompressionInputStream adds the ability to inform the compression
 * stream what column is being read.
 *
 */

From source file org.apache.tajo.storage.rcfile.SchemaAwareCompressionInputStream.java

/**
 *
 * SchemaAwareCompressionInputStream adds the ability to inform the compression
 * stream what column is being read.
 *
 */

From source file tajo.storage.rcfile.SchemaAwareCompressionInputStream.java

/**
 *
 * SchemaAwareCompressionInputStream adds the ability to inform the compression
 * stream what column is being read.
 *
 */