List of usage examples for the org.apache.hadoop.io.compress CompressionCodec interface
From source file io.airlift.compress.gzip.JdkGzipCodec.java
public class JdkGzipCodec implements CompressionCodec { @Override public CompressionOutputStream createOutputStream(OutputStream outputStream) throws IOException { return new HadoopJdkGzipOutputStream(outputStream, GZIP_BUFFER_SIZE); }
From source file io.airlift.compress.HadoopSnappyCodec.java
public class HadoopSnappyCodec implements CompressionCodec { @Override public CompressionOutputStream createOutputStream(OutputStream outputStream) throws IOException { return new SnappyCompressionOutputStream(outputStream); }
From source file io.airlift.compress.lz4.Lz4Codec.java
public class Lz4Codec implements Configurable, CompressionCodec { private Configuration conf; @Override public Configuration getConf() { return conf;
From source file io.airlift.compress.lzo.LzoCodec.java
public class LzoCodec implements Configurable, CompressionCodec { // Hadoop has a constant for this, but the LZO codebase uses a different value public static final int LZO_BUFFER_SIZE_DEFAULT = 256 * 1024; private Configuration conf;
From source file io.airlift.compress.lzo.LzopCodec.java
public class LzopCodec implements Configurable, CompressionCodec { static final byte[] LZOP_MAGIC = new byte[] { (byte) 0x89, 0x4c, 0x5a, 0x4f, 0x00, 0x0d, 0x0a, 0x1a, 0x0a }; static final int LZOP_IMPLEMENTATION_VERSION = 0x1010; static final byte LZO_1X_VARIANT = 1; private Configuration conf;
From source file io.airlift.compress.snappy.SnappyCodec.java
public class SnappyCodec implements Configurable, CompressionCodec { private Configuration conf; @Override public Configuration getConf() { return conf;
From source file org.anarres.lzo.hadoop.codec.LzoCodec.java
/**
 * A {@link org.apache.hadoop.io.compress.CompressionCodec} for a streaming
 * <b>lzo</b> compression/decompression pair.
 *
 * @see <a href="http://www.oberhumer.com/opensource/lzo/">LZO home page</a>
 */
From source file org.apache.hawq.pxf.plugins.hdfs.utilities.NotSoNiceCodec.java
/**
 * Codec class for UtilitiesTest.
 * Can't be embedded inside UtilitiesTest due to a JUnit limitation.
 */
public class NotSoNiceCodec implements CompressionCodec {
From source file org.apache.parquet.hadoop.codec.SnappyCodec.java
/**
 * Snappy compression codec for Parquet. We do not use the default Hadoop
 * one, since that codec adds a blocking structure around the base Snappy compression
 * algorithm. This is useful for Hadoop to minimize the size of compression blocks
 * for their file formats (e.g. SequenceFile), but is undesirable for Parquet, since
 * we already have the data page which provides that.
From source file org.iq80.snappy.HadoopSnappyCodec.java
public class HadoopSnappyCodec implements CompressionCodec { @Override public CompressionOutputStream createOutputStream(OutputStream outputStream) throws IOException { return new SnappyCompressionOutputStream(outputStream); }