Example usage for org.apache.hadoop.io.compress CompressionInputStream read

Introduction

On this page you can find example usages of org.apache.hadoop.io.compress CompressionInputStream read.

Prototype

@Override
public abstract int read(byte[] b, int off, int len) throws IOException;

Document

Reads up to len bytes of decompressed data from the stream into b, starting at offset off. Returns the number of bytes read, or -1 if the end of the stream has been reached.
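
Before the examples below, here is a minimal sketch of the typical read loop. It is an illustration only, not taken from any of the sources on this page: it assumes a GzipCodec and a gzip-compressed file path passed as the first command-line argument, and the class name CompressionReadSketch is our own. Any installed codec would work the same way.

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class CompressionReadSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Sketch: substitute any installed codec for GzipCodec.
        CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);

        // Wrap the raw stream; read() then returns decompressed bytes.
        try (InputStream raw = new FileInputStream(args[0]);
             CompressionInputStream in = codec.createInputStream(raw)) {
            byte[] buf = new byte[4096];
            int n;
            // read(byte[], int, int) may return fewer bytes than requested;
            // -1 signals end of stream.
            while ((n = in.read(buf, 0, buf.length)) > 0) {
                System.out.println("read " + n + " decompressed bytes");
            }
        }
    }
}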

Usage

From source file: Compress.TestLZO.java

License: Open Source License

public static void main(String[] argv) throws IOException {
    System.out.println(System.getProperty("java.library.path"));

    Configuration conf = new Configuration();

    conf.setInt("io.compression.codec.lzo.buffersize", 64 * 1024);

    LzoCodec codec = new LzoCodec();
    codec.setConf(conf);

    OutputStream out = new DataOutputBuffer();
    CompressionOutputStream out2 = codec.createOutputStream(out);

    byte[] str2 = new byte[20];

    int num = 10000;
    for (long i = 0; i < num; i++) {
        Util.long2bytes(str2, i);
        out2.write(str2, 0, 8);

    }
    out2.finish();

    System.out.println("org len:" + num * 8 + ", compressed len:" + ((DataOutputBuffer) out).getLength());

    InputStream in = new DataInputBuffer();
    ((DataInputBuffer) in).reset(((DataOutputBuffer) out).getData(), 0, ((DataOutputBuffer) out).getLength());

    CompressionInputStream in2 = codec.createInputStream(in);

    byte[] buf = new byte[100];
    for (long i = 0; i < num; i++) {
        int count = in2.read(buf, 0, 8);
        if (count > 0) {
            long value = Util.bytes2long(buf, 0, 8);
            if (value != i) {
                System.out.println(i + ",count:" + count + ",value:" + value);
            } else if (i > (num - 20)) {
                System.out.println(i + ",value:" + value);
            }

        } else {
            System.out.println("count:" + count + ", string " + i);
            break;
        }
    }

    in2.close();

    System.out.println("test compress array...");

    OutputStream out3 = new DataOutputBuffer();
    CompressionOutputStream out4 = codec.createOutputStream(out3);

    DataOutputBuffer tout3 = new DataOutputBuffer();

    for (long i = 0; i < num; i++) {
        Util.long2bytes(str2, i);
        out4.write(str2, 0, 8);
    }
    out4.finish();

    System.out.println("org len:" + num * 8 + ", compressed len:" + ((DataOutputBuffer) out3).getLength());

    InputStream in3 = new DataInputBuffer();
    ((DataInputBuffer) in3).reset(((DataOutputBuffer) out3).getData(), 0,
            ((DataOutputBuffer) out3).getLength());

    CompressionInputStream in4 = codec.createInputStream(in3);

    for (long i = 0; i < num; i++) {
        int count = in4.read(buf, 0, 8);
        if (count > 0) {
            long value = Util.bytes2long(buf, 0, 8);
            if (value != i) {
                System.out.println(i + ",count:" + count + ",value:" + value);
            }

            if (i > (num - 20)) {
                System.out.println(i + ",value:" + value);
            }

        } else {
            System.out.println("count:" + count + ", string " + i);
            break;
        }
    }

    in4.close();

}
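
Note that read may legally return fewer than the 8 requested bytes per call; the loops above rely on the codec delivering whole records. A stricter pattern keeps reading until the record is complete, as in this small sketch (the helper class and the name readFully are ours, not part of the codec API):

import java.io.IOException;
import java.io.InputStream;

final class ReadFullyHelper {
    // Keep reading until exactly len bytes arrive or the stream ends;
    // returns the number of bytes actually read.
    static int readFully(InputStream in, byte[] b, int off, int len) throws IOException {
        int total = 0;
        while (total < len) {
            int n = in.read(b, off + total, len - total);
            if (n < 0) {
                break; // end of stream before len bytes
            }
            total += n;
        }
        return total;
    }
}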

From source file: FormatStorage.Unit.java

License: Open Source License

public void transfer(long newOffset) throws Exception {
    long adjust = newOffset - offset;

    boolean isVar = segment.formatData.isVar();
    if (isVar) {
        if (!compressed) {
            int tnum = ((DataOutputBuffer) metasBuffer).getLength() / ConstVar.Sizeof_Long;
            if (tnum != recordNum) {
                throw new SEException.InnerException("tnum != recordNum");
            }

            DataOutputBuffer tmpOutputBuffer = new DataOutputBuffer();
            DataInputBuffer tmpInput = new DataInputBuffer();
            tmpInput.reset(((DataOutputBuffer) metasBuffer).getData(), 0,
                    ((DataOutputBuffer) metasBuffer).getLength());
            for (int i = 0; i < recordNum; i++) {
                long value = tmpInput.readLong() + adjust;
                tmpOutputBuffer.writeLong(value);
            }

            tmpInput.reset(tmpOutputBuffer.getData(), 0, tmpOutputBuffer.getLength());
            ((DataOutputBuffer) metasBuffer).reset();
            for (int i = 0; i < recordNum; i++) {
                ((DataOutputBuffer) metasBuffer).writeLong(tmpInput.readLong());
            }
        } else {
            compressedMetasOutput.finish();

            InputStream tmpMetasInputStream = new DataInputBuffer();
            ((DataInputBuffer) tmpMetasInputStream).reset(((DataOutputBuffer) metasBuffer).getData(), 0,
                    ((DataOutputBuffer) metasBuffer).getLength());
            CompressionInputStream tmpCompressedMetasInput = codec.createInputStream(tmpMetasInputStream);

            DataOutputBuffer tmpOutputBuffer = new DataOutputBuffer();
            for (int i = 0; i < recordNum; i++) {
                int count = 0;
                try {
                    count = tmpCompressedMetasInput.read(metaOffsetBytes, 0, ConstVar.Sizeof_Long);
                    long meta = Util.bytes2long(metaOffsetBytes, 0, ConstVar.Sizeof_Long) + adjust;

                    tmpOutputBuffer.writeLong(meta);

                } catch (Exception e) {
                    e.printStackTrace();
                    System.out.println("i:" + i + ",count:" + count);

                    throw e;
                }
            }

            ((DataOutputBuffer) metasBuffer).reset();
            compressedMetasOutput.resetState();

            DataInputBuffer tmpInputBuffer = new DataInputBuffer();
            tmpInputBuffer.reset(tmpOutputBuffer.getData(), 0, tmpOutputBuffer.getLength());
            for (int i = 0; i < recordNum; i++) {
                long newMeta = tmpInputBuffer.readLong();
                Util.long2bytes(metaOffsetBytes, newMeta);
                compressedMetasOutput.write(metaOffsetBytes, 0, ConstVar.Sizeof_Long);
            }
        }
    }

    metaOffset += adjust;
    setOffset(newOffset);
}
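
The compressed branch shows why the offsets cannot be patched in place: the unit decompresses all recordNum offsets through a temporary CompressionInputStream, adds adjust to each, then clears the buffer, calls resetState() on the compressed output, and rewrites the adjusted offsets through the codec.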

From source file: org.apache.parquet.hadoop.TestSnappyCodec.java

License: Apache License

@Test
public void TestSnappyStream() throws IOException {
    SnappyCodec codec = new SnappyCodec();
    codec.setConf(new Configuration());

    int blockSize = 1024;
    int inputSize = blockSize * 1024;

    byte[] input = new byte[inputSize];
    for (int i = 0; i < inputSize; ++i) {
        input[i] = (byte) i;
    }

    ByteArrayOutputStream compressedStream = new ByteArrayOutputStream();

    CompressionOutputStream compressor = codec.createOutputStream(compressedStream);
    int bytesCompressed = 0;
    while (bytesCompressed < inputSize) {
        int len = Math.min(inputSize - bytesCompressed, blockSize);
        compressor.write(input, bytesCompressed, len);
        bytesCompressed += len;
    }
    compressor.finish();

    byte[] rawCompressed = Snappy.compress(input);
    byte[] codecCompressed = compressedStream.toByteArray();

    // Validate that the result from the codec is the same as if we compressed the 
    // buffer directly.
    assertArrayEquals(rawCompressed, codecCompressed);

    ByteArrayInputStream inputStream = new ByteArrayInputStream(codecCompressed);
    CompressionInputStream decompressor = codec.createInputStream(inputStream);
    byte[] codecDecompressed = new byte[inputSize];
    int bytesDecompressed = 0;
    int numBytes;
    while ((numBytes = decompressor.read(codecDecompressed, bytesDecompressed, blockSize)) > 0) {
        bytesDecompressed += numBytes;
        if (bytesDecompressed == inputSize)
            break;
    }

    byte[] rawDecompressed = Snappy.uncompress(rawCompressed);

    assertArrayEquals(input, rawDecompressed);
    assertArrayEquals(input, codecDecompressed);
}
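
The test checks two properties: the codec's streaming output is byte-identical to a one-shot Snappy.compress of the whole input, and reading back through createInputStream reproduces the original bytes. The read loop requests at most blockSize bytes per call and stops once inputSize bytes have accumulated.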

From source file: org.apache.tez.runtime.library.common.shuffle.TestShuffleUtils.java

License: Apache License

@Test
public void testInternalErrorTranslation() throws Exception {
    String codecErrorMsg = "codec failure";
    CompressionInputStream mockCodecStream = mock(CompressionInputStream.class);
    when(mockCodecStream.read(any(byte[].class), anyInt(), anyInt()))
            .thenThrow(new InternalError(codecErrorMsg));
    Decompressor mockDecoder = mock(Decompressor.class);
    CompressionCodec mockCodec = mock(CompressionCodec.class);
    when(mockCodec.createDecompressor()).thenReturn(mockDecoder);
    when(mockCodec.createInputStream(any(InputStream.class), any(Decompressor.class)))
            .thenReturn(mockCodecStream);
    byte[] header = new byte[] { (byte) 'T', (byte) 'I', (byte) 'F', (byte) 1 };
    try {
        ShuffleUtils.shuffleToMemory(new byte[1024], new ByteArrayInputStream(header), 1024, 128, mockCodec,
                false, 0, mock(Logger.class), "identifier");
        Assert.fail("shuffle was supposed to throw!");
    } catch (IOException e) {
        Assert.assertTrue(e.getCause() instanceof InternalError);
        Assert.assertTrue(e.getMessage().contains(codecErrorMsg));
    }
}
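
Here the codec stream is mocked so that read throws an InternalError; the test asserts that ShuffleUtils.shuffleToMemory surfaces it as an IOException whose cause is the InternalError and whose message preserves the codec's error text.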