Example usage for org.apache.hadoop.io.compress DefaultCodec createOutputStream


Introduction

On this page you can find example usages of org.apache.hadoop.io.compress.DefaultCodec#createOutputStream.

Prototype

@Override
public CompressionOutputStream createOutputStream(OutputStream out) throws IOException
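
A minimal sketch of how the method is typically called (not taken from any of the source files below; the helper name and variables are illustrative only): configure the codec, wrap a plain OutputStream, write, then call finish() so the compressor emits its final block.

// Illustrative sketch: compress a byte array in memory with DefaultCodec.
public static byte[] compress(byte[] input) throws IOException {
    Configuration conf = new Configuration();
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf); // the codec must be configured before creating streams

    ByteArrayOutputStream raw = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(raw);
    out.write(input);
    out.finish(); // flush the final compressed block
    out.close();
    return raw.toByteArray();
}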

Usage

From source file: TestCodec.java

License: Open Source License

public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);

    // Compress into an in-memory buffer.
    DataOutputBuffer chunksWriteBuffer = new DataOutputBuffer();
    CompressionOutputStream compressionOutputStream = codec.createOutputStream(chunksWriteBuffer);

    // Prepare the matching decompression stream; it reads from chunkReadBuffer
    // once that buffer has been pointed at the compressed bytes below.
    DataInputBuffer chunkReadBuffer = new DataInputBuffer();
    CompressionInputStream compressionInputStream = codec.createInputStream(chunkReadBuffer);

    String str = "laksjldfkjalskdjfl;aksjdflkajsldkfjalksjdflkajlsdkfjlaksjdflka";
    compressionOutputStream.write(str.getBytes());
    compressionOutputStream.finish();

    byte[] data = chunksWriteBuffer.getData();
    System.out.println(str.length()); // uncompressed length
    System.out.println(chunksWriteBuffer.getLength()); // compressed length

    // Decompress and print the original string.
    chunkReadBuffer.reset(data, chunksWriteBuffer.getLength());

    DataOutputBuffer dob = new DataOutputBuffer();
    IOUtils.copyBytes(compressionInputStream, dob, conf);
    System.out.println(new String(dob.getData(), 0, dob.getLength()));

}

From source file: org.springframework.data.hadoop.fs.HdfsResourceLoaderLegacyTest.java

License: Apache License

@Test
public void testDecompressedStream() throws Exception {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(fs.getConf());
    // Write a compressed file whose name carries the codec's default extension.
    String name = "local/" + UUID.randomUUID() + codec.getDefaultExtension();
    OutputStream outputStream = codec.createOutputStream(fs.create(new Path(name)));
    byte[] content = name.getBytes();
    outputStream.write(content);
    outputStream.close();

    // The resource loader recognizes the extension and decompresses transparently.
    Resource resource = loader.getResource(name);
    assertNotNull(resource);
    InputStream inputStream = resource.getInputStream();
    assertEquals(DecompressorStream.class, inputStream.getClass());
    assertTrue(TestUtils.compareStreams(new ByteArrayInputStream(content), inputStream));
}

From source file: org.springframework.data.hadoop.fs.HdfsResourceLoaderLegacyTest.java

License: Apache License

@Test
public void testCompressedStream() throws Exception {

    DefaultCodec codec = new DefaultCodec();
    codec.setConf(fs.getConf());
    String name = "local/" + UUID.randomUUID() + codec.getDefaultExtension();
    OutputStream outputStream = codec.createOutputStream(fs.create(new Path(name)));
    byte[] content = name.getBytes();
    outputStream.write(content);
    outputStream.close();

    // Disable codec handling so the loader hands back the raw, still-compressed stream.
    loader.setUseCodecs(false);

    try {
        Resource resource = loader.getResource(name);
        assertNotNull(resource);
        InputStream inputStream = resource.getInputStream();
        System.out.println(inputStream.getClass());
        assertFalse(DecompressorStream.class.equals(inputStream.getClass()));
        assertFalse(TestUtils.compareStreams(new ByteArrayInputStream(content), inputStream));
    } finally {
        loader.setUseCodecs(true);
    }
}