Example usage for org.apache.hadoop.io.compress DefaultCodec setConf

List of usage examples for org.apache.hadoop.io.compress DefaultCodec setConf

Introduction

In this page you can find the example usage for org.apache.hadoop.io.compress DefaultCodec setConf.

Prototype

@Override
    public void setConf(Configuration conf) 

Source Link

Usage

From source file: com.streamsets.pipeline.stage.destination.hdfs.writer.TestRecordWriterManager.java

License: Apache License

/**
 * Verifies that a {@code RecordWriterManager} configured with zero size and
 * zero record-count cut-offs never reports a writer as over its thresholds:
 * a cut-off of 0 means "no limit", so repeated writes must not trip it.
 */
@Test
public void testNoThreshold() throws Exception {
    URI uri = new URI("file:///");
    Configuration conf = new HdfsConfiguration();
    String prefix = "prefix";
    String template = getTestDir().toString()
            + "/${YYYY()}/${MM()}/${DD()}/${hh()}/${mm()}/${ss()}/${record:value('/')}";
    TimeZone timeZone = TimeZone.getTimeZone("UTC");
    long cutOffSecs = 10;
    long cutOffSize = 0;    // 0 == unlimited file size
    long cutOffRecords = 0; // 0 == unlimited record count
    HdfsFileType fileType = HdfsFileType.TEXT;
    DefaultCodec compressionCodec = new DefaultCodec();
    // DefaultCodec is Configurable; a Configuration must be set before use.
    compressionCodec.setConf(conf);
    SequenceFile.CompressionType compressionType = null;
    String keyEL = null;
    DataGeneratorFactory generatorFactory = new DummyDataGeneratorFactory(null);
    RecordWriterManager mgr = new RecordWriterManager(uri, conf, prefix, template, timeZone, cutOffSecs,
            cutOffSize, cutOffRecords, fileType, compressionCodec, compressionType, keyEL, generatorFactory,
            targetContext, "dirPathTemplate");
    Assert.assertTrue(mgr.validateDirTemplate("g", "dirPathTemplate", new ArrayList<Stage.ConfigIssue>()));
    Date now = getFixedDate();

    Date recordDate = now;
    Record record = RecordCreator.create();
    record.set(Field.create("a"));
    RecordWriter writer = mgr.getWriter(now, recordDate, record);
    Assert.assertNotNull(writer);
    // With both cut-offs at 0 the writer must never be over threshold,
    // no matter how many records are written and flushed.
    for (int i = 0; i < 10; i++) {
        Assert.assertFalse(mgr.isOverThresholds(writer));
        writer.write(record);
        writer.flush();
    }
    Assert.assertFalse(mgr.isOverThresholds(writer));
    mgr.commitWriter(writer);
}

From source file: org.springframework.data.hadoop.fs.HdfsResourceLoaderLegacyTest.java

License: Apache License

/**
 * Writes compressed content through {@code DefaultCodec} and verifies that
 * the resource loader transparently decompresses it on read: the returned
 * stream is a {@code DecompressorStream} and yields the original bytes.
 */
@Test
public void testDecompressedStream() throws Exception {
    DefaultCodec codec = new DefaultCodec();
    // DefaultCodec is Configurable; it needs the filesystem's Configuration before use.
    codec.setConf(fs.getConf());
    // Unique file name carrying the codec's extension so the loader detects compression.
    String name = "local/" + UUID.randomUUID() + codec.getDefaultExtension();
    byte[] content = name.getBytes();
    // try-with-resources guarantees the stream is closed even if write() throws.
    try (OutputStream outputStream = codec.createOutputStream(fs.create(new Path(name)))) {
        outputStream.write(content);
    }

    Resource resource = loader.getResource(name);
    assertNotNull(resource);
    try (InputStream inputStream = resource.getInputStream()) {
        assertEquals(DecompressorStream.class, inputStream.getClass());
        assertTrue(TestUtils.compareStreams(new ByteArrayInputStream(content), inputStream));
    }
}

From source file: org.springframework.data.hadoop.fs.HdfsResourceLoaderLegacyTest.java

License: Apache License

/**
 * Writes compressed content through {@code DefaultCodec}, then disables codec
 * handling on the loader and verifies the raw (still-compressed) stream is
 * returned: it is not a {@code DecompressorStream} and does not match the
 * original bytes. The codec setting is restored in a finally block so other
 * tests are unaffected.
 */
@Test
public void testCompressedStream() throws Exception {

    DefaultCodec codec = new DefaultCodec();
    // DefaultCodec is Configurable; it needs the filesystem's Configuration before use.
    codec.setConf(fs.getConf());
    // Unique file name carrying the codec's extension so the loader would
    // normally detect compression.
    String name = "local/" + UUID.randomUUID() + codec.getDefaultExtension();
    byte[] content = name.getBytes();
    // try-with-resources guarantees the stream is closed even if write() throws.
    try (OutputStream outputStream = codec.createOutputStream(fs.create(new Path(name)))) {
        outputStream.write(content);
    }

    // Turn off codec handling: the loader should now hand back the raw bytes.
    loader.setUseCodecs(false);

    try {
        Resource resource = loader.getResource(name);
        assertNotNull(resource);
        try (InputStream inputStream = resource.getInputStream()) {
            assertFalse(DecompressorStream.class.equals(inputStream.getClass()));
            assertFalse(TestUtils.compareStreams(new ByteArrayInputStream(content), inputStream));
        }
    } finally {
        // Restore the default so subsequent tests see codec handling enabled.
        loader.setUseCodecs(true);
    }
}