Example usage for org.apache.hadoop.io.file.tfile TFile COMPRESSION_GZ

List of usage examples for org.apache.hadoop.io.file.tfile TFile COMPRESSION_GZ

Introduction

In this page you can find the example usage for org.apache.hadoop.io.file.tfile TFile COMPRESSION_GZ.

Prototype

String COMPRESSION_GZ

To view the source code for org.apache.hadoop.io.file.tfile TFile COMPRESSION_GZ, click the Source Link below.

Click Source Link

Document

compression: gzip

Usage

From source file:com.datatorrent.contrib.hdht.HadoopFilePerformanceTest.java

License:Open Source License

/** Measures how long it takes to write {@code testSize} key/value pairs to a gzip-compressed TFile. */
@Test
public void testTFileWriteGZ() throws Exception {
    final Path target = Testfile.TFILE_GZ.filepath();
    logger.info("Writing {} with {} key/value pairs", target, String.format("%,d", testSize));

    // Time only the write itself.
    startTimer();
    writeTFile(target, TFile.COMPRESSION_GZ);
    logger.info("Duration: {}", stopTimer(Testfile.TFILE_GZ, "WRITE"));

    // Confirm the file landed on HDFS, then report its on-disk footprint.
    Assert.assertTrue(hdfs.exists(target));
    final ContentSummary summary = hdfs.getContentSummary(target);
    logger.debug("Space consumed: {} bytes in {} files", String.format("%,d", summary.getSpaceConsumed()),
            String.format("%,d", summary.getFileCount()));
}

From source file:com.datatorrent.contrib.hdht.HadoopFilePerformanceTest.java

License:Open Source License

/**
 * Measures TFile read performance against a gzip-compressed file:
 * sequential scan, sequential seekTo, and random seekTo.
 */
@Test
public void testTFileReadGZ() throws Exception {

    final Path target = Testfile.TFILE_GZ.filepath();
    logger.info("Reading {} with {} key/value pairs", target, String.format("%,d", testSize));
    // Populate the file first so the timed reads below have data to scan.
    writeTFile(target, TFile.COMPRESSION_GZ);

    // Sequential scan via scanner.next().
    startTimer();
    readTFileSeq(target);
    logger.info("Duration for scanner.next() SEQUENTIAL keys: {}", stopTimer(Testfile.TFILE_GZ, "READ-SEQ"));

    // Sequential key lookups via scanner.seekTo(key).
    startTimer();
    readTFileSeqId(target);
    logger.info("Duration for scanner.seekTo(key) SEQUENTIAL keys: {}",
            stopTimer(Testfile.TFILE_GZ, "READ-SEQ-ID"));

    // Random-order key lookups via scanner.seekTo(key).
    startTimer();
    readTFileRandom(target);
    logger.info("Duration for scanner.seekTo(key) RANDOM keys: {}", stopTimer(Testfile.TFILE_GZ, "READ-RAND"));

}

From source file:com.datatorrent.contrib.hdht.HadoopFilePerformanceTest.java

License:Open Source License

/**
 * Measures DTFile read performance against a gzip-compressed file:
 * sequential scan, sequential seekTo, and random seekTo.
 *
 * NOTE(review): the file is written with {@code writeTFile} but read with the
 * DTFile readers — presumably DTFile consumes the standard TFile on-disk
 * format; confirm against the writer/reader helpers.
 */
@Test
public void testDTFileReadGZ() throws Exception {

    final Path target = Testfile.DTFILE_GZ.filepath();
    logger.info("Reading {} with {} key/value pairs", target, String.format("%,d", testSize));
    // Populate the file first so the timed reads below have data to scan.
    writeTFile(target, TFile.COMPRESSION_GZ);

    // Sequential scan via scanner.next().
    startTimer();
    readDTFileSeq(target);
    logger.info("Duration for scanner.next() SEQUENTIAL keys: {}", stopTimer(Testfile.DTFILE_GZ, "READ-SEQ"));

    // Sequential key lookups via scanner.seekTo(key).
    startTimer();
    readDTFileSeqId(target);
    logger.info("Duration for scanner.seekTo(key) SEQUENTIAL keys: {}",
            stopTimer(Testfile.DTFILE_GZ, "READ-SEQ-ID"));

    // Random-order key lookups via scanner.seekTo(key).
    startTimer();
    readDTFileRandom(target);
    logger.info("Duration for scanner.seekTo(key) RANDOM keys: {}", stopTimer(Testfile.DTFILE_GZ, "READ-RAND"));

}

From source file:com.datatorrent.contrib.hdht.HDHTFileAccessTest.java

License:Open Source License

/** Runs the shared TFile round-trip test with gzip compression. */
@Test
public void testTFileGZ() throws IOException {
    testTFile(TFile.COMPRESSION_GZ);
}

From source file:com.datatorrent.contrib.hdht.HDHTFileAccessTest.java

License:Open Source License

/** Runs the shared DTFile round-trip test with gzip compression. */
@Test
public void testDTFileGZ() throws IOException {
    testDTFile(TFile.COMPRESSION_GZ);
}