Example usage for org.apache.hadoop.io.compress DefaultCodec DefaultCodec

List of usage examples for org.apache.hadoop.io.compress DefaultCodec DefaultCodec

Introduction

On this page you can find example usages of the org.apache.hadoop.io.compress DefaultCodec constructor.

Prototype

DefaultCodec

Source Link

Usage

From source file: org.apache.gobblin.metastore.FsStateStore.java

License: Apache License

/**
 * See {@link StateStore#putAll(String, String, Collection)}.
 *
 * <p>
 *   This implementation does not support putting the state objects into an existing store,
 *   as append is not supported by the Hadoop SequenceFile (HADOOP-7139). When
 *   {@code useTmpFileForPut} is set, states are first written to a temp-prefixed file and
 *   then renamed into place for an atomic-looking publish.
 * </p>
 */
@Override
public void putAll(String storeName, String tableName, Collection<T> states) throws IOException {
    // Write either directly to the table file or to a temp-prefixed sibling.
    String targetName = this.useTmpFileForPut ? TMP_FILE_PREFIX + tableName : tableName;
    Path storeDirPath = new Path(this.storeRootDir, storeName);
    Path writePath = new Path(storeDirPath, targetName);

    if (!this.fs.exists(writePath) && !create(storeName, targetName)) {
        throw new IOException("Failed to create a state file for table " + targetName);
    }

    // Guava Closer guarantees the writer is closed and the original Throwable
    // is propagated (rethrow) rather than masked by a close() failure.
    Closer resourceCloser = Closer.create();
    try {
        @SuppressWarnings("deprecation")
        SequenceFile.Writer stateWriter = resourceCloser.register(SequenceFile.createWriter(this.fs, this.conf,
                writePath, Text.class, this.stateClass, SequenceFile.CompressionType.BLOCK, new DefaultCodec()));
        for (T state : states) {
            // Keys are state IDs (empty string when the ID is null); values are the states themselves.
            stateWriter.append(new Text(Strings.nullToEmpty(state.getId())), state);
        }
    } catch (Throwable t) {
        throw resourceCloser.rethrow(t);
    } finally {
        resourceCloser.close();
    }

    // Publish step: move the fully-written temp file onto the real table path.
    if (this.useTmpFileForPut) {
        renamePath(writePath, new Path(storeDirPath, tableName));
    }
}

From source file:org.apache.hawq.pxf.plugins.hdfs.utilities.HdfsUtilitiesTest.java

License:Apache License

@Test
public void isSplittableCodec() {

    // Splittable inputs: uncompressed files, and bzip2 (block-oriented compression).
    testIsSplittableCodec("no codec - splittable", "some/innocent.file", null, true);
    testIsSplittableCodec("bzip2 codec - splittable", "bzip2.bz2", new BZip2Codec(), true);

    // Non-splittable inputs: stream compressors (gzip, DEFLATE) cannot be read from the middle.
    testIsSplittableCodec("gzip codec - not splittable", "/gzip.gz", new GzipCodec(), false);
    testIsSplittableCodec("default codec - not splittable", "/default.deflate", new DefaultCodec(), false);
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.jsonld.DeflatedJsonLDQuadInputTest.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed JSON-LD quad input, using the
 * {@code .jsonld.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedJsonLDQuadInputTest() {
    super(".jsonld.deflate", new DefaultCodec());
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.jsonld.DeflatedJsonLDTripleInputTest.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed JSON-LD triple input, using the
 * {@code .jsonld.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedJsonLDTripleInputTest() {
    super(".jsonld.deflate", new DefaultCodec());
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.nquads.DeflatedNQuadsInputTest.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed NQuads input, using the
 * {@code .nq.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedNQuadsInputTest() {
    super(".nq.deflate", new DefaultCodec());
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.nquads.DeflatedWholeFileNQuadsInputTest.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed whole-file NQuads input, using the
 * {@code .nq.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedWholeFileNQuadsInputTest() {
    super(".nq.deflate", new DefaultCodec());
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.ntriples.DeflatedBlockedNTriplesInput.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed blocked NTriples input, using the
 * {@code .nt.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedBlockedNTriplesInput() {
    super(".nt.deflate", new DefaultCodec());
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.ntriples.DeflatedNTriplesInputTest.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed NTriples input, using the
 * {@code .nt.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedNTriplesInputTest() {
    super(".nt.deflate", new DefaultCodec());
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.ntriples.DeflatedWholeFileNTriplesInputTest.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed whole-file NTriples input, using the
 * {@code .nt.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedWholeFileNTriplesInputTest() {
    super(".nt.deflate", new DefaultCodec());
}

From source file:org.apache.jena.hadoop.rdf.io.input.compressed.rdfjson.DeflatedRdfJsonInputTest.java

License:Apache License

/**
 * Creates new tests for DEFLATE-compressed RDF/JSON input, using the
 * {@code .rj.deflate} extension and Hadoop's {@link DefaultCodec}.
 */
public DeflatedRdfJsonInputTest() {
    super(".rj.deflate", new DefaultCodec());
}