Example usage for org.apache.hadoop.mapreduce.lib.output FileOutputFormat subclass-usage

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.lib.output.FileOutputFormat as a superclass: each excerpt under Usage shows an open-source class that extends FileOutputFormat. A minimal sketch of the shared pattern is given below.
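
All of the classes listed below follow the same basic pattern: extend FileOutputFormat<K, V> and override getRecordWriter() to return a RecordWriter for the key and value types being written. The sketch that follows illustrates that pattern; the class name PlainTextOutputFormat and the tab-separated record layout are illustrative assumptions, not taken from any of the source files on this page.

// Minimal sketch (assumed names): subclass FileOutputFormat and return a
// RecordWriter that writes to a task-specific file under the job output directory.
import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class PlainTextOutputFormat<K, V> extends FileOutputFormat<K, V> {

    @Override
    public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
            throws IOException, InterruptedException {
        // Resolve a per-task output file under the job's output directory.
        Path file = getDefaultWorkFile(context, ".txt");
        FileSystem fs = file.getFileSystem(context.getConfiguration());
        final FSDataOutputStream out = fs.create(file, false);

        return new RecordWriter<K, V>() {
            @Override
            public void write(K key, V value) throws IOException {
                // One "key<TAB>value" line per record (illustrative format).
                out.writeBytes(key + "\t" + value + "\n");
            }

            @Override
            public void close(TaskAttemptContext ctx) throws IOException {
                out.close();
            }
        };
    }
}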

Usage

From source file com.linkedin.cubert.pig.piggybank.storage.avro.PigAvroOutputFormat.java

/**
 * The OutputFormat for Avro data.
 */
public class PigAvroOutputFormat extends FileOutputFormat<NullWritable, Object> {

From source file com.linkedin.json.JsonSequenceFileOutputFormat.java

/**
 *
 */
public class JsonSequenceFileOutputFormat extends FileOutputFormat<Object, Object> {
    @Override
    public RecordWriter<Object, Object> getRecordWriter(final TaskAttemptContext context)

From source file com.linkedin.pinot.hadoop.io.PinotOutputFormat.java

/**
 * Generic Pinot Output Format implementation.
 * @param <K> the output key type
 * @param <V> the output value type
 */
public class PinotOutputFormat<K, V> extends FileOutputFormat<K, V> {

From source file com.marklogic.contentpump.ArchiveOutputFormat.java

/**
 * OutputFormat for archives exported from MarkLogic.
 * @author ali
 */
public class ArchiveOutputFormat extends FileOutputFormat<DocumentURI, MarkLogicDocument> {

From source file com.marklogic.contentpump.SingleDocumentOutputFormat.java

/**
 * OutputFormat for DocumentURI and MarkLogicDocument creating a single file.
 * 
 * @author ali
 */
public class SingleDocumentOutputFormat extends FileOutputFormat<DocumentURI, MarkLogicDocument> {

From source file com.marklogic.mapreduce.examples.BinaryReader.java

class BinaryOutputFormat extends FileOutputFormat<DocumentURI, BytesWritable> {

    @Override
    public RecordWriter<DocumentURI, BytesWritable> getRecordWriter(TaskAttemptContext context)
            throws IOException, InterruptedException {
        return new BinaryWriter(getOutputPath(context), context.getConfiguration());

From source file com.metamx.milano.hadoop.MilanoProtoFileOutputFormat.java

/**
 *
 */
public class MilanoProtoFileOutputFormat<K> extends FileOutputFormat<K, Message> {
    private Logger log = Logger.getLogger(MilanoProtoFileOutputFormat.class);
    private MilanoTypeMetadata.TypeMetadata metadata;

From source file com.mongodb.hadoop.BSONFileOutputFormat.java

public class BSONFileOutputFormat<K, V> extends FileOutputFormat<K, V> {

    @Override
    public RecordWriter<K, V> getRecordWriter(final TaskAttemptContext context) throws IOException {
        // Open data output stream

From source file com.moz.fiji.mapreduce.output.framework.FijiHFileOutputFormat.java

/**
 * Hadoop output format that writes HFiles that can be loaded directly into HBase region servers.
 *
 * <p> Allows writing entries to any HBase family of the target table.
 *     Each reduce task generates a set of files per HBase family.
 *

From source file com.neusoft.hbase.test.hadoop.dataload.HFileOutputFormat2.java

/**
 * Writes HFiles. Passed Cells must arrive in order.
 * Writes current time as the sequence id for the file. Sets the major compacted
 * attribute on created hfiles. Calling write(null,null) will forcibly roll
 * all HFiles being written.
 * <p>