List of usage examples for org.apache.hadoop.mapreduce.RecordWriter subclass usage
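Every class in this list implements the same two-method contract: write(key, value) is called once per output record, and close(context) is called when the task attempt finishes. As a reference point, here is a minimal skeleton of that contract; the class name, field, and tab-separated output format are illustrative, not taken from any of the files below.

import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class SkeletonRecordWriter extends RecordWriter<Text, Text> {
    private final FSDataOutputStream out;

    public SkeletonRecordWriter(FSDataOutputStream out) {
        this.out = out;
    }

    @Override
    public void write(Text key, Text value) throws IOException {
        // one call per record emitted by the map or reduce task
        out.writeBytes(key.toString() + "\t" + value.toString() + "\n");
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        // release the stream when the task attempt finishes
        out.close();
    }
}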
From source file com.linkedin.cubert.io.rubix.RubixRecordWriter.java
/**
* Writes data in rubix binary file format.
*
* The rubix file format stores all values together in the beginning of the file, followed
* by all keys together at the end of the file. This RecordWriter caches all keys in
* memory, and dumps them to file when all record values are written. It is, therefore,
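The snippet above breaks off mid-sentence, but the layout it describes is clear: values are streamed out as records arrive, while keys are buffered in memory and appended once close() is called. A minimal sketch of that pattern follows; the class and field names are illustrative, not the actual RubixRecordWriter internals.

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Values go to the file immediately; keys are cached and flushed at the end,
// producing the "all values first, all keys last" layout described above.
public class ValuesThenKeysRecordWriter extends RecordWriter<BytesWritable, BytesWritable> {
    private final FSDataOutputStream out;
    private final List<byte[]> cachedKeys = new ArrayList<byte[]>();

    public ValuesThenKeysRecordWriter(FSDataOutputStream out) {
        this.out = out;
    }

    @Override
    public void write(BytesWritable key, BytesWritable value) throws IOException {
        out.write(value.getBytes(), 0, value.getLength());               // stream the value now
        cachedKeys.add(Arrays.copyOf(key.getBytes(), key.getLength()));  // cache the key
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        for (byte[] key : cachedKeys) {   // dump all cached keys at the end of the file
            out.write(key);
        }
        out.close();
    }
}

The trade-off of this layout is that heap usage grows with the total size of the keys, since none of them can be flushed until the last value has been written.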
From source file com.linkedin.cubert.pig.piggybank.storage.avro.PigAvroRecordWriter.java
/**
 * The RecordWriter used to output pig results as avro data.
 */
public class PigAvroRecordWriter extends RecordWriter<NullWritable, Object> {
    private DataFileWriter<Object> writer;
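The DataFileWriter field suggests the usual delegation pattern: write() appends each value as one Avro datum and close() finalizes the container file. A sketch of that shape under those assumptions; the class name and constructor are illustrative, not the PigAvroRecordWriter source.

import java.io.IOException;
import java.io.OutputStream;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Delegates to Avro's DataFileWriter: each value is appended as one Avro datum.
public class AvroDelegatingRecordWriter extends RecordWriter<NullWritable, Object> {
    private final DataFileWriter<Object> writer;

    public AvroDelegatingRecordWriter(Schema schema, OutputStream out) throws IOException {
        writer = new DataFileWriter<Object>(new GenericDatumWriter<Object>(schema));
        writer.create(schema, out);
    }

    @Override
    public void write(NullWritable key, Object value) throws IOException {
        writer.append(value);  // the key is ignored; only the datum is stored
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        writer.close();  // flushes the final block and writes the file footer
    }
}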
From source file com.linkedin.pinot.hadoop.io.PinotRecordWriter.java
/**
 * Basic Single Threaded {@link RecordWriter}.
 */
public class PinotRecordWriter<K, V> extends RecordWriter<K, V> {
    private final static Logger LOGGER = LoggerFactory.getLogger(PinotRecordWriter.class);
From source file com.marklogic.contentpump.ArchiveWriter.java
/**
 * RecordWriter that writes <DocumentURI, MarkLogicDocument> to zip files.
 *
 * @author jchen
 */
public class ArchiveWriter extends RecordWriter<DocumentURI, MarkLogicDocument>
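A sketch of the zip-archive pattern this class comment describes, using java.util.zip with plain Text/BytesWritable stand-ins for the MarkLogic types; all names are illustrative.

import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Each record becomes one entry in a zip archive, named after its URI key.
public class ZipArchiveRecordWriter extends RecordWriter<Text, BytesWritable> {
    private final ZipOutputStream zip;

    public ZipArchiveRecordWriter(ZipOutputStream zip) {
        this.zip = zip;
    }

    @Override
    public void write(Text uri, BytesWritable doc) throws IOException {
        zip.putNextEntry(new ZipEntry(uri.toString()));  // entry named by document URI
        zip.write(doc.getBytes(), 0, doc.getLength());
        zip.closeEntry();
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        zip.close();  // finishes the central directory and closes the stream
    }
}

Naming each ZipEntry after the document URI keeps the archive self-describing: unzipping it recovers one file per document.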
From source file com.marklogic.contentpump.SingleDocumentWriter.java
/**
 * RecordWriter for <DocumentURI, MarkLogicDocument>, writing each
 * document to a single file.
*
* @author jchen
*/
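A minimal sketch of the one-file-per-record pattern suggested by the comment above, again with illustrative names and Text/BytesWritable standing in for the MarkLogic types.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Each record is written to its own file under a common output directory,
// with the file name derived from the record's URI key.
public class OneFilePerRecordWriter extends RecordWriter<Text, BytesWritable> {
    private final FileSystem fs;
    private final Path dir;

    public OneFilePerRecordWriter(Path dir, Configuration conf) throws IOException {
        this.fs = dir.getFileSystem(conf);
        this.dir = dir;
    }

    @Override
    public void write(Text uri, BytesWritable doc) throws IOException {
        Path file = new Path(dir, uri.toString());
        FSDataOutputStream out = fs.create(file);  // one file per document
        try {
            out.write(doc.getBytes(), 0, doc.getLength());
        } finally {
            out.close();
        }
    }

    @Override
    public void close(TaskAttemptContext context) {
        // nothing is held open between records
    }
}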
From source file com.marklogic.mapreduce.examples.BinaryReader.java
class BinaryWriter extends RecordWriter<DocumentURI, BytesWritable> {
    Path dir;
    Configuration conf;

    public BinaryWriter(Path path, Configuration conf) {
From source file com.marklogic.mapreduce.MarkLogicRecordWriter.java
/**
 * A RecordWriter that persists MarkLogicRecord objects to a MarkLogic server.
*
* @author jchen
*
*/
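Only the class comment survives in this listing, so the sketch below shows just the general shape — write() pushes each record to a server session and close() commits and releases it — using a hypothetical DocumentSession interface in place of the real MarkLogic client API.

import java.io.IOException;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Hypothetical server session; stands in for whatever client API the
// real writer uses. Not part of the MarkLogic connector.
interface DocumentSession {
    void insert(String uri, byte[] content) throws IOException;
    void commit() throws IOException;
    void close();
}

// Pushes each record to the server as it arrives; commits on close().
class ServerPersistingRecordWriter extends RecordWriter<String, byte[]> {
    private final DocumentSession session;

    ServerPersistingRecordWriter(DocumentSession session) {
        this.session = session;
    }

    @Override
    public void write(String uri, byte[] content) throws IOException {
        session.insert(uri, content);
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        try {
            session.commit();  // make all inserts durable before the task ends
        } finally {
            session.close();
        }
    }
}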
From source file com.mongodb.hadoop.output.BSONFileRecordWriter.java
public class BSONFileRecordWriter<K, V> extends RecordWriter<K, V> {
    private BSONEncoder bsonEnc = new BasicBSONEncoder();
    private FSDataOutputStream outFile = null;
    private FSDataOutputStream splitsFile = null;
    private long bytesWritten = 0L;
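The fields suggest the mechanism: each value is BSON-encoded and appended to the data file while a running byte count is kept, presumably so split offsets can be written to the companion splits file. A sketch under those assumptions; the split threshold and offset format are invented for illustration.

import java.io.IOException;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.bson.BSONEncoder;
import org.bson.BSONObject;
import org.bson.BasicBSONEncoder;

// Encodes each value to BSON bytes, appends them to the data file, and
// records byte offsets at record boundaries so the file can be re-split.
public class BsonAppendingRecordWriter extends RecordWriter<Object, BSONObject> {
    private static final long SPLIT_SIZE = 64L * 1024 * 1024;  // illustrative threshold

    private final BSONEncoder bsonEnc = new BasicBSONEncoder();
    private final FSDataOutputStream outFile;
    private final FSDataOutputStream splitsFile;
    private long bytesWritten = 0L;
    private long lastSplit = 0L;

    public BsonAppendingRecordWriter(FSDataOutputStream outFile, FSDataOutputStream splitsFile) {
        this.outFile = outFile;
        this.splitsFile = splitsFile;
    }

    @Override
    public void write(Object key, BSONObject value) throws IOException {
        byte[] encoded = bsonEnc.encode(value);  // serialize the document to BSON
        outFile.write(encoded);
        bytesWritten += encoded.length;
        if (splitsFile != null && bytesWritten - lastSplit >= SPLIT_SIZE) {
            splitsFile.writeLong(bytesWritten);  // offset of a record boundary
            lastSplit = bytesWritten;
        }
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        outFile.close();
        if (splitsFile != null) {
            splitsFile.close();
        }
    }
}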
From source file com.mongodb.hadoop.output.MongoRecordWriter.java
public class MongoRecordWriter<K, V> extends RecordWriter<K, V> {
    private final List<DBCollection> collections;
    private final int numberOfHosts;
    private final TaskAttemptContext context;
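Holding a list of DBCollections hints that writes are fanned out across several collections or hosts. The snippet does not show the selection logic, so the hash-based choice below is an assumption for illustration.

import java.util.List;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;

// Spreads writes across several collections by hashing the key; how the
// real writer chooses a collection is not visible in the snippet above.
public class FanOutMongoRecordWriter extends RecordWriter<Object, DBObject> {
    private final List<DBCollection> collections;

    public FanOutMongoRecordWriter(List<DBCollection> collections) {
        this.collections = collections;
    }

    @Override
    public void write(Object key, DBObject value) {
        int idx = Math.abs(key.hashCode() % collections.size());
        collections.get(idx).save(value);  // upsert the document into the chosen collection
    }

    @Override
    public void close(TaskAttemptContext context) {
        // connections are owned by the caller in this sketch
    }
}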
From source file com.pinterest.terrapin.hadoop.HFileRecordWriter.java
/**
 * HFileRecordWriter for writing key-value pairs to an HFile.
 */
public class HFileRecordWriter extends RecordWriter<BytesWritable, BytesWritable> {
    private StoreFile.Writer writer;
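A sketch of the append path implied by the StoreFile.Writer field, using the older HBase API in which StoreFile.Writer.append takes a KeyValue; the column family, qualifier, and writer construction (normally done via a builder) are illustrative assumptions. Note that HFiles require cells in sorted key order, so callers must feed keys already sorted.

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

// Wraps each key/value pair in a KeyValue cell and appends it to the HFile.
public class HFileAppendingRecordWriter extends RecordWriter<BytesWritable, BytesWritable> {
    private static final byte[] FAMILY = "cf".getBytes();  // illustrative column family
    private static final byte[] QUALIFIER = new byte[0];

    private final StoreFile.Writer writer;  // construction omitted in this sketch

    public HFileAppendingRecordWriter(StoreFile.Writer writer) {
        this.writer = writer;
    }

    @Override
    public void write(BytesWritable key, BytesWritable value) throws IOException {
        // copy out the exact bytes, since BytesWritable backing arrays may be padded
        byte[] row = Arrays.copyOf(key.getBytes(), key.getLength());
        byte[] val = Arrays.copyOf(value.getBytes(), value.getLength());
        writer.append(new KeyValue(row, FAMILY, QUALIFIER, val));
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        writer.close();  // finalizes the HFile trailer and index blocks
    }
}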