List of usage examples for subclasses of org.apache.hadoop.mapreduce.RecordWriter
From source file kafka.bridge.hadoop.KafkaRecordWriter.java
public class KafkaRecordWriter<W extends BytesWritable> extends RecordWriter<NullWritable, W> {
    protected SyncProducer producer;
    protected String topic;

    protected List<Message> msgList = new ArrayList<Message>();
    protected int totalSize = 0;
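Based only on the fields shown, a writer like this presumably buffers each value as a Kafka Message and hands the batch to the SyncProducer once it grows large enough. A minimal sketch under those assumptions; the import path for SyncProducer, the flush threshold, and the flushToProducer() helper are all hypothetical, and the actual producer.send(...) call is left as a comment because the message-set API it would use is not shown in the snippet.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import kafka.javaapi.producer.SyncProducer;   // assumed import path
import kafka.message.Message;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class KafkaBridgeWriterSketch<W extends BytesWritable> extends RecordWriter<NullWritable, W> {
    protected SyncProducer producer;
    protected String topic;
    protected List<Message> msgList = new ArrayList<Message>();
    protected int totalSize = 0;

    // Hypothetical threshold; the real class presumably takes this from the job configuration.
    private static final int QUEUE_BYTES = 1024 * 1024;

    @Override
    public void write(NullWritable key, W value) throws IOException {
        // Copy the writable's payload into a Kafka message and buffer it.
        byte[] payload = new byte[value.getLength()];
        System.arraycopy(value.getBytes(), 0, payload, 0, value.getLength());
        msgList.add(new Message(payload));
        totalSize += payload.length;
        if (totalSize >= QUEUE_BYTES) {
            flushToProducer();
        }
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        flushToProducer();
        producer.close();
    }

    // Hypothetical helper: the real writer sends a message set built from msgList via the SyncProducer.
    private void flushToProducer() {
        // producer.send(topic, <message set built from msgList>);  -- exact API depends on the Kafka version
        msgList.clear();
        totalSize = 0;
    }
}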
From source file kafka.bridge.hadoop2.KafkaRecordWriter.java
public class KafkaRecordWriter<K, V> extends RecordWriter<K, V> {
    protected Producer<Object, byte[]> producer;
    protected String topic;

    protected List<KeyedMessage<Object, byte[]>> msgList = new LinkedList<KeyedMessage<Object, byte[]>>();
    protected int totalBytes = 0;
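The hadoop2 variant keeps a list of KeyedMessage objects and a running byte count, which suggests batched sends through kafka.javaapi.producer.Producer. A sketch along those lines; the flush threshold and the toBytes() serializer are hypothetical, since how keys and values become byte arrays is not shown in the snippet.

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;

import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;

import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class KafkaBridge2WriterSketch<K, V> extends RecordWriter<K, V> {
    protected Producer<Object, byte[]> producer;
    protected String topic;
    protected List<KeyedMessage<Object, byte[]>> msgList = new LinkedList<KeyedMessage<Object, byte[]>>();
    protected int totalBytes = 0;

    // Hypothetical batch threshold; the real class presumably reads it from the job configuration.
    private static final int FLUSH_BYTES = 1024 * 1024;

    // Hypothetical serializer: the snippet does not show how values are turned into bytes.
    protected byte[] toBytes(V value) {
        return value.toString().getBytes();
    }

    @Override
    public void write(K key, V value) throws IOException {
        byte[] payload = toBytes(value);
        msgList.add(new KeyedMessage<Object, byte[]>(topic, payload));
        totalBytes += payload.length;
        if (totalBytes >= FLUSH_BYTES) {
            sendBatch();
        }
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        sendBatch();
        producer.close();
    }

    private void sendBatch() {
        if (!msgList.isEmpty()) {
            producer.send(msgList);  // kafka.javaapi.producer.Producer accepts a list of KeyedMessage
            msgList.clear();
            totalBytes = 0;
        }
    }
}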
From source file ml.shifu.guagua.mapreduce.GuaguaRecordWriter.java
/**
 * We don't use {@link GuaguaRecordWriter} but hadoop MapReduce needs it.
 */
public class GuaguaRecordWriter extends RecordWriter<Text, Text> {
    @Override
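Given the comment that the writer exists only to satisfy Hadoop MapReduce, a plausible completion is a pair of empty overrides; this is a sketch of that shape, not the verbatim class body.

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class NoOpRecordWriterSketch extends RecordWriter<Text, Text> {
    @Override
    public void write(Text key, Text value) throws IOException, InterruptedException {
        // Intentionally empty: nothing is emitted through this writer.
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        // Nothing to release.
    }
}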
From source file nl.bioinf.wvanhelvoirt.HadoopPhredCalculator.FastqFileRecordWriter.java
/**
* FastqFileRecordWriter
*
* This is a custom class to write the output of the Reducer to a fastqc file.
*
* @author Wout van Helvoirt
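The excerpt stops before the class body, but a writer described this way typically wraps an FSDataOutputStream and appends one record per call. A minimal sketch, assuming NullWritable keys, Text values, and an output path supplied by the caller (all assumptions, since the real signatures are not shown):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class FastqFileWriterSketch extends RecordWriter<NullWritable, Text> {
    private final FSDataOutputStream out;

    public FastqFileWriterSketch(Configuration conf, Path outputPath) throws IOException {
        FileSystem fs = outputPath.getFileSystem(conf);
        this.out = fs.create(outputPath, true);
    }

    @Override
    public void write(NullWritable key, Text value) throws IOException {
        // Append the reducer's output line followed by a newline.
        out.write(value.getBytes(), 0, value.getLength());
        out.write('\n');
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        out.close();
    }
}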
From source file org.apache.accumulo.examples.wikisearch.output.BufferingRFileRecordWriter.java
final class BufferingRFileRecordWriter extends RecordWriter<Text, Mutation> {
    private final long maxSize;
    private final Configuration conf;
    private long size;

    private Map<Text, TreeMap<Key, Value>> buffers = new HashMap<Text, TreeMap<Key, Value>>();
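From the fields, the writer appears to group Key/Value pairs per table in sorted TreeMaps and to track their size against maxSize. A sketch of that buffering logic; the constructor and the flushBuffers() helper are hypothetical, and the step that actually writes each buffer to an RFile is omitted.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

import org.apache.accumulo.core.data.ColumnUpdate;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

final class BufferingWriterSketch extends RecordWriter<Text, Mutation> {
    private final long maxSize;
    private long size;
    private Map<Text, TreeMap<Key, Value>> buffers = new HashMap<Text, TreeMap<Key, Value>>();

    BufferingWriterSketch(long maxSize) {
        this.maxSize = maxSize;
    }

    @Override
    public void write(Text table, Mutation mutation) throws IOException, InterruptedException {
        TreeMap<Key, Value> buffer = buffers.get(table);
        if (buffer == null) {
            buffer = new TreeMap<Key, Value>();
            buffers.put(new Text(table), buffer);
        }
        // Explode the mutation into sorted Key/Value entries, since an RFile must be written in sorted order.
        for (ColumnUpdate update : mutation.getUpdates()) {
            Key key = new Key(mutation.getRow(), update.getColumnFamily(), update.getColumnQualifier(),
                    update.getColumnVisibility(), update.getTimestamp());
            buffer.put(key, new Value(update.getValue()));
        }
        size += mutation.numBytes();
        if (size >= maxSize) {
            flushBuffers();
        }
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        flushBuffers();
    }

    // Hypothetical helper: the real class writes each per-table buffer out as an RFile before clearing it.
    private void flushBuffers() {
        for (TreeMap<Key, Value> buffer : buffers.values()) {
            buffer.clear();
        }
        size = 0;
    }
}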
From source file org.apache.accumulo.hadoopImpl.mapreduce.AccumuloRecordWriter.java
/**
 * A base class to be used to create {@link RecordWriter} instances that write to Accumulo.
 */
public class AccumuloRecordWriter extends RecordWriter<Text, Mutation> {
    // class to serialize configuration under in the job
    private static final Class<AccumuloOutputFormat> CLASS = AccumuloOutputFormat.class;
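A writer keyed by table name and carrying Mutations usually delegates to a per-table BatchWriter obtained from a MultiTableBatchWriter. A sketch of that pattern, assuming the MultiTableBatchWriter has already been created from the configuration serialized under AccumuloOutputFormat:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.MultiTableBatchWriter;
import org.apache.accumulo.core.client.MutationsRejectedException;
import org.apache.accumulo.core.data.Mutation;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class AccumuloWriterSketch extends RecordWriter<Text, Mutation> {
    private final MultiTableBatchWriter mtbw;
    private final Map<Text, BatchWriter> writers = new HashMap<Text, BatchWriter>();

    public AccumuloWriterSketch(MultiTableBatchWriter mtbw) {
        this.mtbw = mtbw;
    }

    @Override
    public void write(Text table, Mutation mutation) throws IOException {
        try {
            BatchWriter bw = writers.get(table);
            if (bw == null) {
                bw = mtbw.getBatchWriter(table.toString());
                writers.put(new Text(table), bw);  // copy the key: Hadoop may reuse the Text instance
            }
            bw.addMutation(mutation);
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        try {
            mtbw.close();  // flushes all per-table batch writers
        } catch (MutationsRejectedException e) {
            throw new IOException(e);
        }
    }
}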
From source file org.apache.avro.mapreduce.AvroKeyRecordWriter.java
/**
 * Writes Avro records to an Avro container file output stream.
 *
 * @param <T> The Java type of the Avro data to write.
 */
public class AvroKeyRecordWriter<T> extends RecordWriter<AvroKey<T>, NullWritable> {
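The class wraps an Avro container-file writer around the task's output stream and appends one datum per key. A sketch of that flow; the use of GenericDatumWriter is an assumption, since the real class resolves the datum writer from its data model:

import java.io.IOException;
import java.io.OutputStream;

import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.mapred.AvroKey;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class AvroKeyWriterSketch<T> extends RecordWriter<AvroKey<T>, NullWritable> {
    private final DataFileWriter<T> fileWriter;

    public AvroKeyWriterSketch(Schema writerSchema, CodecFactory codec, OutputStream out) throws IOException {
        fileWriter = new DataFileWriter<T>(new GenericDatumWriter<T>(writerSchema));
        fileWriter.setCodec(codec);
        fileWriter.create(writerSchema, out);
    }

    @Override
    public void write(AvroKey<T> record, NullWritable ignore) throws IOException {
        // Each output key becomes one record in the Avro container file; the NullWritable value is ignored.
        fileWriter.append(record.datum());
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException {
        fileWriter.close();
    }
}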
From source file org.apache.avro.mapreduce.AvroKeyValueRecordWriter.java
/**
* Writes key/value pairs to an Avro container file.
*
* <p>Each entry in the Avro container file will be a generic record with two fields,
* named 'key' and 'value'. The input types may be basic Writable objects like Text or
* IntWritable, or they may be AvroWrapper subclasses (AvroKey or AvroValue). Writable
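The excerpt is cut short, but the described layout is easy to illustrate: each container-file entry is a generic record with a 'key' field and a 'value' field. A standalone sketch of that record layout using plain Avro classes (the string/int pair schema and the in-memory output stream are assumptions for illustration):

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class KeyValueContainerSketch {
    public static void main(String[] args) throws IOException {
        // Pair schema with the two fields the Javadoc describes: 'key' and 'value'.
        Schema pairSchema = SchemaBuilder.record("KeyValuePair").fields()
                .requiredString("key")
                .requiredInt("value")
                .endRecord();

        ByteArrayOutputStream out = new ByteArrayOutputStream();  // stand-in for the task's output stream
        DataFileWriter<GenericRecord> writer =
                new DataFileWriter<GenericRecord>(new GenericDatumWriter<GenericRecord>(pairSchema));
        writer.create(pairSchema, out);

        GenericRecord pair = new GenericData.Record(pairSchema);
        pair.put("key", "apple");
        pair.put("value", 3);
        writer.append(pair);
        writer.close();

        System.out.println("container file bytes: " + out.size());
    }
}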
From source file org.apache.carbondata.streaming.CarbonStreamRecordWriter.java
/**
 * Stream record writer
 */
public class CarbonStreamRecordWriter extends RecordWriter<Void, Object> {

    private static final Logger LOGGER =
            LogServiceFactory.getLogService(CarbonStreamRecordWriter.class.getName());
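The snippet shows only the declaration, but the Void key type means the writer is driven entirely by row values. A minimal sketch of that shape; the in-memory batch is hypothetical, as the real class encodes rows into CarbonData's streaming file format:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

public class StreamWriterSketch extends RecordWriter<Void, Object> {
    // Hypothetical in-memory batch standing in for the streaming-file encoder.
    private final List<Object> batch = new ArrayList<Object>();

    @Override
    public void write(Void key, Object row) throws IOException, InterruptedException {
        batch.add(row);  // keys are unused: the writer consumes only row values
    }

    @Override
    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
        batch.clear();   // the real implementation flushes and closes the stream file here
    }
}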
From source file org.apache.cassandra.hadoop.AbstractBulkRecordWriter.java
public abstract class AbstractBulkRecordWriter<K, V> extends RecordWriter<K, V>
        implements org.apache.hadoop.mapred.RecordWriter<K, V> {
    public final static String OUTPUT_LOCATION = "mapreduce.output.bulkoutputformat.localdir";
    public final static String BUFFER_SIZE_IN_MB = "mapreduce.output.bulkoutputformat.buffersize";
    public final static String STREAM_THROTTLE_MBITS = "mapreduce.output.bulkoutputformat.streamthrottlembits";
    public final static String MAX_FAILED_HOSTS = "mapreduce.output.bulkoutputformat.maxfailedhosts";
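The constants are Hadoop configuration keys, so the writer's behavior is tuned through the job's Configuration. A small sketch of reading them; the default values below are illustrative only and are not taken from the source:

import org.apache.hadoop.conf.Configuration;

public class BulkOutputConfigSketch {
    public static final String OUTPUT_LOCATION = "mapreduce.output.bulkoutputformat.localdir";
    public static final String BUFFER_SIZE_IN_MB = "mapreduce.output.bulkoutputformat.buffersize";
    public static final String STREAM_THROTTLE_MBITS = "mapreduce.output.bulkoutputformat.streamthrottlembits";
    public static final String MAX_FAILED_HOSTS = "mapreduce.output.bulkoutputformat.maxfailedhosts";

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        String localDir = conf.get(OUTPUT_LOCATION, System.getProperty("java.io.tmpdir"));
        int bufferSizeMb = conf.getInt(BUFFER_SIZE_IN_MB, 64);
        int throttleMbits = conf.getInt(STREAM_THROTTLE_MBITS, 0);
        int maxFailedHosts = conf.getInt(MAX_FAILED_HOSTS, 0);
        System.out.printf("dir=%s buffer=%dMB throttle=%dMbit/s maxFailedHosts=%d%n",
                localDir, bufferSizeMb, throttleMbits, maxFailedHosts);
    }
}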