Example usage for org.apache.hadoop.io NullWritable get

Introduction

This page collects example usages of the org.apache.hadoop.io.NullWritable.get() method, drawn from open-source projects.

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
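
NullWritable is a singleton whose read and write methods are no-ops, so get() always returns the same shared instance. It is typically used as a placeholder key or value when only one side of a MapReduce key-value pair carries data. A minimal sketch of both properties (the class name is illustrative):

import org.apache.hadoop.io.NullWritable;

public class NullWritableDemo {
    public static void main(String[] args) {
        // get() always returns the same shared instance.
        NullWritable first = NullWritable.get();
        NullWritable second = NullWritable.get();
        System.out.println(first == second); // prints: true

        // NullWritable serializes to zero bytes, so using it as a key or
        // value adds no overhead to the output files.
    }
}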

Usage

From source file: cascading.scheme.WritableSequenceFile.java

License: Open Source License

@Override
public void sink(TupleEntry tupleEntry, OutputCollector outputCollector) throws IOException {
    // Default both sides to the NullWritable singleton; whichever side
    // has no configured type keeps it.
    Object keyValue = NullWritable.get();
    Object valueValue = NullWritable.get();

    if (keyType == null) {
        valueValue = tupleEntry.getObject(getSinkFields());
    } else if (valueType == null) {
        keyValue = tupleEntry.getObject(getSinkFields());
    } else {
        keyValue = tupleEntry.getObject(getSinkFields().get(0));
        valueValue = tupleEntry.getObject(getSinkFields().get(1));
    }

    outputCollector.collect(keyValue, valueValue);
}

From source file: ch.ethz.las.wikimining.mr.coverage.GreeDiReducer.java

@Override
public void reduce(IntWritable key, Iterable<DocumentWithVectorWritable> values, Context context)
        throws IOException, InterruptedException {
    final WordCoverageFromMahout objectiveFunction = new WordCoverageFromMahout(values);
    final SfoGreedyAlgorithm sfo = new SfoGreedyLazy(objectiveFunction);
    final int selectCount = context.getConfiguration().getInt(Fields.SELECT_COUNT.get(),
            Defaults.SELECT_COUNT.get());

    Set<Integer> selected = sfo.run(objectiveFunction.getAllDocIds(), selectCount);

    // Emit each selected document id; the output key carries no
    // information, so the NullWritable singleton is used.
    for (Integer docId : selected) {
        IntWritable outValue = new IntWritable(docId);
        context.write(NullWritable.get(), outValue);
    }
}

From source file: cityhubtopten.Top10Mapper.java

@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
    // Emit the locally retained top-ten records once this mapper has
    // processed all of its input; the key carries no information.
    for (Text t : ToRecordMap.values()) {
        context.write(NullWritable.get(), t);
    }
}

From source file: cityhubtopten.Top10Reducer.java

public void reduce(NullWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {

    for (Text value : values) {
        String line = value.toString();
        if (line.length() > 0) {
            String[] tokens = line.split(",");
            // Split the row and fetch the quality-of-life score.
            double quality = Double.parseDouble(tokens[6]);
            // Insert the score as the key and the entire row as the value;
            // the TreeMap keeps the records sorted by score.
            ToRecordMap.put(new QualityOfLife(quality), new Text(value));
        }
    }

    // If we have more than ten records, trim from the head of the map:
    // the TreeMap iterates in ascending order, so the lowest-scoring
    // entries are removed first.
    Iterator<Entry<QualityOfLife, Text>> iter = ToRecordMap.entrySet().iterator();
    while (ToRecordMap.size() > 10) {
        iter.next();
        iter.remove();
    }

    // Output the remaining ten records, highest score first, with a null key.
    for (Text t : ToRecordMap.descendingMap().values()) {
        context.write(NullWritable.get(), t);
    }
}

From source file: clustering.tf_idf.DocCntMapper.java

License: Apache License

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    // Emit one pre-set count per input record; all records share the
    // single null key so a lone reducer can sum them.
    context.write(NullWritable.get(), this.outputValue);
}

From source file: clustering.tf_idf.DocCntReducer.java

License: Apache License

@Override
protected void reduce(NullWritable key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
    // Every count arrives under the single null key, so this sums all of them.
    for (IntWritable value : values) {
        this.counter += value.get();
    }
    this.outputValue.set(this.counter);
    context.write(NullWritable.get(), this.outputValue);
}

From source file: co.cask.cdap.etl.batch.connector.ConnectorSink.java

License: Apache License

@Override
public void transform(StructuredRecord input, Emitter<KeyValue<NullWritable, Text>> emitter) throws Exception {
    if (writeSchema) {
        input = modifyRecord(input);
    }
    // Only the JSON-encoded record carries data, so the key slot is
    // filled with the NullWritable singleton.
    emitter.emit(
            new KeyValue<>(NullWritable.get(), new Text(StructuredRecordStringConverter.toJsonString(input))));
}

From source file: co.cask.cdap.etl.batch.mapreduce.ErrorOutputWriter.java

License: Apache License

void write(Collection<InvalidEntry<Object>> input) throws Exception {
    // Here NullWritable fills the value slot; the Avro record is the key.
    for (InvalidEntry entry : input) {
        context.write(errorDatasetName, new AvroKey<>(getGenericRecordForInvalidEntry(entry)),
                NullWritable.get());
    }
}

From source file: co.cask.cdap.etl.mock.batch.MockExternalSink.java

License: Apache License

@Override
public void transform(StructuredRecord input, Emitter<KeyValue<NullWritable, String>> emitter)
        throws Exception {
    // The record is serialized into the value; the key is the unused singleton.
    emitter.emit(new KeyValue<>(NullWritable.get(), GSON.toJson(input)));
}

From source file: co.cask.cdap.template.etl.batch.sink.TimePartitionedFileSetDatasetAvroSink.java

License: Apache License

@Override
public void transform(StructuredRecord input, Emitter<KeyValue<AvroKey<GenericRecord>, NullWritable>> emitter)
        throws Exception {
    emitter.emit(new KeyValue<>(new AvroKey<>(recordTransformer.transform(input)), NullWritable.get()));
}