Example usage for org.apache.hadoop.io.NullWritable.get

Introduction

This page collects example usages of org.apache.hadoop.io.NullWritable.get().

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
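
NullWritable is a placeholder for a key or value slot that carries no data: it serializes to zero bytes, and get() always returns the same shared singleton, so there is never a reason to construct one. A minimal sketch of the usual pattern, assuming a hypothetical LineValueMapper class (not taken from the sources below):

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper: drops the input key and emits each line keyed by the
// NullWritable singleton, so the key contributes no bytes to the output.
public class LineValueMapper extends Mapper<LongWritable, Text, NullWritable, Text> {

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // NullWritable.get() returns the shared singleton; nothing is allocated.
        context.write(NullWritable.get(), value);
    }
}

Because the singleton holds no state, it is safe to pass to every write() call; the examples below use it the same way as an output key, an output value, or a RecordReader's current key or value.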

Usage

From source file:com.willetinc.hadoop.mapreduce.dynamodb.DynamoDBOutputFormatTest.java

License:Apache License

@Test
public void testDynamoDBRecordWriter()
        throws InstantiationException, IllegalAccessException, IOException, InterruptedException {

    AmazonDynamoDBClient client = createMock(AmazonDynamoDBClient.class);
    TaskAttemptContext context = createMock(TaskAttemptContext.class);
    DynamoDBOutputFormat<MyTable, NullWritable> outputFormat = new DynamoDBOutputFormat<MyTable, NullWritable>();

    RecordWriter<MyTable, NullWritable> writer = outputFormat.getRecordWriter(client, TABLE_NAME);

    Capture<PutItemRequest> putCapture = new Capture<PutItemRequest>();
    expect(client.putItem(capture(putCapture))).andReturn(new PutItemResult());
    client.shutdown();

    AttributeValue hashKey = new AttributeValue().withN(HASHKEY_VALUE);
    AttributeValue rangeKey = new AttributeValue().withN(RANGEKEY_VALUE);

    MyTable record = new MyTable();
    record.setHashKeyValue(hashKey);
    record.setRangeKeyValue(rangeKey);

    replay(client);
    replay(context);

    writer.write(record, NullWritable.get());
    writer.close(context);
    PutItemRequest put = putCapture.getValue();
    Map<String, AttributeValue> item = put.getItem();

    assertEquals(2, item.size());
    assertEquals(hashKey, item.get(HASHKEY_FIELD));
    assertEquals(rangeKey, item.get(RANGEKEY_FIELD));

    verify(client);
    verify(context);
}

From source file:com.wipro.ats.bdre.datagen.mr.RangeRecordReader.java

License:Apache License

@Override
public NullWritable getCurrentValue() {
    return NullWritable.get();
}

From source file:com.wipro.ats.bdre.dq.DQMapper.java

License:Apache License

@Override
public void map(LongWritable key, Text value, org.apache.hadoop.mapreduce.Mapper.Context context)
        throws IOException, InterruptedException {
    DQDataModel dqDataModel = validateRecord(value.toString());
    LOGGER.trace("map() :: " + value.toString() + " = " + dqDataModel.getmInvalidRecordMessage());
    if (dqDataModel.isValidRecord()) {
        goodRecords++;
        mOutputKey.set(dqDataModel.getmRecord());
        mos.write(DQConstants.GOOD_RECORDS_FILE, mOutputKey, NullWritable.get(),
                DQConstants.INTERMEDIATE_GOOD_RECORD_OUTPUT_DIR);
    } else {
        badRecords++;
        mOutputKey.set(dqDataModel.getmInvalidRecordMessage());
        mOutputValue.set(value.toString());
        mos.write(DQConstants.BAD_RECORDS_FILE, mOutputKey, mOutputValue,
                DQConstants.INTERMEDIATE_BAD_RECORD_OUTPUT_DIR);
    }
}

From source file:com.wipro.ats.bdre.dq.DQMapper.java

License:Apache License

@Override
public void cleanup(org.apache.hadoop.mapreduce.Mapper.Context context)
        throws IOException, InterruptedException {
    try {
        mos.write(DQConstants.FILE_REPORT_FILE, new Text(DQConstants.GOOD_RECORDS_FILE + " : " + goodRecords),
                NullWritable.get(), DQConstants.INTERMEDIATE_REPORT_OUTPUT_DIR);
        mos.write(DQConstants.FILE_REPORT_FILE, new Text(DQConstants.BAD_RECORDS_FILE + " : " + badRecords),
                NullWritable.get(), DQConstants.INTERMEDIATE_REPORT_OUTPUT_DIR);
    } catch (Exception e) {
        LOGGER.info(e);
        LOGGER.info("cleanup : " + e.toString());
    } finally {
        mos.close();
    }
}

From source file:com.yourcompany.hadoop.mapreduce.aggregate.UnionMapper.java

License:Apache License

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    context.write(NullWritable.get(), value);
}

From source file:com.yourcompany.hadoop.mapreduce.lexical.LexicalAnalyzerMapper.java

License:Apache License

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    String row = value.toString();
    TokenStream tokenStream = analyzer.tokenStream("dummy", new StringReader(row));
    tokenStream.reset();
    List<String> tokens = collectExtractedNouns(tokenStream);
    for (String token : tokens) {
        context.write(NullWritable.get(), new Text(token));
    }
}

From source file:com.zjy.mongo.input.BSONFileRecordReader.java

License:Apache License

@Override
public Object getCurrentKey() throws IOException, InterruptedException {
    Object key = null;
    if (fileSplit instanceof BSONFileSplit) {
        key = MongoPathRetriever.get(value, ((BSONFileSplit) fileSplit).getKeyField());
    }
    return key != null ? key : NullWritable.get();
}

From source file:com.zjy.mongo.input.MongoRecordReader.java

License:Apache License

@Override
public Object getCurrentKey() {
    Object key = MongoPathRetriever.get(current, split.getKeyField());
    return null != key ? key : NullWritable.get();
}

From source file:com.zjy.mongo.mapred.input.BSONFileRecordReader.java

License:Apache License

public NullWritable createKey() {
    return NullWritable.get();
}

From source file:comm.PrintKeysReducer.java

License:Open Source License

public void reduce(Text _key, Iterable<Object> values, Context context)
        throws IOException, InterruptedException {
    // Emit each distinct key once, with NullWritable as a placeholder value.
    context.write(_key, NullWritable.get());
}