Example usage for org.apache.hadoop.io NullWritable get

Introduction

This page collects example usages of org.apache.hadoop.io.NullWritable.get() from open-source projects.

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
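
Because the constructor is private, get() is the only way to obtain an instance, and every call hands back the same shared object; the instance itself writes no data when serialized. A minimal sketch of that singleton behaviour (the class name NullWritableDemo is made up for illustration):

import org.apache.hadoop.io.NullWritable;

public class NullWritableDemo {
    public static void main(String[] args) {
        // get() always returns the same shared instance.
        NullWritable a = NullWritable.get();
        NullWritable b = NullWritable.get();
        System.out.println(a == b); // prints true
    }
}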

Usage

From source file: org.apache.rya.reasoning.mr.MRReasoningUtils.java

License: Apache License

/**
 * Load serialized schema information from a file.
 */
public static Schema loadSchema(Configuration conf) {
    SchemaWritable schema = new SchemaWritable();
    try {
        FileSystem fs = FileSystem.get(conf);
        Path schemaPath = getSchemaPath(conf);
        if (fs.isDirectory(schemaPath)) {
            for (FileStatus status : fs.listStatus(schemaPath)) {
                schemaPath = status.getPath();
                if (status.isFile() && status.getLen() > 0 && !schemaPath.getName().startsWith(DEBUG_OUT)) {
                    break;
                }
            }
        }
        SequenceFile.Reader in = new SequenceFile.Reader(conf, SequenceFile.Reader.file(schemaPath));
        // get() returns the shared NullWritable singleton, used here as a placeholder key.
        NullWritable key = NullWritable.get();
        in.next(key, schema);
        in.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return schema;
}
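
The reader above expects the schema to have been written as a single SequenceFile record keyed by the NullWritable singleton. A hedged sketch of what the corresponding write side might look like (this writer is illustrative and not part of MRReasoningUtils; it reuses the conf, schemaPath, and schema names from the example):

// Illustrative counterpart: store the schema as one record whose key carries no data.
try (SequenceFile.Writer out = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(schemaPath),
        SequenceFile.Writer.keyClass(NullWritable.class),
        SequenceFile.Writer.valueClass(SchemaWritable.class))) {
    out.append(NullWritable.get(), schema);
}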

From source file: org.apache.solr.hadoop.ForkedTreeMergeMapper.java

License: Apache License

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    LOGGER.trace("map key: {}, value: {}", key, value);
    context.write(value, NullWritable.get());
}

From source file: org.apache.solr.hadoop.IdentityMapper.java

License: Apache License

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    LOGGER.info("map key: {}, value: {}", key, value);
    context.write(value, NullWritable.get());
}

From source file: org.apache.solr.hadoop.IdentityReducer.java

License: Apache License

@Override
protected void reduce(Text key, Iterable<NullWritable> values, Context context)
        throws IOException, InterruptedException {
    LOGGER.info("reduce key: {}, value: {}", key, values);
    context.write(key, NullWritable.get());
}
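
The mapper and reducer above both emit NullWritable values. A sketch of how a driver might wire them together, declaring NullWritable as the job's output value class (the job name and argument handling are hypothetical, not taken from the Solr code):

// Hypothetical driver for the identity mapper/reducer shown above.
Job job = Job.getInstance(conf, "identity-pass-through");
job.setJarByClass(IdentityMapper.class);
job.setMapperClass(IdentityMapper.class);
job.setReducerClass(IdentityReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(NullWritable.class); // values carry no data
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
System.exit(job.waitForCompletion(true) ? 0 : 1);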

From source file: org.apache.solr.hadoop.LineRandomizerMapperReducerTest.java

License: Apache License

@Test
public void testMapReduce1Item() throws IOException {
    mapReduceDriver.withInput(new LongWritable(0), new Text("hello"));
    mapReduceDriver.withOutput(new Text("hello"), NullWritable.get());
    mapReduceDriver.runTest();
}
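
These tests rely on a mapReduceDriver field that is not shown on this page. A hedged sketch of how it might be set up with MRUnit, assuming the LineRandomizer mapper and reducer types implied by the inputs and outputs above:

// Assumed setup; the real test class may construct the driver differently.
private MapReduceDriver<LongWritable, Text, LongWritable, Text, Text, NullWritable> mapReduceDriver;

@Before
public void setUp() {
    mapReduceDriver = MapReduceDriver.newMapReduceDriver(
            new LineRandomizerMapper(), new LineRandomizerReducer());
}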

From source file: org.apache.solr.hadoop.LineRandomizerMapperReducerTest.java

License: Apache License

@Test
public void testMapReduce2Items() throws IOException {
    mapReduceDriver.withAll(Arrays.asList(new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
            new Pair<LongWritable, Text>(new LongWritable(1), new Text("world"))));
    mapReduceDriver
            .withAllOutput(Arrays.asList(new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
                    new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())));
    mapReduceDriver.runTest();
}

From source file: org.apache.solr.hadoop.LineRandomizerMapperReducerTest.java

License: Apache License

@Test
public void testMapReduce3Items() throws IOException {
    mapReduceDriver.withAll(Arrays.asList(new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
            new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
            new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja"))));
    mapReduceDriver
            .withAllOutput(Arrays.asList(new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
                    new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
                    new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())));
    mapReduceDriver.runTest();
}

From source file: org.apache.solr.hadoop.LineRandomizerMapperReducerTest.java

License: Apache License

@Test
public void testMapReduce4Items() throws IOException {
    mapReduceDriver.withAll(Arrays.asList(new Pair<LongWritable, Text>(new LongWritable(0), new Text("hello")),
            new Pair<LongWritable, Text>(new LongWritable(1), new Text("world")),
            new Pair<LongWritable, Text>(new LongWritable(2), new Text("nadja")),
            new Pair<LongWritable, Text>(new LongWritable(3), new Text("basti"))));
    mapReduceDriver
            .withAllOutput(Arrays.asList(new Pair<Text, NullWritable>(new Text("nadja"), NullWritable.get()),
                    new Pair<Text, NullWritable>(new Text("world"), NullWritable.get()),
                    new Pair<Text, NullWritable>(new Text("basti"), NullWritable.get()),
                    new Pair<Text, NullWritable>(new Text("hello"), NullWritable.get())));
    mapReduceDriver.runTest();
}

From source file: org.apache.solr.hadoop.LineRandomizerReducer.java

License: Apache License

@Override
protected void reduce(LongWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
    for (Text value : values) {
        LOGGER.debug("reduce key: {}, value: {}", key, value);
        context.write(value, NullWritable.get());
    }
}