Example usage for org.apache.hadoop.io NullWritable get

Introduction

On this page you can find usage examples for org.apache.hadoop.io.NullWritable#get().

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
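
Because NullWritable carries no data, NullWritable.get() always returns the same shared instance; it is used wherever the MapReduce API requires a key or value slot but there is nothing to store. A minimal, self-contained sketch (the class name and output path are illustrative, not taken from the examples below) that writes a SequenceFile with NullWritable keys:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class NullWritableExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Illustrative output path
        Path path = new Path("/tmp/nullwritable-example.seq");
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                SequenceFile.Writer.file(path),
                SequenceFile.Writer.keyClass(NullWritable.class),
                SequenceFile.Writer.valueClass(Text.class))) {
            // NullWritable.get() returns the singleton; it serializes to zero
            // bytes, so the file effectively stores only the values.
            writer.append(NullWritable.get(), new Text("value without a key"));
        }
    }
}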

Usage

From source file: org.apache.jena.hadoop.rdf.io.output.AbstractNodeTupleOutputFormatTests.java

License: Apache License

/**
 * Tests output
 * 
 * @param f
 *            File to output to
 * @param num
 *            Number of tuples to output
 * @throws IOException
 * @throws InterruptedException
 */
protected final void testOutput(File f, int num) throws IOException, InterruptedException {
    // Prepare configuration
    Configuration config = this.prepareConfiguration();

    // Set up fake job
    OutputFormat<NullWritable, T> outputFormat = this.getOutputFormat();
    Job job = Job.getInstance(config);
    job.setOutputFormatClass(outputFormat.getClass());
    this.addOutputPath(f, job.getConfiguration(), job);
    JobContext context = new JobContextImpl(job.getConfiguration(), job.getJobID());
    Assert.assertNotNull(FileOutputFormat.getOutputPath(context));

    // Output the data
    TaskAttemptID id = new TaskAttemptID("outputTest", 1, TaskType.MAP, 1, 1);
    TaskAttemptContext taskContext = new TaskAttemptContextImpl(job.getConfiguration(), id);
    RecordWriter<NullWritable, T> writer = outputFormat.getRecordWriter(taskContext);
    Iterator<T> tuples = this.generateTuples(num);
    while (tuples.hasNext()) {
        writer.write(NullWritable.get(), tuples.next());
    }
    writer.close(taskContext);

    // Check output
    File outputFile = this.findOutputFile(this.folder.getRoot(), context);
    Assert.assertNotNull(outputFile);
    this.checkTuples(outputFile, num);
}

From source file: org.apache.jena.hadoop.rdf.io.RdfTriplesInputTestMapper.java

License: Apache License

@Override
protected void map(LongWritable key, TripleWritable value, Context context)
        throws IOException, InterruptedException {
    LOG.info("Line " + key.toString() + " => " + value.toString());
    context.write(NullWritable.get(), value);
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.characteristics.AbstractCharacteristicSetGeneratingReducer.java

License: Apache License

/**
 * Output all sets of a given size
 * 
 * @param cs
 *            Characteristics
 * @param perSet
 *            Set size
 * @param context
 *            Context to output sets to
 * @throws IOException
 * @throws InterruptedException
 */
protected void outputSets(List<CharacteristicWritable> cs, int perSet, Context context)
        throws IOException, InterruptedException {
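    // Three cases: singleton sets, the single full-size set, or all
    // combinations of the requested size in between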
    if (perSet == 1) {
        for (CharacteristicWritable c : cs) {
            CharacteristicSetWritable set = new CharacteristicSetWritable(c);
            context.write(set, NullWritable.get());
            if (this.tracing) {
                LOG.trace("Key = {}", set);
            }
        }
    } else if (perSet == cs.size()) {
        CharacteristicSetWritable set = new CharacteristicSetWritable();
        for (CharacteristicWritable c : cs) {
            set.add(c);
        }
        context.write(set, NullWritable.get());
        if (this.tracing) {
            LOG.trace("Key = {}", set);
        }
    } else {
        CharacteristicWritable[] members = new CharacteristicWritable[perSet];
        this.combinations(cs, perSet, 0, members, context);
    }
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.characteristics.AbstractCharacteristicSetGeneratingReducer.java

License: Apache License

/**
 * Calculate all available combinations of N elements from the given
 * characteristics
 * 
 * @param cs
 *            Characteristics
 * @param len
 *            Desired number of elements
 * @param startPosition
 *            Start position
 * @param result
 *            Result array to fill
 * @param context
 *            Context to write completed combinations to
 * @throws IOException
 * @throws InterruptedException
 */
protected final void combinations(List<CharacteristicWritable> cs, int len, int startPosition,
        CharacteristicWritable[] result, Context context) throws IOException, InterruptedException {
    if (len == 0) {
        CharacteristicSetWritable set = new CharacteristicSetWritable(result);
        context.write(set, NullWritable.get());
        if (this.tracing) {
            LOG.trace("Key = {}", set);
        }
        return;
    }
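    // Try each remaining candidate in the next free slot, then recurse to
    // pick the other len - 1 elements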
    for (int i = startPosition; i <= cs.size() - len; i++) {
        result[result.length - len] = cs.get(i);
        combinations(cs, len - 1, i + 1, result, context);
    }
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.characteristics.AbstractCharacteristicSetGeneratingReducerTests.java

License: Apache License

/**
 * Creates a set consisting of the given predicates
 *
 * @param driver
 *            Driver to add the expected outputs to
 * @param occurrences
 *            Number of times the set is expected in the output
 * @param predicates
 *            Predicates
 * @return Set
 */
protected CharacteristicSetWritable createSet(
        MapReduceDriver<LongWritable, T, NodeWritable, T, CharacteristicSetWritable, NullWritable> driver,
        int occurrences, String... predicates) {
    CharacteristicSetWritable set = new CharacteristicSetWritable();
    for (String predicateUri : predicates) {
        set.add(new CharacteristicWritable(NodeFactory.createURI(predicateUri)));
    }
    for (int i = 1; i <= occurrences; i++) {
        driver.addOutput(set, NullWritable.get());
    }
    return set;
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.characteristics.CharacteristicSetReducer.java

License: Apache License

@Override
protected void reduce(CharacteristicSetWritable key, Iterable<CharacteristicSetWritable> values,
        Context context) throws IOException, InterruptedException {
    Iterator<CharacteristicSetWritable> iter = values.iterator();
    CharacteristicSetWritable output = new CharacteristicSetWritable(0);

    if (this.tracing) {
        LOG.trace("Key = {}", key);
    }

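    // Accumulate every set grouped under this key, summing their counts
    // into a single output set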
    while (iter.hasNext()) {
        CharacteristicSetWritable set = iter.next();
        if (this.tracing) {
            LOG.trace("Value = {}", set);
        }
        output.add(set);
    }

    context.write(output, NullWritable.get());
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.characteristics.CharacteristicSetReducerTest.java

License: Apache License

/**
 * Creates a set consisting of the given predicates
 *
 * @param driver
 *            Driver to add inputs and expected outputs to
 * @param inputOccurrences
 *            Number of times the set is added as an input
 * @param outputOccurrences
 *            Number of times the set is expected in the output
 * @param predicates
 *            Predicates
 * @return Set
 */
protected CharacteristicSetWritable createSet(
        MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver,
        int inputOccurrences, int outputOccurrences, String... predicates) {
    CharacteristicSetWritable set = new CharacteristicSetWritable();
    for (String predicateUri : predicates) {
        set.add(new CharacteristicWritable(NodeFactory.createURI(predicateUri)));
    }
    for (int i = 1; i <= inputOccurrences; i++) {
        driver.addInput(set, set);
    }
    for (int i = 1; i <= outputOccurrences; i++) {
        driver.addOutput(set, NullWritable.get());
    }
    return set;
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.KeyPlusNullMapper.java

License: Apache License

@Override
protected void map(TKey key, TValue value, Context context) throws IOException, InterruptedException {
    if (this.tracing) {
        LOG.trace("Key = {}", key);
    }
    context.write(key, NullWritable.get());
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.NullPlusKeyMapper.java

License: Apache License

@Override
protected void map(TKey key, TValue value, Context context) throws IOException, InterruptedException {
    if (this.tracing) {
        LOG.trace("Key = {}", key);
    }
    context.write(NullWritable.get(), key);
}

From source file: org.apache.jena.hadoop.rdf.mapreduce.NullPlusKeyReducer.java

License: Apache License

@Override
protected void reduce(TKey key, Iterable<TValue> values, Context context)
        throws IOException, InterruptedException {
    if (this.tracing) {
        LOGGER.trace("Input Key = {}", key);
        Iterator<TValue> iter = values.iterator();
        while (iter.hasNext()) {
            LOGGER.trace("Input Value = {}", iter.next());
        }
    }
    context.write(NullWritable.get(), key);
}