Example usage for org.apache.hadoop.io NullWritable get

Introduction

On this page you can find example usage for org.apache.hadoop.io.NullWritable.get().

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
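NullWritable has a zero-length serialized form, and get() always hands back the same instance. A minimal standalone sketch (illustrative, not taken from the examples below):

import org.apache.hadoop.io.NullWritable;

public class NullWritableSingletonDemo {
    public static void main(String[] args) {
        // get() returns the same singleton every time it is called.
        NullWritable a = NullWritable.get();
        NullWritable b = NullWritable.get();
        System.out.println(a == b); // prints true
    }
}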

Usage

From source file: org.apache.mahout.math.stats.entropy.CalculateSpecificConditionalEntropyMapper.java

License: Apache License
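
Here the mapper drops its Text key and re-emits each value under the NullWritable singleton, so only the values travel to the reducer.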

@Override
protected void map(Text key, DoubleWritable value, Context context) throws IOException, InterruptedException {
    context.write(NullWritable.get(), value);
}
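
A job using such a mapper must declare matching map output types. A hedged sketch of that wiring (the actual Mahout driver is not shown on this page; class and job names are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;

public class EntropyJobSketch {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "entropy-sketch");
        // The map output types must match what the mapper writes:
        // NullWritable keys and DoubleWritable values.
        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(DoubleWritable.class);
    }
}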

From source file: org.apache.mahout.text.LuceneSegmentRecordReaderTest.java

License: Apache License
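
This test walks a Lucene segment and checks that the reader produces document ids as keys while every value is the NullWritable singleton.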

@Test
public void testKey() throws Exception {
    for (SegmentCommitInfo segmentInfo : segmentInfos) {
        int docId = 0;
        LuceneSegmentInputSplit inputSplit = new LuceneSegmentInputSplit(getIndexPath1(), segmentInfo.info.name,
                segmentInfo.sizeInBytes());
        TaskAttemptContext context = getTaskAttemptContext(configuration, new TaskAttemptID());
        recordReader.initialize(inputSplit, context);
        for (int i = 0; i < 500; i++) {
            recordReader.nextKeyValue();
            // The segment order is not guaranteed, so the id is either i or i + 500.
            assertTrue("i = " + i + " docId= " + docId,
                    String.valueOf(docId).equals(recordReader.getCurrentKey().toString())
                            || String.valueOf(docId + 500).equals(recordReader.getCurrentKey().toString()));
            assertEquals(NullWritable.get(), recordReader.getCurrentValue());
            docId++;
        }
    }
}

From source file: org.apache.mnemonic.mapred.MneMapredBufferDataTest.java

License: Apache License
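
The Mnemonic tests that follow share one pattern: NullWritable.get() supplies the key for every record handed to a RecordWriter. This first variant uses the old mapred API to write durable buffers.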

@Test(enabled = true)
public void testWriteBufferData() throws Exception {
    NullWritable nada = NullWritable.get();
    MneDurableOutputSession<DurableBuffer<?>> sess = new MneDurableOutputSession<DurableBuffer<?>>(null, m_conf,
            MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
    MneDurableOutputValue<DurableBuffer<?>> mdvalue = new MneDurableOutputValue<DurableBuffer<?>>(sess);
    OutputFormat<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> outputFormat = new MneOutputFormat<MneDurableOutputValue<DurableBuffer<?>>>();
    RecordWriter<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> writer = outputFormat
            .getRecordWriter(m_fs, m_conf, null, null);
    DurableBuffer<?> dbuf = null;
    Checksum cs = new CRC32();
    cs.reset();
    for (int i = 0; i < m_reccnt; ++i) {
        dbuf = genupdDurableBuffer(sess, cs);
        Assert.assertNotNull(dbuf);
        writer.write(nada, mdvalue.of(dbuf));
    }
    m_checksum = cs.getValue();
    writer.close(null);
    sess.close();
}

From source file: org.apache.mnemonic.mapreduce.MneMapreduceBufferDataTest.java

License: Apache License
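
The same buffer test against the newer mapreduce API, where the RecordWriter is obtained from a TaskAttemptContext.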

@Test(enabled = true)
public void testWriteBufferData() throws Exception {
    NullWritable nada = NullWritable.get();
    MneDurableOutputSession<DurableBuffer<?>> sess = new MneDurableOutputSession<DurableBuffer<?>>(m_tacontext,
            null, MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
    MneDurableOutputValue<DurableBuffer<?>> mdvalue = new MneDurableOutputValue<DurableBuffer<?>>(sess);
    OutputFormat<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> outputFormat = new MneOutputFormat<MneDurableOutputValue<DurableBuffer<?>>>();
    RecordWriter<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> writer = outputFormat
            .getRecordWriter(m_tacontext);
    DurableBuffer<?> dbuf = null;
    Checksum cs = new CRC32();
    cs.reset();
    for (int i = 0; i < m_reccnt; ++i) {
        dbuf = genupdDurableBuffer(sess, cs);
        Assert.assertNotNull(dbuf);
        writer.write(nada, mdvalue.of(dbuf));
    }
    m_checksum = cs.getValue();
    writer.close(m_tacontext);
    sess.close();
}

From source file: org.apache.mnemonic.mapreduce.MneMapreduceChunkDataTest.java

License: Apache License
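
The same pattern, writing durable chunks instead of buffers.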

@Test(enabled = true)
public void testWriteChunkData() throws Exception {
    NullWritable nada = NullWritable.get();
    MneDurableOutputSession<DurableChunk<?>> sess = new MneDurableOutputSession<DurableChunk<?>>(m_tacontext,
            null, MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
    MneDurableOutputValue<DurableChunk<?>> mdvalue = new MneDurableOutputValue<DurableChunk<?>>(sess);
    OutputFormat<NullWritable, MneDurableOutputValue<DurableChunk<?>>> outputFormat = new MneOutputFormat<MneDurableOutputValue<DurableChunk<?>>>();
    RecordWriter<NullWritable, MneDurableOutputValue<DurableChunk<?>>> writer = outputFormat
            .getRecordWriter(m_tacontext);
    DurableChunk<?> dchunk = null;
    Checksum cs = new CRC32();
    cs.reset();
    for (int i = 0; i < m_reccnt; ++i) {
        dchunk = genupdDurableChunk(sess, cs);
        Assert.assertNotNull(dchunk);
        writer.write(nada, mdvalue.of(dchunk));
    }
    m_checksum = cs.getValue();
    writer.close(m_tacontext);
    sess.close();
}

From source file: org.apache.mnemonic.mapreduce.MneMapreduceLongDataTest.java

License: Apache License
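
The same pattern with plain Long values; a running sum replaces the checksum bookkeeping.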

@Test(enabled = true)
public void testWriteLongData() throws Exception {
    NullWritable nada = NullWritable.get();
    MneDurableOutputSession<Long> sess = new MneDurableOutputSession<Long>(m_tacontext, null,
            MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
    MneDurableOutputValue<Long> mdvalue = new MneDurableOutputValue<Long>(sess);
    OutputFormat<NullWritable, MneDurableOutputValue<Long>> outputFormat = new MneOutputFormat<MneDurableOutputValue<Long>>();
    RecordWriter<NullWritable, MneDurableOutputValue<Long>> writer = outputFormat.getRecordWriter(m_tacontext);
    Long val = null;
    for (int i = 0; i < m_reccnt; ++i) {
        val = m_rand.nextLong();
        m_sum += val;
        writer.write(nada, mdvalue.of(val));
    }
    writer.close(m_tacontext);
    sess.close();
}

From source file: org.apache.mnemonic.mapreduce.MneMapreducePersonDataTest.java

License: Apache License
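
The same pattern with durable Person records, populated with random ages and names.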

@Test(enabled = true)
public void testWritePersonData() throws Exception {
    NullWritable nada = NullWritable.get();
    MneDurableOutputSession<Person<Long>> sess = new MneDurableOutputSession<Person<Long>>(m_tacontext, null,
            MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
    MneDurableOutputValue<Person<Long>> mdvalue = new MneDurableOutputValue<Person<Long>>(sess);
    OutputFormat<NullWritable, MneDurableOutputValue<Person<Long>>> outputFormat = new MneOutputFormat<MneDurableOutputValue<Person<Long>>>();
    RecordWriter<NullWritable, MneDurableOutputValue<Person<Long>>> writer = outputFormat
            .getRecordWriter(m_tacontext);
    Person<Long> person = null;
    for (int i = 0; i < m_reccnt; ++i) {
        person = sess.newDurableObjectRecord();
        person.setAge((short) m_rand.nextInt(50));
        person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
        m_sumage += person.getAge();
        writer.write(nada, mdvalue.of(person));
    }
    writer.close(m_tacontext);
    sess.close();
}

From source file: org.apache.mrql.Bag.java

License: Apache License
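
In MRQL's Bag, a spilled bag lives in a local SequenceFile whose keys carry the data; NullWritable.get() fills the unused value slot when appending.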

/** add a new value to a Bag (cache it in memory if necessary)
 * @param x the new value
 */
public void add(final MRData x) {
    materialize();
    if (!spilled() && Config.hadoop_mode && size() >= Config.max_materialized_bag)
        spill();
    if (spilled())
        try {
            if (writer == null) { // writer was closed earlier for reading
                FileSystem fs = FileSystem.getLocal(Plan.conf);
                writer = SequenceFile.createWriter(fs, Plan.conf, new Path(path), MRContainer.class,
                        NullWritable.class, SequenceFile.CompressionType.NONE);
                System.err.println("*** Appending elements to a spilled Bag: " + path);
            }
            writer.append(new MRContainer(x), NullWritable.get());
        } catch (IOException e) {
            throw new Error("Cannot append an element to a spilled Bag: " + path);
        }
    else
        content.add(x);
}

From source file: org.apache.mrql.Bag.java

License: Apache License
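
Spilling the in-memory content writes every element with the same placeholder value.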

/** spill the Bag to a local file */
private void spill() {
    if (!spilled() && Config.hadoop_mode)
        try {
            if (Plan.conf == null)
                Plan.conf = Evaluator.evaluator.new_configuration();
            final FileSystem fs = FileSystem.getLocal(Plan.conf);
            path = new_path(fs);
            System.err.println("*** Spilling a Bag to a local file: " + path);
            final Path p = new Path(path);
            writer = SequenceFile.createWriter(fs, Plan.conf, new Path(path), MRContainer.class,
                    NullWritable.class, SequenceFile.CompressionType.NONE);
            for (MRData e : this)
                writer.append(new MRContainer(e), NullWritable.get());
            mode = Modes.SPILLED;
            content = null;
            iterator = null;
        } catch (Exception e) {
            throw new Error("Cannot spill a Bag to a local file");
        }
}

From source file: org.apache.mrql.Bag.java

License: Apache License
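
Reading a spilled bag back: the singleton is handed to reader.next(key, value) as the value buffer, and the data comes out of the key.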

/** return the Bag Iterator */
public Iterator<MRData> iterator() {
    if (spilled())
        try {
            if (writer != null)
                writer.close();
            writer = null;
            return new BagIterator() {
                final FileSystem fs = FileSystem.getLocal(Plan.conf);
                final SequenceFile.Reader reader = new SequenceFile.Reader(fs, new Path(path), Plan.conf);
                final MRContainer key = new MRContainer();
                final NullWritable value = NullWritable.get();
                MRData data;

                public boolean hasNext() {
                    try {
                        if (!reader.next(key, value)) {
                            reader.close();
                            return false;
                        }
                        data = key.data();
                        return true;
                    } catch (IOException e) {
                        throw new Error("Cannot collect values from a spilled Bag");
                    }
                }

                public MRData next() {
                    return data;
                }
            };
        } catch (IOException e) {
            throw new Error("Cannot collect values from a spilled Bag");
        }
    else if (materialized())
        return content.iterator();
    else {
        if (consumed) // this should never happen
            throw new Error("*** The collection stream has already been consumed");
        consumed = true;
        return iterator;
    }
}
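
To round off the Bag examples, a self-contained sketch of the underlying SequenceFile pattern: the data travels in the key and NullWritable.get() fills the value slot. The file path and class name are illustrative; the createWriter and Reader calls mirror the ones in the Bag code above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class NullValueSequenceFileDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        Path path = new Path("/tmp/null-value-demo.seq");

        // Write: the key carries the data, the value is the singleton.
        SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path,
                Text.class, NullWritable.class, SequenceFile.CompressionType.NONE);
        writer.append(new Text("payload"), NullWritable.get());
        writer.close();

        // Read: the singleton also serves as the (no-op) value buffer.
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        Text key = new Text();
        while (reader.next(key, NullWritable.get())) {
            System.out.println(key);
        }
        reader.close();
    }
}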