Example usage for org.apache.hadoop.io NullWritable get

Introduction

On this page you can find example usages of org.apache.hadoop.io.NullWritable#get(), collected from open-source projects.

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
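
Since get() always returns the same immutable singleton, NullWritable instances can be compared by reference and never need to be allocated. A minimal sketch (the NullWritableDemo class is hypothetical) illustrating the singleton behavior:

import org.apache.hadoop.io.NullWritable;

public class NullWritableDemo {
    public static void main(String[] args) {
        NullWritable a = NullWritable.get();
        NullWritable b = NullWritable.get();
        // get() always hands back the single shared instance.
        System.out.println(a == b); // true
    }
}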

Usage

From source file: org.apache.crunch.types.writable.WritablesTest.java

License: Apache License

@Test
public void testNulls() throws Exception {
    Void n = null;
    NullWritable nw = NullWritable.get();
    testInputOutputFn(Writables.nulls(), n, nw);
}

From source file: org.apache.crunch.types.writable.WritableValueConverter.java

License: Apache License

@Override
public Object outputKey(W value) {
    return NullWritable.get();
}
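
Returning NullWritable.get() whenever no key is needed mirrors plain MapReduce jobs that emit value-only output. A minimal sketch (ValueOnlyReducer and its type parameters are hypothetical, using the standard org.apache.hadoop.mapreduce API):

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class ValueOnlyReducer extends Reducer<Text, IntWritable, NullWritable, Text> {
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable v : values) {
            sum += v.get();
        }
        // The key slot carries the shared NullWritable instance; only the value is written.
        context.write(NullWritable.get(), new Text(key + "\t" + sum));
    }
}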

From source file: org.apache.druid.data.input.avro.AvroValueRecordReader.java

License: Apache License

/**
 * {@inheritDoc}
 */
@Override
public NullWritable getCurrentKey() {
    return NullWritable.get();
}
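
The same idea extends to an entire RecordReader: when the input has no meaningful key, getCurrentKey() can return the singleton on every call without allocating. A minimal sketch (ValueOnlyRecordReader is hypothetical; it delegates to Hadoop's LineRecordReader and discards the byte-offset key):

import java.io.IOException;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.LineRecordReader;

public class ValueOnlyRecordReader extends RecordReader<NullWritable, Text> {
    private final LineRecordReader delegate = new LineRecordReader();

    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException {
        delegate.initialize(split, context);
    }

    @Override
    public boolean nextKeyValue() throws IOException {
        return delegate.nextKeyValue();
    }

    @Override
    public NullWritable getCurrentKey() {
        return NullWritable.get(); // one shared instance, no per-record allocation
    }

    @Override
    public Text getCurrentValue() {
        return delegate.getCurrentValue();
    }

    @Override
    public float getProgress() throws IOException {
        return delegate.getProgress();
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }
}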

From source file: org.apache.druid.indexer.HadoopDruidIndexerMapperTest.java

License: Apache License

@Test
public void testHadoopyStringParserWithTransformSpec() throws Exception {
    final HadoopDruidIndexerConfig config = new HadoopDruidIndexerConfig(
            new HadoopIngestionSpec(
                    DATA_SCHEMA
                            .withTransformSpec(
                                    new TransformSpec(new SelectorDimFilter("dim1", "foo", null),
                                            ImmutableList.of(new ExpressionTransform("dim1t",
                                                    "concat(dim1,dim1)", ExprMacroTable.nil())))),
                    IO_CONFIG, TUNING_CONFIG));

    final MyMapper mapper = new MyMapper();
    final Configuration hadoopConfig = new Configuration();
    hadoopConfig.set(HadoopDruidIndexerConfig.CONFIG_PROPERTY,
            HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsString(config));
    final Mapper.Context mapContext = EasyMock.mock(Mapper.Context.class);
    EasyMock.expect(mapContext.getConfiguration()).andReturn(hadoopConfig).once();
    EasyMock.expect(mapContext.getCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_THROWN_AWAY_COUNTER))
            .andReturn(getTestCounter());
    EasyMock.replay(mapContext);
    mapper.setup(mapContext);
    final List<Map<String, Object>> rows = ImmutableList.of(
            ImmutableMap.of("t", "2000-01-01T00:00:00.000Z", "dim1", "foo", "dim2", "x", "m1", 1.0),
            ImmutableMap.of("t", "2000-01-01T00:00:00.000Z", "dim1", "bar", "dim2", "y", "m1", 1.0),
            ImmutableMap.of("t", "2000-01-01T00:00:00.000Z", "dim1", "foo", "dim2", "z", "m1", 1.0));
    for (Map<String, Object> row : rows) {
        mapper.map(NullWritable.get(), new Text(JSON_MAPPER.writeValueAsString(row)), mapContext);
    }
    assertRowListEquals(ImmutableList.of(
            ImmutableMap.of("t", "2000-01-01T00:00:00.000Z", "dim1", "foo", "dim1t", "foofoo", "dim2", "x",
                    "m1", 1.0),
            ImmutableMap.of("t", "2000-01-01T00:00:00.000Z", "dim1", "foo", "dim1t", "foofoo", "dim2", "z",
                    "m1", 1.0)),
            mapper.getRows());
}

From source file: org.apache.flink.test.hadoop.mapred.HadoopIOFormatsITCase.java

License: Apache License

@Override
protected void preSubmit() throws Exception {
    resultPath = new String[] { getTempDirPath("result0"), getTempDirPath("result1") };

    File sequenceFile = createAndRegisterTempFile("seqFile");
    sequenceFileInPath = sequenceFile.toURI().toString();

    // Create a sequence file
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    FileSystem fs = FileSystem.get(URI.create(sequenceFile.getAbsolutePath()), conf);
    Path path = new Path(sequenceFile.getAbsolutePath());

    //  ------------------ Long / Text Key Value pair: ------------
    int kvCount = 4;

    LongWritable key = new LongWritable();
    Text value = new Text();
    SequenceFile.Writer writer = null;
    try {
        writer = SequenceFile.createWriter(fs, conf, path, key.getClass(), value.getClass());
        for (int i = 0; i < kvCount; i++) {
            if (i == 1) {
                // write key = 1 a bit more often.
                for (int a = 0; a < 15; a++) {
                    key.set(i);
                    value.set(i + " - somestring");
                    writer.append(key, value);
                }
            }
            key.set(i);
            value.set(i + " - somestring");
            writer.append(key, value);
        }
    } finally {
        IOUtils.closeStream(writer);
    }

    //  ------------------ Null Key / Long Value pair: ------------

    File sequenceFileNull = createAndRegisterTempFile("seqFileNullKey");
    sequenceFileInPathNull = sequenceFileNull.toURI().toString();
    path = new Path(sequenceFileInPathNull);

    LongWritable value1 = new LongWritable();
    SequenceFile.Writer writer1 = null;
    try {
        writer1 = SequenceFile.createWriter(fs, conf, path, NullWritable.class, value1.getClass());
        for (int i = 0; i < kvCount; i++) {
            value1.set(i);
            writer1.append(NullWritable.get(), value1);
        }
    } finally {
        IOUtils.closeStream(writer1);
    }
}
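
For completeness, a minimal sketch (not part of the Flink test) of reading the null-keyed file back with the classic SequenceFile.Reader API; NullWritable.get() doubles as the reusable key buffer, since deserializing it consumes zero bytes:

SequenceFile.Reader reader = null;
try {
    reader = new SequenceFile.Reader(fs, path, conf);
    NullWritable key = NullWritable.get();
    LongWritable value = new LongWritable();
    while (reader.next(key, value)) {
        System.out.println(value.get());
    }
} finally {
    IOUtils.closeStream(reader);
}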

From source file: org.apache.flume.channel.recoverable.memory.wal.WAL.java

License: Apache License

public void writeSequenceIDs(List<Long> sequenceIDs) throws IOException {
    Preconditions.checkNotNull(sequenceIDWALWriter, "Writer is null, close must have been called");
    synchronized (this) {
        if (isRollRequired()) {
            roll();
        }
    }
    waitWhileRolling();
    boolean error = true;
    try {
        List<WALEntry<NullWritable>> entries = Lists.newArrayList();
        for (Long sequenceID : sequenceIDs) {
            largestCommitedSequenceID.set(Math.max(sequenceID, largestCommitedSequenceID.get()));
            entries.add(new WALEntry<NullWritable>(NullWritable.get(), sequenceID));
        }
        // Append the whole batch once, after all entries have been collected.
        sequenceIDWALWriter.append(entries);
        error = false;
    } finally {
        if (error) {
            rollRequired = true;
        }
    }
}

From source file: org.apache.giraph.block_app.framework.BlockApiHandleTest.java

License: Apache License

private static TestGraph<LongWritable, LongWritable, NullWritable> createTestGraph() {
    TestGraph<LongWritable, LongWritable, NullWritable> graph = new TestGraph<>(createConf());
    graph.addVertex(new LongWritable(1), new LongWritable());
    graph.addVertex(new LongWritable(2), new LongWritable());
    graph.addVertex(new LongWritable(3), new LongWritable());
    graph.addVertex(new LongWritable(4), new LongWritable());
    graph.addEdge(new LongWritable(1), new LongWritable(2), NullWritable.get());
    graph.addEdge(new LongWritable(2), new LongWritable(1), NullWritable.get());
    graph.addEdge(new LongWritable(2), new LongWritable(3), NullWritable.get());
    graph.addEdge(new LongWritable(3), new LongWritable(2), NullWritable.get());
    return graph;
}
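
In Giraph, NullWritable is the conventional edge-value type for unweighted graphs, so edges carry no payload at all. A minimal sketch (the CountEdges class is hypothetical) of a computation over such a graph:

import org.apache.giraph.graph.BasicComputation;
import org.apache.giraph.graph.Vertex;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;

public class CountEdges
        extends BasicComputation<LongWritable, LongWritable, NullWritable, LongWritable> {
    @Override
    public void compute(Vertex<LongWritable, LongWritable, NullWritable> vertex,
            Iterable<LongWritable> messages) {
        // Store the out-degree as the vertex value; the edges themselves hold no data.
        vertex.getValue().set(vertex.getNumEdges());
        vertex.voteToHalt();
    }
}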

From source file: org.apache.giraph.block_app.framework.BlockExecutionTest.java

License: Apache License

private static TestGraph<LongWritable, LongWritable, NullWritable> createTestGraph() {
    TestGraph<LongWritable, LongWritable, NullWritable> graph = new TestGraph<LongWritable, LongWritable, NullWritable>(
            createConf());
    graph.addVertex(new LongWritable(1), new LongWritable());
    graph.addVertex(new LongWritable(2), new LongWritable());
    graph.addVertex(new LongWritable(3), new LongWritable());
    graph.addVertex(new LongWritable(4), new LongWritable());

    graph.addEdge(new LongWritable(1), new LongWritable(2), NullWritable.get());
    graph.addEdge(new LongWritable(2), new LongWritable(1), NullWritable.get());
    graph.addEdge(new LongWritable(2), new LongWritable(3), NullWritable.get());
    graph.addEdge(new LongWritable(3), new LongWritable(2), NullWritable.get());
    return graph;
}

From source file: org.apache.giraph.block_app.framework.TestWorkerMessages.java

License: Apache License

@Test
public void testWorkerMessages() throws Exception {
    GiraphConfiguration conf = new GiraphConfiguration();
    BlockUtils.setAndInitBlockFactoryClass(conf, TestWorkerMessagesBlockFactory.class);
    TestGraph testGraph = new TestGraph(conf);
    testGraph.addEdge(new LongWritable(1), new LongWritable(2), NullWritable.get());
    LocalBlockRunner.runApp(testGraph);
}

From source file: org.apache.giraph.block_app.library.pagerank.PageRankInitializeAndNormalizeEdgesPiece.java

License: Apache License

@Override
public VertexSender<I, V, DoubleWritable> getVertexSender(
        final BlockWorkerSendApi<I, V, DoubleWritable, NullWritable> workerApi, Object executionStage) {
    final NullWritable reusableMessage = NullWritable.get();
    return vertex -> {
        if (vertex.getNumEdges() > 0) {
            // Normalize edge weights if vertex has out edges
            double weightSum = 0.0;
            for (Edge<I, DoubleWritable> edge : vertex.getEdges()) {
                weightSum += edge.getValue().get();
            }
            for (MutableEdge<I, DoubleWritable> edge : vertex.getMutableEdges()) {
                edge.setValue(new DoubleWritable(edge.getValue().get() / weightSum));
            }
            // Make sure all the vertices are created
            workerApi.sendMessageToAllEdges(vertex, reusableMessage);
        }
    };
}
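
Caching the result of NullWritable.get() in a field, as reusableMessage does here, is safe precisely because the class is a stateless singleton: every caller already shares the same instance, so reusing it across messages costs nothing and cannot leak state between vertices.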