Example usage for org.apache.hadoop.io NullWritable get

Introduction

On this page you can find example usages of org.apache.hadoop.io.NullWritable.get().

Prototype

public static NullWritable get() 

Document

Returns the single instance of this class.
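
NullWritable carries no data: get() always returns the same shared, immutable instance, which is typically passed wherever the API requires a key or value that carries no information. The following minimal sketch (the file path and the Text payload are invented for illustration) writes a SequenceFile whose values are all NullWritable.get() and reads the keys back:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;

public class NullWritableSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path path = new Path("/tmp/nullwritable-example.seq"); // hypothetical location

        // Write Text keys only; NullWritable.get() fills the unused value slot.
        SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, path, Text.class, NullWritable.class);
        try {
            writer.append(new Text("first record"), NullWritable.get());
            writer.append(new Text("second record"), NullWritable.get());
        } finally {
            writer.close();
        }

        // Read the keys back; the NullWritable values occupy no space in the file.
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
        try {
            Text key = new Text();
            while (reader.next(key, NullWritable.get())) {
                System.out.println(key);
            }
        } finally {
            reader.close();
        }
    }
}

Because the instance is a singleton and serializes to zero bytes, there is nothing to construct or reuse between calls; every call site simply asks for NullWritable.get().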

Usage

From source file:org.apache.mahout.graph.common.SimplifyGraphJobTest.java

License:Apache License

@Test
public void testEdgeMapping() throws Exception {
    Mapper.Context ctx = EasyMock.createMock(Mapper.Context.class);

    ctx.write(new UndirectedEdge(new Vertex(123L), new Vertex(456L)), NullWritable.get());

    EasyMock.replay(ctx);

    new SimplifyGraphJob.SimplifyGraphMapper().map(null, new Text("123,456"), ctx);

    EasyMock.verify(ctx);
}

From source file:org.apache.mahout.graph.common.SimplifyGraphJobTest.java

License:Apache License

@Test
public void testAggregation() throws Exception {
    Reducer.Context ctx = EasyMock.createMock(Reducer.Context.class);
    UndirectedEdge edge = new UndirectedEdge(new Vertex(123L), new Vertex(456L));

    ctx.write(edge, NullWritable.get());

    EasyMock.replay(ctx);

    new SimplifyGraphJob.SimplifyGraphReducer().reduce(edge,
            Arrays.asList(NullWritable.get(), NullWritable.get()), ctx);

    EasyMock.verify(ctx);
}

From source file:org.apache.mahout.graph.components.TestFindKTrussesJob.java

License:Apache License

@SuppressWarnings({ "unchecked", "rawtypes" })
@Test
public void testSplitTrianglesToEdges() throws Exception {

    Mapper.Context ctx = EasyMock.createMock(Mapper.Context.class);

    ctx.write(new UndirectedEdge(1, 2), ONE);
    ctx.write(new UndirectedEdge(1, 3), ONE);
    //    ctx.write(new UndirectedEdge(2, 3), new IntWritable(2));
    ctx.write(new UndirectedEdge(2, 3), ONE);
    ctx.write(new UndirectedEdge(2, 3), ONE);
    ctx.write(new UndirectedEdge(2, 4), ONE);
    ctx.write(new UndirectedEdge(3, 4), ONE);

    EasyMock.replay(ctx);

    new FindKTrussesJob.SplitTrianglesToEdgesMapper().map(new Triangle(1, 2, 3), NullWritable.get(), ctx);
    new FindKTrussesJob.SplitTrianglesToEdgesMapper().map(new Triangle(2, 3, 4), NullWritable.get(), ctx);

    EasyMock.verify(ctx);
}

From source file:org.apache.mahout.graph.GraphTestCase.java

License:Apache License

protected <T extends WritableComparable> void writeComponents(File destination, Configuration conf,
        Class<T> componentClass, T... components) throws IOException {
    Path path = new Path(destination.getAbsolutePath());
    FileSystem fs = FileSystem.get(path.toUri(), conf);

    SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, path, componentClass, NullWritable.class);
    try {
        for (T component : components) {
            writer.append(component, NullWritable.get());
        }
    } finally {
        Closeables.closeQuietly(writer);
    }
}
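
This helper writes each component as a key and NullWritable.get() as the value, so a test only has to list the records it needs. A hypothetical call from a subclass test (the file name is made up; the UndirectedEdge(int, int) constructor appears in the other examples on this page) could look like this:

@Test
public void writesEdgesAsSequenceFile() throws Exception {
    File edgesFile = getTestTempFile("edges.seq"); // hypothetical temp file
    // Each edge becomes a key; NullWritable.get() stands in for the absent value.
    writeComponents(edgesFile, new Configuration(), UndirectedEdge.class,
            new UndirectedEdge(1, 2), new UndirectedEdge(2, 3));
}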

From source file:org.apache.mahout.graph.triangles.EnumerateTrianglesJobTest.java

License:Apache License

@Test
public void testJoinTriangles() throws Exception {
    Reducer.Context ctx = EasyMock.createMock(Reducer.Context.class);

    ctx.write(new Triangle(0, 1, 2), NullWritable.get());
    ctx.write(new Triangle(0, 2, 3), NullWritable.get());

    EasyMock.replay(ctx);

    new EnumerateTrianglesJob.JoinTrianglesReducer().reduce(new JoinableUndirectedEdge(0, 2, true),
            Arrays.asList(VertexOrMarker.MARKER, new VertexOrMarker(1), new VertexOrMarker(3)), ctx);

    EasyMock.verify(ctx);
}

From source file:org.apache.mahout.graph.triangles.EnumerateTrianglesJobTest.java

License:Apache License

@Test
public void toyIntegrationTest() throws Exception {
    File inputFile = getTestTempFile("edgesWithDegrees.seq");
    File outputDir = getTestTempDir("output");
    outputDir.delete();
    File tempDir = getTestTempDir("tmp");

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf,
            new Path(inputFile.getAbsolutePath()), UndirectedEdgeWithDegrees.class,
            NullWritable.class);

    try {
        writer.append(new UndirectedEdgeWithDegrees(0, 7, 1, 3), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(0, 7, 2, 3), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(0, 7, 3, 3), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(0, 7, 4, 3), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(0, 7, 5, 2), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(0, 7, 6, 1), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(0, 7, 7, 2), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(1, 3, 2, 3), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(1, 3, 3, 3), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(2, 3, 3, 3), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(4, 3, 5, 2), NullWritable.get());
        writer.append(new UndirectedEdgeWithDegrees(4, 3, 7, 2), NullWritable.get());
    } finally {
        Closeables.closeQuietly(writer);
    }

    EnumerateTrianglesJob enumerateTrianglesJob = new EnumerateTrianglesJob();
    enumerateTrianglesJob.setConf(conf);
    enumerateTrianglesJob.run(new String[] { "--input", inputFile.getAbsolutePath(), "--output",
            outputDir.getAbsolutePath(), "--tempDir", tempDir.getAbsolutePath() });

    Set<Triangle> triangles = Sets.newHashSet();
    for (Pair<Triangle, NullWritable> result : new SequenceFileIterable<Triangle, NullWritable>(
            new Path(outputDir.getAbsolutePath() + "/part-r-00000"), false, conf)) {
        triangles.add(result.getFirst());
    }

    assertEquals(6, triangles.size());
    assertTrue(triangles.contains(new Triangle(0, 1, 2)));
    assertTrue(triangles.contains(new Triangle(0, 1, 3)));
    assertTrue(triangles.contains(new Triangle(0, 2, 3)));
    assertTrue(triangles.contains(new Triangle(0, 4, 5)));
    assertTrue(triangles.contains(new Triangle(0, 4, 7)));
    assertTrue(triangles.contains(new Triangle(1, 2, 3)));
}

From source file:org.apache.mahout.math.hadoop.stochasticsvd.qr.QRFirstStep.java

License:Apache License

private void outputQHat(DenseBlockWritable value) throws IOException {
    qtHatOut.collect(NullWritable.get(), value);
}

From source file:org.apache.mahout.math.hadoop.stochasticsvd.qr.QRFirstStep.java

License:Apache License

private void outputR(VectorWritable value) throws IOException {
    rHatOut.collect(NullWritable.get(), value);
}

From source file:org.apache.mahout.math.hadoop.TimesSquaredJob.java

License:Apache License

public static Job createTimesSquaredJob(Configuration initialConf, Vector v, int outputVectorDim,
        Path matrixInputPath, Path outputVectorPathBase, Class<? extends TimesSquaredMapper> mapClass,
        Class<? extends VectorSummingReducer> redClass) throws IOException {

    FileSystem fs = FileSystem.get(matrixInputPath.toUri(), initialConf);
    matrixInputPath = fs.makeQualified(matrixInputPath);
    outputVectorPathBase = fs.makeQualified(outputVectorPathBase);

    long now = System.nanoTime();
    Path inputVectorPath = new Path(outputVectorPathBase, INPUT_VECTOR + '/' + now);

    SequenceFile.Writer inputVectorPathWriter = null;

    try {
        inputVectorPathWriter = new SequenceFile.Writer(fs, initialConf, inputVectorPath, NullWritable.class,
                VectorWritable.class);
        inputVectorPathWriter.append(NullWritable.get(), new VectorWritable(v));
    } finally {
        Closeables.close(inputVectorPathWriter, false);
    }

    URI ivpURI = inputVectorPath.toUri();
    DistributedCache.setCacheFiles(new URI[] { ivpURI }, initialConf);

    Job job = HadoopUtil.prepareJob(matrixInputPath, new Path(outputVectorPathBase, OUTPUT_VECTOR_FILENAME),
            SequenceFileInputFormat.class, mapClass, NullWritable.class, VectorWritable.class, redClass,
            NullWritable.class, VectorWritable.class, SequenceFileOutputFormat.class, initialConf);
    job.setCombinerClass(redClass);
    job.setJobName("TimesSquaredJob: " + matrixInputPath);

    Configuration conf = job.getConfiguration();
    conf.set(INPUT_VECTOR, ivpURI.toString());
    conf.setBoolean(IS_SPARSE_OUTPUT, !v.isDense());
    conf.setInt(OUTPUT_VECTOR_DIMENSION, outputVectorDim);

    return job;
}

From source file:org.apache.mahout.math.stats.entropy.CalculateEntropyMapper.java

License:Apache License

@Override
protected void map(Text key, VarIntWritable value, Context context) throws IOException, InterruptedException {
    result.set(value.get() * Math.log(value.get()));
    context.write(NullWritable.get(), result);
}