Example usage for the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double)

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double value), collected from open-source projects.

Prototype

public DoubleWritable(double value) 
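
A minimal, self-contained sketch of the constructor in context, covering the surrounding Writable contract (get/set, serialization, comparison); the class name DoubleWritableDemo is illustrative, not part of Hadoop:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
    public static void main(String[] args) throws Exception {
        // the constructor under discussion: wraps a primitive double
        DoubleWritable dw = new DoubleWritable(42.5);
        System.out.println(dw.get()); // 42.5

        // Writables are mutable; set() overwrites the held value in place
        dw.set(1.25);

        // serialize to bytes, then read back into a fresh instance
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        dw.write(new DataOutputStream(bytes));
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        // DoubleWritable is a WritableComparable, so instances have a total order
        System.out.println(copy.compareTo(dw)); // 0: both hold 1.25
    }
}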

Usage

From source file: org.apache.hama.graph.TestAbsDiffAggregator.java

License: Apache License

@Test
public void testAggregator() {
    AbsDiffAggregator diff = new AbsDiffAggregator();
    diff.aggregate(new DoubleWritable(5), new DoubleWritable(2));
    diff.aggregate(new DoubleWritable(5), new DoubleWritable(2));
    diff.aggregate(null, new DoubleWritable(5));

    // stays 0: the counter is only bumped by aggregateInternal(), which this
    // test never calls (compare TestAverageAggregator below)
    assertEquals(0, diff.getTimesAggregated().get());
    // |5 - 2| + |5 - 2| = 6; the call with a null old value contributes nothing
    assertEquals(6, (int) diff.getValue().get());

}

From source file: org.apache.hama.graph.TestAverageAggregator.java

License: Apache License

@Test
public void testAggregator() {
    AverageAggregator diff = new AverageAggregator();
    diff.aggregate(new DoubleWritable(5), new DoubleWritable(2));
    diff.aggregateInternal();
    diff.aggregate(new DoubleWritable(5), new DoubleWritable(2));
    diff.aggregateInternal();
    diff.aggregate(null, new DoubleWritable(5));
    diff.aggregateInternal();

    assertEquals(3, diff.getTimesAggregated().get());
    DoubleWritable x = diff.finalizeAggregation();
    assertEquals(2, (int) x.get());

}

From source file: org.apache.hama.graph.TestDiskVerticesInfo.java

License: Apache License

@Test
public void testDiskVerticesInfoLifeCycle() throws Exception {
    DiskVerticesInfo<Text, NullWritable, DoubleWritable> info = new DiskVerticesInfo<Text, NullWritable, DoubleWritable>();
    HamaConfiguration conf = new HamaConfiguration();
    conf.set(GraphJob.VERTEX_CLASS_ATTR, PageRankVertex.class.getName());
    conf.set(GraphJob.VERTEX_EDGE_VALUE_CLASS_ATTR, NullWritable.class.getName());
    conf.set(GraphJob.VERTEX_ID_CLASS_ATTR, Text.class.getName());
    conf.set(GraphJob.VERTEX_VALUE_CLASS_ATTR, DoubleWritable.class.getName());
    GraphJobRunner.<Text, NullWritable, DoubleWritable>initClasses(conf);
    TaskAttemptID attempt = new TaskAttemptID("omg", 1, 1, 0);
    try {
        ArrayList<PageRankVertex> list = new ArrayList<PageRankVertex>();

        for (int i = 0; i < 10; i++) {
            PageRankVertex v = new PageRank.PageRankVertex();
            v.setVertexID(new Text(i + ""));
            if (i % 2 == 0) {
                v.setValue(new DoubleWritable(i * 2));
            }
            v.addEdge(new Edge<Text, NullWritable>(new Text((10 - i) + ""), null));

            list.add(v);
        }

        info.init(null, conf, attempt);
        for (PageRankVertex v : list) {
            info.addVertex(v);
        }

        info.finishAdditions();
        // implicitly finish the superstep here, as the new soft file must be
        // generated and the currentStep must be incremented etc.
        info.finishSuperstep();

        assertEquals(10, info.size());
        // now we want to iterate and check that the result can properly be obtained
        info.startSuperstep();
        int index = 0;
        IDSkippingIterator<Text, NullWritable, DoubleWritable> iterator = info.skippingIterator();
        while (iterator.hasNext()) {
            Vertex<Text, NullWritable, DoubleWritable> next = iterator.next();
            PageRankVertex pageRankVertex = list.get(index);
            assertEquals(pageRankVertex.getVertexID().toString(), next.getVertexID().toString());
            if (index % 2 == 0) {
                assertEquals((int) next.getValue().get(), index * 2);
            } else {
                assertNull(next.getValue());
            }
            assertEquals(next.isHalted(), false);
            // check edges
            List<Edge<Text, NullWritable>> edges = next.getEdges();
            assertEquals(1, edges.size());
            Edge<Text, NullWritable> edge = edges.get(0);
            assertEquals(pageRankVertex.getEdges().get(0).getDestinationVertexID().toString(),
                    edge.getDestinationVertexID().toString());
            assertNull(edge.getValue());
            info.finishVertexComputation(next);
            index++;
        }
        assertEquals(index, list.size());
        info.finishSuperstep();
        // iterate again and compute so vertices change internally
        info.startSuperstep();
        iterator = info.skippingIterator();
        while (iterator.hasNext()) {
            Vertex<Text, NullWritable, DoubleWritable> next = iterator.next();
            // override everything with constant 2
            next.setValue(new DoubleWritable(2));
            if (Integer.parseInt(next.getVertexID().toString()) == 3) {
                next.voteToHalt();
            }
            info.finishVertexComputation(next);
        }
        info.finishSuperstep();

        index = 0;
        // now reread
        info.startSuperstep();
        iterator = info.skippingIterator();
        while (iterator.hasNext()) {
            Vertex<Text, NullWritable, DoubleWritable> next = iterator.next();
            PageRankVertex pageRankVertex = list.get(index);
            assertEquals(pageRankVertex.getVertexID().toString(), next.getVertexID().toString());
            assertEquals((int) next.getValue().get(), 2);
            // check edges
            List<Edge<Text, NullWritable>> edges = next.getEdges();
            assertEquals(1, edges.size());
            Edge<Text, NullWritable> edge = edges.get(0);
            assertEquals(pageRankVertex.getEdges().get(0).getDestinationVertexID().toString(),
                    edge.getDestinationVertexID().toString());
            assertNull(edge.getValue());
            if (index == 3) {
                assertEquals(true, next.isHalted());
            }

            index++;
        }
        assertEquals(index, list.size());

    } finally {
        info.cleanup(conf, attempt);
    }

}

From source file: org.apache.hama.graph.TestOffHeapVerticesInfo.java

License: Apache License

@Test
public void testOffHeapVerticesInfoLifeCycle() throws Exception {
    OffHeapVerticesInfo<Text, NullWritable, DoubleWritable> info = new OffHeapVerticesInfo<Text, NullWritable, DoubleWritable>();
    HamaConfiguration conf = new HamaConfiguration();
    conf.set(GraphJob.VERTEX_CLASS_ATTR, PageRankVertex.class.getName());
    conf.set(GraphJob.VERTEX_EDGE_VALUE_CLASS_ATTR, NullWritable.class.getName());
    conf.set(GraphJob.VERTEX_ID_CLASS_ATTR, Text.class.getName());
    conf.set(GraphJob.VERTEX_VALUE_CLASS_ATTR, DoubleWritable.class.getName());
    GraphJobRunner.<Text, NullWritable, DoubleWritable>initClasses(conf);
    TaskAttemptID attempt = new TaskAttemptID("123", 1, 1, 0);
    try {
        ArrayList<PageRankVertex> list = new ArrayList<PageRankVertex>();

        for (int i = 0; i < 10; i++) {
            PageRankVertex v = new PageRankVertex();
            v.setVertexID(new Text(i + ""));
            if (i % 2 == 0) {
                v.setValue(new DoubleWritable(i * 2));
            }
            v.addEdge(new Edge<Text, NullWritable>(new Text((10 - i) + ""), null));

            list.add(v);
        }

        info.init(null, conf, attempt);
        for (PageRankVertex v : list) {
            info.addVertex(v);
        }

        info.finishAdditions();

        assertEquals(10, info.size());
        // now we want to iterate and check that the result can properly be obtained

        int index = 0;
        IDSkippingIterator<Text, NullWritable, DoubleWritable> iterator = info.skippingIterator();
        while (iterator.hasNext()) {
            Vertex<Text, NullWritable, DoubleWritable> next = iterator.next();
            PageRankVertex pageRankVertex = list.get(index);
            assertEquals(pageRankVertex.getVertexID().toString(), next.getVertexID().toString());
            if (index % 2 == 0) {
                assertEquals((int) next.getValue().get(), index * 2);
            } else {
                assertNull(next.getValue());
            }
            assertEquals(next.isHalted(), false);
            // check edges
            List<Edge<Text, NullWritable>> edges = next.getEdges();
            assertEquals(1, edges.size());
            Edge<Text, NullWritable> edge = edges.get(0);
            assertEquals(pageRankVertex.getEdges().get(0).getDestinationVertexID().toString(),
                    edge.getDestinationVertexID().toString());
            assertNull(edge.getValue());

            index++;
        }
        assertEquals(index, list.size());
        info.finishSuperstep();
        // iterate again and compute so vertices change internally
        iterator = info.skippingIterator();
        info.startSuperstep();
        while (iterator.hasNext()) {
            Vertex<Text, NullWritable, DoubleWritable> next = iterator.next();
            // override everything with constant 2
            next.setValue(new DoubleWritable(2));
            if (Integer.parseInt(next.getVertexID().toString()) == 3) {
                next.voteToHalt();
            }
            info.finishVertexComputation(next);
        }
        info.finishSuperstep();
        assertEquals(index, list.size());

    } finally {
        info.cleanup(conf, attempt);
    }

}

From source file: org.apache.hama.graph.TestOffHeapVerticesInfo.java

License: Apache License

@Test
public void testMassiveAdditionWithDefaults() throws Exception {
    OffHeapVerticesInfo<Text, NullWritable, DoubleWritable> verticesInfo = new OffHeapVerticesInfo<Text, NullWritable, DoubleWritable>();
    HamaConfiguration conf = new HamaConfiguration();
    verticesInfo.init(null, conf, null);
    assertEquals("vertices info size should be 0 at startup", 0, verticesInfo.size());
    Random r = new Random();
    int i = 10000;
    for (int n = 0; n < i; n++) {
        Vertex<Text, NullWritable, DoubleWritable> vertex = new PageRankVertex();
        vertex.setVertexID(new Text(String.valueOf(r.nextInt())));
        vertex.setValue(new DoubleWritable(r.nextDouble()));
        verticesInfo.addVertex(vertex);
    }
    verticesInfo.finishAdditions();
    assertEquals("vertices info size is not correct", i, verticesInfo.size());
}

From source file: org.apache.hama.graph.TestSumAggregator.java

License: Apache License

@Test
public void testAggregator() {
    SumAggregator diff = new SumAggregator();
    diff.aggregate(new DoubleWritable(5));
    diff.aggregate(new DoubleWritable(5));
    assertEquals(10, (int) diff.getValue().get());

}

From source file: org.apache.hama.ml.recommendation.Preference.java

License: Apache License

public Preference(U userId, I itemId, double value) {
    this.userId = userId;
    this.itemId = itemId;
    this.value = new DoubleWritable(value);
}

From source file: org.apache.hama.ml.recommendation.Preference.java

License: Apache License

public void setValue(double value) {
    this.value = new DoubleWritable(value);
}
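
Both Preference snippets above allocate a fresh DoubleWritable on every assignment. A common Hadoop idiom, when the wrapper can be reused, is to keep one mutable instance and overwrite it with set(double); a minimal sketch of that variant (the class name Rating is illustrative, not part of Hama):

import org.apache.hadoop.io.DoubleWritable;

public class Rating {
    // one long-lived Writable, reused across assignments
    private final DoubleWritable value = new DoubleWritable(0.0);

    public void setValue(double value) {
        // overwrite in place instead of allocating a new DoubleWritable
        this.value.set(value);
    }

    public DoubleWritable getValue() {
        return value;
    }
}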

From source file: org.apache.hama.ml.regression.GradientDescentBSP.java

License: Apache License

@Override
public void bsp(BSPPeer<VectorWritable, DoubleWritable, VectorWritable, DoubleWritable, VectorWritable> peer)
        throws IOException, SyncException, InterruptedException {
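    // overall shape: each block below is one BSP superstep; a peer does its
    // local work, broadcasts the partial result, hits the sync() barrier,
    // then aggregates what the other peers sent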
    // 0a superstep: get initial theta
    getInitialTheta(peer);

    // 0b superstep: count items
    int itemCount = 0;
    while (peer.readNext() != null) {
        // increment counter
        itemCount++;
    }
    broadcastVector(peer, new double[] { itemCount });
    peer.sync();

    // aggregate number of items
    aggregateItemsNumber(peer, itemCount);

    peer.reopenInput();

    int iterations = 0;
    while (true) {

        // first superstep : calculate cost function in parallel
        double localCost = calculateLocalCost(peer);

        // the cost is sent to and aggregated by each peer
        broadcastVector(peer, new double[] { localCost });
        peer.sync();

        // second superstep : aggregate cost calculation
        double totalCost = aggregateTotalCost(peer, localCost);

        // cost check
        if (checkCost(peer, iterations, totalCost))
            break;

        peer.sync();
        peer.reopenInput();

        // third superstep : calculate partial derivatives' deltas in parallel
        double[] thetaDelta = calculatePartialDerivatives(peer);

        // send thetaDelta to each peer
        broadcastVector(peer, thetaDelta);

        peer.sync();

        // fourth superstep : aggregate partial derivatives
        double[] newTheta = aggregatePartialDerivatives(peer, thetaDelta);

        // update theta
        updateTheta(newTheta);

        if (log.isDebugEnabled()) {
            log.debug("{}: new theta for cost {} is {}", new Object[] { peer.getPeerName(), cost, theta });
        }
        // master writes down the output
        if (master) {
            peer.write(new VectorWritable(theta), new DoubleWritable(cost));
        }

        peer.reopenInput();
        peer.sync();

        iterations++;
    }
}

From source file: org.apache.hama.ml.regression.GradientDescentBSP.java

License: Apache License

@Override
public void cleanup(
        BSPPeer<VectorWritable, DoubleWritable, VectorWritable, DoubleWritable, VectorWritable> peer)
        throws IOException {
    // master writes down the final output
    if (master) {
        peer.write(new VectorWritable(theta), new DoubleWritable(cost));
        if (log.isInfoEnabled()) {
            log.info("{}:computation finished with cost {} and theta {}",
                    new Object[] { peer.getPeerName(), cost, theta });
        }
    }
}