Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double value).

Prototype

public DoubleWritable(double value) 
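
A minimal sketch of the constructor in isolation, showing the value accessor and the Writable round-trip; the class name and stream setup are chosen for illustration only.

import java.io.*;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableExample {
    public static void main(String[] args) throws IOException {
        DoubleWritable dw = new DoubleWritable(3.14); // the constructor documented above
        System.out.println(dw.get());                 // 3.14
        dw.set(2.71);                                 // mutable, so instances can be reused

        // Writable round-trip: serialize, then read back into a fresh instance.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        dw.write(new DataOutputStream(bytes));
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.get());               // 2.71
    }
}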

Usage

From source file:org.apache.giraph.edge.TestStrictGraphEdges.java

License:Apache License

private void testParallelEdgesClass(Class<? extends OutEdges> edgesClass) {
    OutEdges<LongWritable, DoubleWritable> edges = instantiateOutEdges(edgesClass);

    // Initial edges list contains parallel edges.
    List<Edge<LongWritable, DoubleWritable>> initialEdges = Lists.newArrayList(
            EdgeFactory.create(new LongWritable(1), new DoubleWritable(1)),
            EdgeFactory.create(new LongWritable(2), new DoubleWritable(2)),
            EdgeFactory.create(new LongWritable(3), new DoubleWritable(3)),
            EdgeFactory.create(new LongWritable(2), new DoubleWritable(20)));

    edges.initialize(initialEdges);

    // Only one of the two parallel edges should be left.
    assertEquals(3, edges.size());

    // Adding a parallel edge shouldn't change the number of edges.
    edges.add(EdgeFactory.create(new LongWritable(3), new DoubleWritable(30)));
    assertEquals(3, edges.size());
}

From source file:org.apache.giraph.edge.TestStrictRandomAccessEdges.java

License:Apache License

private void testParallelEdgesClass(Class<? extends StrictRandomAccessOutEdges> edgesClass) {
    StrictRandomAccessOutEdges<LongWritable, DoubleWritable> edges =
            (StrictRandomAccessOutEdges<LongWritable, DoubleWritable>) instantiateOutEdges(edgesClass);

    // Initial edges list contains parallel edges.
    List<Edge<LongWritable, DoubleWritable>> initialEdges = Lists.newArrayList(
            EdgeFactory.create(new LongWritable(1), new DoubleWritable(1)),
            EdgeFactory.create(new LongWritable(2), new DoubleWritable(2)),
            EdgeFactory.create(new LongWritable(3), new DoubleWritable(3)),
            EdgeFactory.create(new LongWritable(2), new DoubleWritable(20)));

    edges.initialize(initialEdges);

    assertEquals(3.0, edges.getEdgeValue(new LongWritable(3)).get(), 0.0);
    assertNull(edges.getEdgeValue(new LongWritable(55)));

    edges.setEdgeValue(new LongWritable(2), new DoubleWritable(33.0));
    assertEquals(33.0, edges.getEdgeValue(new LongWritable(2)).get(), 0);
}

From source file:org.apache.giraph.examples.DeltaPageRankComputation.java

License:Apache License

@Override
public void compute(Vertex<LongWritable, DoubleWritable, NullWritable> vertex,
        Iterable<DoubleWritable> messages) throws IOException {

    // NOTE: We follow GraphLab's alternative way of computing PageRank,
    // which is to not divide by |V|. To get the probability value at
    // each vertex, take its PageRank value and divide by |V|.
    double delta = 0;

    if (getLogicalSuperstep() == 0) {
        vertex.getValue().set(0.0);
        delta = 0.15;
    }

    for (DoubleWritable message : messages) {
        delta += message.get();
    }
    vertex.getValue().set(vertex.getValue().get() + delta);

    if (getLogicalSuperstep() < MAX_SS.get(getConf()) && delta > 0) {
        sendMessageToAllEdges(vertex, new DoubleWritable(0.85 * delta / vertex.getNumEdges()));
    }

    // always vote to halt
    vertex.voteToHalt();
}
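
To make explicit what the vertex-centric code above converges to, here is a minimal single-machine sketch of the same delta-PageRank recurrence; the method name and the int[][] adjacency-list representation are assumptions for illustration, not part of the Giraph example.

// Sequential sketch: rank starts at 0, the 0.15 teleport term is seeded as the
// first delta, and each round forwards 0.85 * delta / outDegree to out-neighbours.
static double[] deltaPageRank(int[][] outEdges, int maxSupersteps) {
    int n = outEdges.length;
    double[] rank = new double[n];           // vertex values start at 0.0, as in superstep 0 above
    double[] delta = new double[n];
    java.util.Arrays.fill(delta, 0.15);      // seeded once, like delta = 0.15 at superstep 0
    for (int ss = 0; ss < maxSupersteps; ss++) {
        double[] next = new double[n];
        for (int v = 0; v < n; v++) {
            if (delta[v] <= 0) {
                continue;                    // nothing received, nothing to propagate
            }
            rank[v] += delta[v];             // vertex.getValue().set(value + delta)
            if (outEdges[v].length > 0) {
                double share = 0.85 * delta[v] / outEdges[v].length;
                for (int w : outEdges[v]) {  // sendMessageToAllEdges(...)
                    next[w] += share;
                }
            }
        }
        delta = next;                        // messages are consumed in the next superstep
    }
    return rank;                             // divide each entry by n to recover the probability value
}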

From source file:org.apache.giraph.examples.DeltaPageRankTolFinderComputation.java

License:Apache License

@Override
public void compute(Vertex<LongWritable, DoubleWritable, NullWritable> vertex,
        Iterable<DoubleWritable> messages) throws IOException {

    // NOTE: We follow GraphLab's alternative way of computing PageRank,
    // which is to not divide by |V|. To get the probability value at
    // each vertex, take its PageRank value and divide by |V|.
    double delta = 0;

    if (getLogicalSuperstep() == 0) {
        vertex.getValue().set(0.0);
        delta = 0.15;
    }

    for (DoubleWritable message : messages) {
        delta += message.get();
    }

    // Termination condition based on max supersteps
    if (getLogicalSuperstep() < MAX_SS.get(getConf()) && delta > 0) {
        vertex.getValue().set(vertex.getValue().get() + delta);
        sendMessageToAllEdges(vertex, new DoubleWritable(0.85 * delta / vertex.getNumEdges()));
    }

    aggregate(MAX_AGG, new DoubleWritable(delta));

    // always vote to halt
    vertex.voteToHalt();
}

From source file:org.apache.giraph.examples.DeltaTolPageRankComputation.java

License:Apache License

@Override
public void compute(Vertex<LongWritable, DoubleWritable, NullWritable> vertex,
        Iterable<DoubleWritable> messages) throws IOException {

    // YH: We'll use a trick to match how GraphLab async performs
    // PageRank w/ error tolerance termination.
    //
    // Unlike GraphLab async, which can directly pull from neighbours,
    // we always need to send messages to keep neighbours up-to-date.
    // However, this also wakes up neighbours, which is not desirable.
    //
    // So we use two types of messages:
    // - update + signal => do more work to help me converge
    //   (equivalent to GraphLab's scatter/signal)
    // - update only => here's my final delta, I'm done
    //   (implicit in GraphLab's gather)
    //
    // Since deltas are always positive, we use positive value for
    // update+signal and negative for update-only.

    // NOTE: We follow GraphLab's alternative way of computing PageRank,
    // which is to not divide by |V|. To get the probability value at
    // each vertex, take its PageRank value and divide by |V|.
    double delta = 0;
    boolean signalled = false;

    if (getLogicalSuperstep() == 0) {
        vertex.getValue().set(0.0);
        delta = 0.15;
        signalled = true;
    }

    for (DoubleWritable message : messages) {
        if (message.get() > 0) {
            signalled = true;
        }
        delta += Math.abs(message.get());
    }

    vertex.getValue().set(vertex.getValue().get() + delta);
    boolean converged = delta <= MIN_TOL.get(getConf());

    // send messages only when signalled
    if (delta > 0 && signalled) {
        if (!converged) {
            // update+signal message (need more help)
            sendMessageToAllEdges(vertex, new DoubleWritable(0.85 * delta / vertex.getNumEdges()));
        } else {
            // update only (I'm done)
            sendMessageToAllEdges(vertex, new DoubleWritable(-0.85 * delta / vertex.getNumEdges()));
        }
    }

    // always vote to halt
    vertex.voteToHalt();
}

From source file:org.apache.giraph.examples.DoubleSumAggregator.java

License:Apache License

@Override
public DoubleWritable getAggregatedValue() {
    return new DoubleWritable(sum);
}
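
For context, a minimal sketch of how the surrounding sum aggregator might keep its state; everything except getAggregatedValue() is an assumption for illustration and is not copied from the Giraph source.

import org.apache.hadoop.io.DoubleWritable;

// Illustrative sum aggregator: workers call aggregate() with partial values and
// getAggregatedValue() wraps the running total in a fresh DoubleWritable.
public class SimpleDoubleSumAggregator {
    private double sum;

    public void aggregate(DoubleWritable value) {
        sum += value.get();
    }

    public DoubleWritable getAggregatedValue() {
        return new DoubleWritable(sum);
    }

    public void reset() {
        sum = 0.0;
    }
}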

From source file:org.apache.giraph.examples.Giraphx.java

License:Apache License

public void compute_original_pagerank(Iterator<Text> msgIterator) {
    // Get the number of vertices that have a change in pagerank greater than tolerance
    LongSumAggregator prsumAggreg = (LongSumAggregator) getAggregator("prsum");
    long prDiffSum = -1;
    if (getSuperstep() > 1) {
        prDiffSum = GiraphxWorkerContext.prDifferenceSum;
        if (getVertexId().get() == 0) {
            Log.info("SEREF Pagerank difference is: " + prDiffSum);
        }
    }

    // Halt if max superstep is reached or no vertex has a significant value change
    if (getSuperstep() > MAX_SUPERSTEPS || prDiffSum == 0) {
        voteToHalt();
        return;
    }

    if (getSuperstep() == 0) { // Init pageRank value at ss=0
        needsOperation = false;
        double init = 1.0f;// / (double)NUM_VERTICES;
        //tolerance = init / 100.0;
        setVertexValue(new Text(init + ""));
    }

    if (getSuperstep() >= 1) {
        double sum = 0;
        while (msgIterator.hasNext()) {
            sum += Double.parseDouble(msgIterator.next().toString());
        }
        DoubleWritable vertexValue = new DoubleWritable((0.15f / getNumVertices()) + 0.85f * sum);
        double diff = Math.abs(
                Double.parseDouble(getVertexValue().toString()) - Double.parseDouble(vertexValue.toString()));
        //            Log.info("SEREF: Old and new pagerank values are: " + getVertexValue() +" -> " + vertexValue);

        String ctr_name = "PageRank difference in superstep " + getSuperstep();
        getContext().getCounter("Giraph Stats", ctr_name).increment((long) (diff / tolerance));
        if (diff > tolerance) {
            prsumAggreg.aggregate(1);
        }
        setVertexValue(new Text(vertexValue.toString()));
    }

    if (getSuperstep() < MAX_SUPERSTEPS && prDiffSum != 0) {
        sendMsgToAllEdges(new Text((Double.parseDouble(getVertexValue().toString()) / getNumOutEdges()) + ""));
    }
}

From source file:org.apache.giraph.examples.Giraphx.java

License:Apache License

private void compute_giraphx_pagerank(Iterator<Text> msgIterator) {
    // Get the number of vertices that have a change in pagerank greater than tolerance
    LongSumAggregator prsumAggreg = (LongSumAggregator) getAggregator("prsum");
    long prDiffSum = -1;
    if (getSuperstep() > 1) {
        prDiffSum = GiraphxWorkerContext.prDifferenceSum;
        if (getVertexId().get() == 0) {
            Log.info("SEREF Pagerank difference is: " + prDiffSum);
        }
    }

    if (getSuperstep() > MAX_SUPERSTEPS || prDiffSum == 0) {
        voteToHalt();
        vertexValues.clear();
        return;
    }

    if (getSuperstep() == 0) {
        needsOperation = false;
        double init = 1.0f;// / (double)NUM_VERTICES;
        //tolerance = init / 100.0;
        setVertexValue(new Text(init + ""));

        destEdgeIndexList.remove(getVertexId());
        Text value = new Text((Double.parseDouble(getVertexValue().toString()) / getNumOutEdges()) + "");

        sendMsgToAllEdges(new Text("M1:" + getVertexId() + ":" + value));
        return;
    }

    double diff = -1;

    if (getSuperstep() >= 1) {
        double sum = 0;
        while (msgIterator.hasNext()) { // in this loop nonlocal neighbor pagerank values are read
            String tmp = msgIterator.next().toString();
            long id = Long.parseLong(tmp.split(":")[1]);
            double val = Double.parseDouble(tmp.split(":")[2]);
            vertexValues.put(new LongWritable(id), new Text(val + ""));
            if (getSuperstep() == 1) {
                incomingEdgeIndexList.add(new LongWritable(id));
            }
        }
        if (getSuperstep() == 1) {
            incomingEdgeIndexList.remove(getVertexId());
        } else {
            readPagerankFromNeighbors(vertexValues, incomingEdgeIndexList);
        }
        Iterator<Entry<LongWritable, Text>> vit = vertexValues.entrySet().iterator();
        while (vit.hasNext()) {
            Entry<LongWritable, Text> e = vit.next();
            double value = Double.parseDouble(e.getValue().toString());
            sum += value;
        }
        DoubleWritable vertexValue = new DoubleWritable((0.15f / getNumVertices()) + 0.85f * sum);
        diff = Math.abs(
                Double.parseDouble(getVertexValue().toString()) - Double.parseDouble(vertexValue.toString()));

        String ctr_name = "PageRank difference in superstep " + getSuperstep();
        getContext().getCounter("Giraph Stats", ctr_name).increment((long) (diff / tolerance));
        if (diff > tolerance) {
            prsumAggreg.aggregate(1);
            setVertexValue(new Text(vertexValue.toString()));
        }
    }

    if (getSuperstep() < MAX_SUPERSTEPS && diff > tolerance) {
        long edges = getNumOutEdges();
        String vval = (Double.parseDouble(getVertexValue().toString()) / edges) + "";
        String msg = "M1:" + getVertexId() + ":" + vval;
        sendMsgToDistantEdges(new Text(msg), destEdgeIndexList);
    }
    needsOperation = false;
    voteToHalt();

}

From source file:org.apache.giraph.examples.Giraphx.java

License:Apache License

private void compute_tGiraphx_coloring(Iterator<Text> msgIterator, boolean token2) {

    if (getSuperstep() > MAX_SUPERSTEPS) {
        voteToHalt();
        vertexValues.clear();
        available = null;
        needsOperation = false;
        return;
    }

    MaxAggregator maxAggreg = (MaxAggregator) getAggregator("max");

    //Log.info("SEREF Vertex "+getVertexId()+" started at superstep "+getSuperstep());
    if (getSuperstep() == 0) { // So that each vertex also learns its incoming edge IDs
        destEdgeIndexList.remove(getVertexId());
        sendMsgToAllEdges(new Text("M1:" + getVertexId() + ":" + getVertexValue()));
        return;
    }

    Iterator<Text> it = msgIterator;
    while (it.hasNext()) {
        String tmp = it.next().toString();
        //Log.info("SEREF Incoming Message is:\t"+tmp);
        long neighbor_id = Long.parseLong(tmp.split(":")[1]);

        if (getSuperstep() == 1) {
            incomingEdgeIndexList.add(new LongWritable(neighbor_id));
        } else {
            needsOperation = true;
            msgListMainMap.put(neighbor_id, tmp);
        }
    }

    //Log.info("SEREF msgListMainMap size is:\t"+msgListMainMap.size());
    if (getSuperstep() == 1) { // initialize all edges list at superstep 1
        incomingEdgeIndexList.remove(getVertexId());
        initAllEdgeIndexList();
        isInternal = checkIsInternal(allEdgeIndexList);
        if (isInternal) {
            getContext().getCounter("Giraph Stats", "Local vertex count").increment(1);
        }
        sendMsgToDistantEdges(new Text("M1:" + getVertexId() + ":" + getVertexValue()), allEdgeIndexList);
        maxAggreg.aggregate(new DoubleWritable(allEdgeIndexList.size()));
        return;
    }

    if (getSuperstep() == 2) {
        int degree = (int) GiraphxWorkerContext.maxDegree;
        colorsLength = 2 * degree + 5;
    }

    //Log.info("SEREF isInternal and token values Message is:\t"+isInternal+"\t"+token);    
    boolean isUpdated = false;
    if (isInternal || (!isInternal && token)) {
        isUpdated = operate_tGiraph_coloring(msgListMainMap);
    }

    if (isUpdated) {
        String msg = "M1:" + getVertexId() + ":" + getVertexValue().toString();
        //Log.info("SEREF sent message "+msg);
        sendMsgToDistantEdges(new Text(msg), allEdgeIndexList);
    }

    available = null;
    voteToHalt();
}

From source file:org.apache.giraph.examples.Giraphx.java

License:Apache License

public void compute_dGiraph(Iterator<Text> msgIterator) {

    if (getSuperstep() == MAX_SUPERSTEPS) {
        voteToHalt();
        vertexValues.clear();
        available = null;
        needsOperation = false;
        return;
    }

    MaxAggregator maxAggreg = (MaxAggregator) getAggregator("max");

    long my_id = getVertexId().get();

    //Log.info("---- SEREF Vertex "+getVertexId()+" started at superstep "+getSuperstep()+" ----");
    if (getSuperstep() == 0) { // So that each vertex also learns its incoming edge IDs
        destEdgeIndexList.remove(getVertexId());
        sendMsgToAllEdges(new Text("M1:" + getVertexId() + ":" + getVertexValue()));
        return;
    }

    // Split incoming messages: vertex value:M1, incoming fork:M2, fork request:M3, fakeID:M4
    splitIncomingMessages(msgIterator);
    //Log.info("SEREF msgListMainMap size is:\t"+msgListMainMap.size());

    if (getSuperstep() == 1) { // initialize all edges list at superstep 1
        incomingEdgeIndexList.remove(getVertexId());
        initAllEdgeIndexList();
        //Log.info("SEREF allEdgeIndexList size is:\t"+allEdgeIndexList.size());

        if (VERSION_OF_JOB.contains("Giraphx")) {
            isInternal = checkIsInternal(allEdgeIndexList);
            if (isInternal) {
                getContext().getCounter("Giraph Stats", "Internal vertex count").increment(1);
            }
        }

        int random = (int) (Math.random() * NUM_VERTICES);
        fakeId = (long) (random * Math.pow(10, length) + getVertexId().get());
        sendMsgToAllVersioned(new Text("M4:" + fakeId));
        //Log.info("SEREF sent the fake ID "+fakeId);

        Text msg = new Text("M1:" + getVertexId() + ":" + getVertexValue());
        sendMsgToAllVersioned(msg);

        maxAggreg.aggregate(new DoubleWritable(allEdgeIndexList.size()));
        return;
    }

    if (getSuperstep() == 2) {
        int maxDegree = (int) GiraphxWorkerContext.maxDegree;
        if (!isInternal) {
            isNeighborsLocal(isLocalMap, allEdgeIndexList);
        } else {
            getContext().getCounter("Giraph Stats", "Local edge count").increment(allEdgeIndexList.size());
        }
        initAvailableColors(maxDegree);
        initForks(my_id);
        fakeIdList = null;
    }

    //Log.info("SEREF nonlocalNeighborCount size is:\t"+nonlocalNeighborCount);
    //Log.info("SEREF myForkMap size is:\t"+myForkMap.size());

    boolean hasAllForks = false;
    boolean hasAllCleanOrNoRequest = false;
    if (myForkMap.size() == nonlocalNeighborCount && !isInternal) {
        hasAllForks = true;
        hasAllCleanOrNoRequest = detectHasAllCleanOrNoRequest();
    }
    //Log.info("SEREF hasAllForks and isInternal values Message is:\t"+hasAllForks+"\t"+isInternal);

    boolean isUpdated = false;
    if (isInternal) { // only this block is used for a local vertex
        isUpdated = operateVersioned();
    } else if (hasAllForks && hasAllCleanOrNoRequest) {
        isUpdated = operateVersioned();
        for (long key : myForkMap.keySet()) {
            myForkMap.put(key, "DIRTY");
        }
        if (getSuperstep() == 2) { //In ss=2 there is no incoming request, thus send forks immediately
            Text m2msg = new Text("M2:" + my_id);
            for (Long key : myForkMap.keySet()) {
                sendMsg(new LongWritable(key), m2msg);
                //Log.info("SEREF sent message "+m2msg+" to "+key);
            }
            myForkMap.clear();
        }
    }

    if (isUpdated) {
        String msg = "M1:" + getVertexId() + ":" + getVertexValue().toString();
        //Log.info("SEREF sent message "+msg);
        sendMsgToAllVersioned(new Text(msg));
    }

    if (!isInternal) {
        handleForkRequestMessages(my_id);
    }

    hasAllForks = false;
    if (myForkMap.size() == nonlocalNeighborCount && !isInternal) {
        hasAllForks = true;
    }
    if (!isInternal && !hasAllForks && needsOperation) {
        requestMissingForks(my_id);
    }

    available = null;
    voteToHalt();

    //haltIfAllNodesPassive(hasOperatedNew, isUpdated);

    return;
}