Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

This page collects example usages of the org.apache.hadoop.io.DoubleWritable(double) constructor, drawn from open-source projects.

Prototype

public DoubleWritable(double value) 
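
For orientation before the project snippets below, here is a minimal, self-contained sketch of constructing a DoubleWritable and round-tripping it through Hadoop serialization; the class name and stream setup are illustrative, not taken from any of the projects listed.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
    public static void main(String[] args) throws IOException {
        // Construct with an initial value; get() and set() read and mutate it.
        DoubleWritable dw = new DoubleWritable(3.14);

        // Serialize: a DoubleWritable writes exactly 8 bytes.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        dw.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance created with the no-arg constructor.
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get()); // prints 3.14
    }
}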

Usage

From source file: org.smartfrog.services.hadoop.benchmark.citerank.CheckConvergenceReducer.java

License: Open Source License

@Override
public void reduce(Text key, Iterator<DoubleWritable> values, OutputCollector<Text, DoubleWritable> output,
        Reporter report) throws IOException {
    double tolerance = 0;
    while (values.hasNext()) {
        DoubleWritable value = values.next();
        tolerance += value.get();
    }
    output.collect(key, new DoubleWritable(tolerance));
}

From source file: org.smartfrog.services.hadoop.benchmark.citerank.DanglingPagesMapper.java

License: Open Source License

@Override
public void map(LongWritable key, Text value, OutputCollector<Text, DoubleWritable> output, Reporter report)
        throws IOException {
    StringTokenizer st = new StringTokenizer(value.toString());
    if (st.hasMoreTokens()) {
        st.nextToken(); // skip the page name
        if (st.hasMoreTokens()) {
            double rank = Double.parseDouble(st.nextToken());
            st.nextToken(); // skip the previous rank
            if (!st.hasMoreTokens()) {
                // no outgoing links remain on the line: this page is dangling
                output.collect(KEY_NAME, new DoubleWritable(rank));
            }
        }
    }
}

From source file: org.smartfrog.services.hadoop.benchmark.citerank.DanglingPagesReducer.java

License: Open Source License

@Override
public void reduce(Text key, Iterator<DoubleWritable> values, OutputCollector<Text, DoubleWritable> output,
        Reporter report) throws IOException {
    double sum = 0;
    while (values.hasNext()) {
        DoubleWritable value = values.next();
        sum += value.get();
    }

    output.collect(key, new DoubleWritable(sum));
}

From source file: org.smartfrog.services.hadoop.benchmark.citerank.SortRanksMapper.java

License: Open Source License

@Override
public void map(LongWritable key, Text value, OutputCollector<DoubleWritable, Text> output, Reporter report)
        throws IOException {
    StringTokenizer st = new StringTokenizer(value.toString());
    String page = st.nextToken();
    double rank = Double.parseDouble(st.nextToken());

    output.collect(new DoubleWritable(rank), new Text(page));
}
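
SortRanksMapper emits the rank itself as the map output key, which works because DoubleWritable implements WritableComparable and therefore sorts ascending during the shuffle. To list pages from highest to lowest rank, a job typically plugs in a descending comparator. A minimal sketch, assuming a custom comparator class (the name and wiring are illustrative, not taken from the CiteRank sources):

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;

// Sorts DoubleWritable keys in descending order by inverting the natural comparison.
public class DescendingDoubleComparator extends WritableComparator {
    public DescendingDoubleComparator() {
        super(DoubleWritable.class, true); // true: instantiate keys for compare()
    }

    @Override
    @SuppressWarnings("rawtypes")
    public int compare(WritableComparable a, WritableComparable b) {
        return -super.compare(a, b);
    }
}

With the old mapred API used in these snippets, the comparator would be wired in with JobConf.setOutputKeyComparatorClass(DescendingDoubleComparator.class).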

From source file: org.trend.hgraph.mapreduce.pagerank.CalculateInitPageRankMapper.java

License: Apache License

@Override
protected void map(final ImmutableBytesWritable key, final Result value, final Context context)
        throws IOException, InterruptedException {
    String rowKey = Bytes.toString(key.get());
    double pageRank = Utils.getPageRank(value, Constants.PAGE_RANK_CQ_NAME);
    // write current pageRank to tmp
    Utils.writePageRank(vertexTable, rowKey, tmpPageRankCq, pageRank);
    List<String> outgoingRowKeys = null;
    Configuration conf = context.getConfiguration();

    context.getCounter(Counters.VERTEX_COUNT).increment(1);
    outgoingRowKeys = getOutgoingRowKeys(conf, vertexTable, edgeTable, rowKey,
            context.getCounter(Counters.GET_OUTGOING_VERTICES_TIME_CONSUMED));
    dispatchPageRank(outgoingRowKeys, pageRank, conf, edgeTable,
            context.getCounter(Counters.DISPATCH_PR_TIME_CONSUMED),
            context.getCounter(Counters.OUTGOING_EDGE_COUNT), new ContextWriterStrategy() {
                @Override
                public void write(String key, double value) throws IOException, InterruptedException {
                    context.write(new Text(key), new DoubleWritable(value));
                }
            });
}

From source file: org.trend.hgraph.mapreduce.pagerank.CalculateIntermediatePageRankMapper.java

License: Apache License

@Override
protected void map(final Text key, final DoubleWritable value, final Context context)
        throws IOException, InterruptedException {
    String rowKey = Bytes.toString(key.getBytes()).trim();
    double pageRank = value.get();
    // write current pageRank to tmp
    Utils.writePageRank(vertexTable, rowKey, tmpPageRankCq, pageRank);

    Configuration conf = context.getConfiguration();
    List<String> outgoingRowKeys = null;

    context.getCounter(Counters.VERTEX_COUNT).increment(1);
    outgoingRowKeys = getOutgoingRowKeys(conf, vertexTable, edgeTable, rowKey,
            context.getCounter(Counters.GET_OUTGOING_VERTICES_TIME_CONSUMED));
    dispatchPageRank(outgoingRowKeys, pageRank, conf, edgeTable,
            context.getCounter(Counters.DISPATCH_PR_TIME_CONSUMED),
            context.getCounter(Counters.OUTGOING_EDGE_COUNT), new ContextWriterStrategy() {
                @Override
                public void write(String key, double value) throws IOException, InterruptedException {
                    context.write(new Text(key), new DoubleWritable(value));
                }
            });
}

From source file: org.trend.hgraph.mapreduce.pagerank.CalculatePageRankReducer.java

License: Apache License

@Override
protected void reduce(Text key, Iterable<DoubleWritable> incomingPageRanks, Context context)
        throws IOException, InterruptedException {

    String rowkey = Bytes.toString(key.getBytes()).trim();
    double incomingPageRankSum = 0.0D;
    StopWatch sw = new StopWatch();
    sw.start();
    for (DoubleWritable incomingPageRank : incomingPageRanks) {
        incomingPageRankSum = incomingPageRankSum + incomingPageRank.get();
    }
    // calculate new pageRank here
    double newPageRank = (dampingFactor * incomingPageRankSum) + ((1.0D - dampingFactor) / verticesTotalCnt);
    sw.stop();
    context.getCounter(Counters.CAL_NEW_PR_TIME_CONSUMED).increment(sw.getTime());

    sw.reset();
    sw.start();
    double oldPageRank = Utils.getPageRank(vertexTable, rowkey, Constants.PAGE_RANK_CQ_TMP_NAME);
    if (!pageRankEquals(oldPageRank, newPageRank, pageRankCompareScale)) {
        // collect pageRank changing count with counter
        context.getCounter(Counters.CHANGED_PAGE_RANK_COUNT).increment(1);
    }
    sw.stop();
    context.getCounter(Counters.CMP_OLD_NEW_PR_TIME_CONSUMED).increment(sw.getTime());

    context.write(key, new DoubleWritable(newPageRank));
}

From source file: org.trustedanalytics.atk.giraph.algorithms.als.AlternatingLeastSquaresComputation.java

License: Apache License

@Override
public void compute(Vertex<CFVertexId, VertexData4CFWritable, EdgeData4CFWritable> vertex,
        Iterable<MessageData4CFWritable> messages) throws IOException {
    long step = getSuperstep();
    if (step == 0) {
        initialize(vertex);
        vertex.voteToHalt();
        return;
    }

    Vector currentValue = vertex.getValue().getVector();
    double currentBias = vertex.getValue().getBias();
    // update aggregators every (2 * interval) supersteps
    if ((step % (2 * learningCurveOutputInterval)) == 0) {
        double errorOnTrain = 0d;
        double errorOnValidate = 0d;
        double errorOnTest = 0d;
        int numTrain = 0;
        for (MessageData4CFWritable message : messages) {
            EdgeType et = message.getType();
            double weight = message.getWeight();
            Vector vector = message.getVector();
            double otherBias = message.getBias();
            double predict = currentBias + otherBias + currentValue.dot(vector);
            double e = weight - predict;
            switch (et) {
            case TRAIN:
                errorOnTrain += e * e;
                numTrain++;
                break;
            case VALIDATE:
                errorOnValidate += e * e;
                break;
            case TEST:
                errorOnTest += e * e;
                break;
            default:
                throw new IllegalArgumentException("Unknown recognized edge type: " + et.toString());
            }
        }
        double costOnTrain = 0d;
        if (numTrain > 0) {
            costOnTrain = errorOnTrain / numTrain
                    + lambda * (currentBias * currentBias + currentValue.dot(currentValue));
        }
        aggregate(SUM_TRAIN_COST, new DoubleWritable(costOnTrain));
        aggregate(SUM_VALIDATE_ERROR, new DoubleWritable(errorOnValidate));
        aggregate(SUM_TEST_ERROR, new DoubleWritable(errorOnTest));
    }

    // update vertex value
    if (step < maxSupersteps) {
        // xxt records the result of x times x transpose
        Matrix xxt = new DenseMatrix(featureDimension, featureDimension);
        xxt = xxt.assign(0d);
        // xr records the result of x times rating
        Vector xr = currentValue.clone().assign(0d);
        int numTrain = 0;
        for (MessageData4CFWritable message : messages) {
            EdgeType et = message.getType();
            if (et == EdgeType.TRAIN) {
                double weight = message.getWeight();
                Vector vector = message.getVector();
                double otherBias = message.getBias();
                xxt = xxt.plus(vector.cross(vector));
                xr = xr.plus(vector.times(weight - currentBias - otherBias));
                numTrain++;
            }
        }
        xxt = xxt.plus(new DiagonalMatrix(lambda * numTrain, featureDimension));
        Matrix bMatrix = new DenseMatrix(featureDimension, 1).assignColumn(0, xr);
        Vector value = new QRDecomposition(xxt).solve(bMatrix).viewColumn(0);
        vertex.getValue().setVector(value);

        // update vertex bias
        if (biasOn) {
            double bias = computeBias(value, messages);
            vertex.getValue().setBias(bias);
        }

        // send out messages
        for (Edge<CFVertexId, EdgeData4CFWritable> edge : vertex.getEdges()) {
            MessageData4CFWritable newMessage = new MessageData4CFWritable(vertex.getValue(), edge.getValue());
            sendMessage(edge.getTargetVertexId(), newMessage);
        }
    }

    vertex.voteToHalt();
}

From source file: org.trustedanalytics.atk.giraph.algorithms.cgd.ConjugateGradientDescentComputation.java

License: Apache License

@Override
public void compute(Vertex<CFVertexId, VertexData4CGDWritable, EdgeData4CFWritable> vertex,
        Iterable<MessageData4CFWritable> messages) throws IOException {
    long step = getSuperstep();
    if (step == 0) {
        initialize(vertex);
        vertex.voteToHalt();
        return;
    }

    Vector currentValue = vertex.getValue().getVector();
    double currentBias = vertex.getValue().getBias();
    // update aggregators every (2 * interval) supersteps
    if ((step % (2 * learningCurveOutputInterval)) == 0) {
        double errorOnTrain = 0d;
        double errorOnValidate = 0d;
        double errorOnTest = 0d;
        int numTrain = 0;
        for (MessageData4CFWritable message : messages) {
            EdgeType et = message.getType();
            double weight = message.getWeight();
            Vector vector = message.getVector();
            double otherBias = message.getBias();
            double predict = currentBias + otherBias + currentValue.dot(vector);
            double e = weight - predict;
            switch (et) {
            case TRAIN:
                errorOnTrain += e * e;
                numTrain++;
                break;
            case VALIDATE:
                errorOnValidate += e * e;
                break;
            case TEST:
                errorOnTest += e * e;
                break;
            default:
                throw new IllegalArgumentException("Unknown recognized edge type: " + et.toString());
            }
        }
        double costOnTrain = 0d;
        if (numTrain > 0) {
            costOnTrain = errorOnTrain / numTrain
                    + lambda * (currentBias * currentBias + currentValue.dot(currentValue));
        }
        aggregate(SUM_TRAIN_COST, new DoubleWritable(costOnTrain));
        aggregate(SUM_VALIDATE_ERROR, new DoubleWritable(errorOnValidate));
        aggregate(SUM_TEST_ERROR, new DoubleWritable(errorOnTest));
    }

    if (step < maxSupersteps) {
        // implement CGD iterations
        Vector value0 = vertex.getValue().getVector();
        Vector gradient0 = vertex.getValue().getGradient();
        Vector conjugate0 = vertex.getValue().getConjugate();
        double bias0 = vertex.getValue().getBias();
        for (int i = 0; i < numCGDIters; i++) {
            double alpha = computeAlpha(gradient0, conjugate0, messages);
            Vector value = value0.plus(conjugate0.times(alpha));
            Vector gradient = computeGradient(bias0, value, messages);
            double beta = computeBeta(gradient0, conjugate0, gradient);
            Vector conjugate = conjugate0.times(beta).minus(gradient);
            value0 = value;
            gradient0 = gradient;
            conjugate0 = conjugate;
        }
        // update vertex values
        vertex.getValue().setVector(value0);
        vertex.getValue().setConjugate(conjugate0);
        vertex.getValue().setGradient(gradient0);

        // update vertex bias
        if (biasOn) {
            double bias = computeBias(value0, messages);
            vertex.getValue().setBias(bias);
        }

        // send out messages
        for (Edge<CFVertexId, EdgeData4CFWritable> edge : vertex.getEdges()) {
            MessageData4CFWritable newMessage = new MessageData4CFWritable(vertex.getValue(), edge.getValue());
            sendMessage(edge.getTargetVertexId(), newMessage);
        }
    }

    vertex.voteToHalt();
}
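
The ALS and CGD computations above both push per-superstep error sums into named aggregators (SUM_TRAIN_COST, SUM_VALIDATE_ERROR, SUM_TEST_ERROR) via aggregate(name, new DoubleWritable(...)). For those calls to work, a master compute has to register a sum aggregator under each name. A minimal sketch assuming Giraph's stock DoubleSumAggregator; the class and aggregator names here are illustrative, not the project's actual master compute:

import org.apache.giraph.aggregators.DoubleSumAggregator;
import org.apache.giraph.master.DefaultMasterCompute;

public class LearningCurveMasterCompute extends DefaultMasterCompute {
    @Override
    public void initialize() throws InstantiationException, IllegalAccessException {
        // Each name must match the constant the vertex computation aggregates to;
        // DoubleSumAggregator sums the DoubleWritable values from all vertices.
        registerAggregator("als.sumTrainCost", DoubleSumAggregator.class);
        registerAggregator("als.sumValidateError", DoubleSumAggregator.class);
        registerAggregator("als.sumTestError", DoubleSumAggregator.class);
    }
}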

From source file: org.trustedanalytics.atk.giraph.algorithms.lbp.LoopyBeliefPropagationComputation.java

License: Apache License

@Override
public void compute(Vertex<LongWritable, VertexData4LBPWritable, DoubleWritable> vertex,
        Iterable<IdWithVectorMessage> messages) throws IOException {
    long step = getSuperstep();
    if (step == 0) {
        initializeVertex(vertex);
        return;
    }

    // collect messages sent to this vertex
    HashMap<Long, Vector> map = new HashMap<Long, Vector>();
    for (IdWithVectorMessage message : messages) {
        map.put(message.getData(), message.getVector());
    }

    // update posterior according to prior and messages
    VertexData4LBPWritable vertexValue = vertex.getValue();
    VertexType vt = vertexValue.getType();
    vt = ignoreVertexType ? VertexType.TRAIN : vt;
    Vector prior = vertexValue.getPriorVector();
    double nStates = prior.size();
    if (vt != VertexType.TRAIN) {
        // assign a uniform prior for validate/test vertex
        prior = prior.clone().assign(Math.log(1.0 / nStates));
    }
    // sum of prior and messages
    Vector sumPosterior = prior;
    for (IdWithVectorMessage message : messages) {
        sumPosterior = sumPosterior.plus(message.getVector());
    }
    sumPosterior = sumPosterior.plus(-sumPosterior.maxValue());
    // update posterior if this isn't an anchor vertex
    if (prior.maxValue() < anchorThreshold) {
        // normalize posterior
        Vector posterior = sumPosterior.clone().assign(Functions.EXP);
        posterior = posterior.normalize(1d);
        Vector oldPosterior = vertexValue.getPosteriorVector();
        double delta = posterior.minus(oldPosterior).norm(1d);
        // aggregate deltas
        switch (vt) {
        case TRAIN:
            aggregate(SUM_TRAIN_DELTA, new DoubleWritable(delta));
            break;
        case VALIDATE:
            aggregate(SUM_VALIDATE_DELTA, new DoubleWritable(delta));
            break;
        case TEST:
            aggregate(SUM_TEST_DELTA, new DoubleWritable(delta));
            break;
        default:
            throw new IllegalArgumentException("Unknown vertex type: " + vt.toString());
        }
        // update posterior
        vertexValue.setPosteriorVector(posterior);
    }

    if (step < maxSupersteps) {
        // if it's not a training vertex, don't send out messages
        if (vt != VertexType.TRAIN) {
            return;
        }
        IdWithVectorMessage newMessage = new IdWithVectorMessage();
        newMessage.setData(vertex.getId().get());
        // update belief
        Vector belief = prior.clone();
        for (Edge<LongWritable, DoubleWritable> edge : vertex.getEdges()) {
            double weight = edge.getValue().get();
            long id = edge.getTargetVertexId().get();
            Vector tempVector = sumPosterior;
            if (map.containsKey(id)) {
                tempVector = sumPosterior.minus(map.get(id));
            }
            for (int i = 0; i < nStates; i++) {
                double sum = 0d;
                for (int j = 0; j < nStates; j++) {
                    double msg = Math.exp(
                            tempVector.getQuick(j) + edgePotential(Math.abs(i - j) / (nStates - 1), weight));
                    if (maxProduct) {
                        sum = sum > msg ? sum : msg;
                    } else {
                        sum += msg;
                    }
                }
                belief.setQuick(i, sum > 0d ? Math.log(sum) : Double.MIN_VALUE);
            }
            belief = belief.plus(-belief.maxValue());
            newMessage.setVector(belief);
            sendMessage(edge.getTargetVertexId(), newMessage);
        }
    } else {
        // convert prior back to regular scale before output
        prior = vertexValue.getPriorVector();
        prior = prior.assign(Functions.EXP);
        vertexValue.setPriorVector(prior);
        vertex.voteToHalt();
    }
}