Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

List of usage examples for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find an example usage of the org.apache.hadoop.io DoubleWritable(double) constructor.

Prototype

public DoubleWritable(double value) 

Source Link

Usage

From source file: com.soteradefense.dga.io.formats.DGAEdgeTDTOutputFormatTest.java

License: Apache License

@Before
public void setUp() throws Exception {
    // Giraph configuration and task-attempt context used by the output format under test.
    GiraphConfiguration giraphConf = new GiraphConfiguration();
    conf = new ImmutableClassesGiraphConfiguration<Text, Text, Text>(giraphConf);
    tac = mock(TaskAttemptContext.class);

    // A vertex with id "34" carrying a double value.
    vertex = mock(Vertex.class);
    when(vertex.getId()).thenReturn(new Text("34"));
    when(vertex.getValue()).thenReturn(new DoubleWritable(10.43433333389));

    // Two outgoing edges: 34 -> 12 (value "1") and 34 -> 6 (value "4").
    edge1 = mock(Edge.class);
    when(edge1.getTargetVertexId()).thenReturn(new Text("12"));
    when(edge1.getValue()).thenReturn(new Text("1"));

    edge2 = mock(Edge.class);
    when(edge2.getTargetVertexId()).thenReturn(new Text("6"));
    when(edge2.getValue()).thenReturn(new Text("4"));

    // An iterable whose iterator yields exactly edge1 then edge2.
    Iterable<Edge<Text, Text>> edges = mock(Iterable.class);
    Iterator<Edge<Text, Text>> edgeIterator = mock(Iterator.class);
    when(edges.iterator()).thenReturn(edgeIterator);
    when(edgeIterator.hasNext()).thenReturn(true, true, false);
    when(edgeIterator.next()).thenReturn(edge1, edge2);

    rw = mock(RecordWriter.class);
}

From source file: com.soteradefense.dga.louvain.giraph.LouvainComputation.java

License: Apache License

/**
 * Folds this vertex's partial modularity contribution into the global
 * Q aggregator.
 *
 * @param q partial modularity (Q) value contributed by this vertex
 */
private void aggregateQ(Double q) {
    DoubleWritable contribution = new DoubleWritable(q);
    aggregate(ACTUAL_Q_AGG, contribution);
}

From source file: com.soteradefense.dga.pr.PageRankComputation.java

License: Apache License

/**
 * One PageRank superstep: superstep 0 seeds every vertex with the uniform
 * rank 1/N; later supersteps apply the damped PageRank update from the
 * incoming partial ranks, then redistribute the new rank to all neighbors.
 */
@Override
public void compute(Vertex<Text, DoubleWritable, Text> vertex, Iterable<DoubleWritable> messages)
        throws IOException {

    final float dampingFactor = this.getConf().getFloat(DAMPING_FACTOR, DAMPING_FACTOR_DEFAULT_VALUE);

    if (getSuperstep() == 0) {
        // Seed with the uniform distribution.
        logger.debug("Superstep is 0: Setting the default value.");
        vertex.setValue(new DoubleWritable(1.0 / getTotalNumVertices()));
    } else {
        // Sum the partial ranks sent by in-neighbors.
        double incoming = 0;
        for (DoubleWritable message : messages) {
            incoming += message.get();
        }

        // Damped update: (1 - d)/N + d * sum(incoming partial ranks).
        double newRank = ((1 - dampingFactor) / getTotalNumVertices()) + (dampingFactor * incoming);

        // Report the relative change so the master can test for convergence.
        double previousRank = vertex.getValue().get();
        aggregate(MAX_EPSILON, new DoubleWritable(Math.abs(newRank - previousRank) / previousRank));

        vertex.setValue(new DoubleWritable(newRank));
        logger.debug("{} is calculated {} for a PageRank.", vertex.getId(), newRank);
    }
    distributeRank(vertex);
}

From source file: com.soteradefense.dga.pr.PageRankComputation.java

License: Apache License

/**
 * Splits this vertex's current rank evenly across its out-edges and sends
 * each neighbor its share.
 */
private void distributeRank(Vertex<Text, DoubleWritable, Text> vertex) {
    double currentRank = vertex.getValue().get();
    double share = currentRank / vertex.getNumEdges();
    sendMessageToAllEdges(vertex, new DoubleWritable(share));
}

From source file: com.talis.labs.pagerank.mapreduce.CheckConvergenceMapper.java

License: Apache License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    StringTokenizer st = new StringTokenizer(value.toString());
    st.nextToken(); // page
    double pagerank = Double.parseDouble(st.nextToken());
    double previous_pagerank = Double.parseDouble(st.nextToken());

    context.write(KEY_NAME, new DoubleWritable(Math.abs(pagerank - previous_pagerank)));
}

From source file: com.talis.labs.pagerank.mapreduce.CheckConvergenceReducer.java

License: Apache License

@Override
public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    double tolerance = 0;
    for (DoubleWritable value : values) {
        tolerance += value.get();/*from  w  ww.  j a  v a2s . c  om*/
    }
    context.write(key, new DoubleWritable(tolerance));
}

From source file: com.talis.labs.pagerank.mapreduce.DanglingPagesMapper.java

License: Apache License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    StringTokenizer st = new StringTokenizer(value.toString());
    if (st.hasMoreTokens()) {
        st.nextToken();//w w  w . j ava2s.  co m
        if (st.hasMoreTokens()) {
            double pagerank = Double.parseDouble(st.nextToken());
            st.nextToken(); // previous pagerank
            if (!st.hasMoreTokens()) {
                context.write(KEY_NAME, new DoubleWritable(pagerank));
            }
        }
    }
}

From source file: com.talis.labs.pagerank.mapreduce.DanglingPagesReducer.java

License: Apache License

@Override
public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    double sum = 0;
    for (DoubleWritable value : values) {
        sum += value.get();//  w w w  .  j a v  a  2 s  .  com
    }
    context.write(key, new DoubleWritable(sum));
}

From source file: com.talis.labs.pagerank.mapreduce.SortPageRanksMapper.java

License: Apache License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    StringTokenizer st = new StringTokenizer(value.toString());
    String page = st.nextToken();
    double pagerank = Double.parseDouble(st.nextToken());

    context.write(new DoubleWritable(pagerank), new Text(page));
}

From source file: com.tfm.utad.reducerdata.ReducerDataVerticaMapper.java

@Override
public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    String[] values = value.toString().replaceAll("\n", "").split(ReducerConstants.SPLIT_SEMICOLON);
    Double latitude = Double.valueOf(values[0]);
    Double longitude = Double.valueOf(values[1]);
    String user = values[2];// ww  w .ja v  a2s . c  o m
    Date date;
    try {
        date = sdf.parse(values[3]);
    } catch (ParseException ex) {
        LOG.error("Error parsing date: " + ex.toString());
        date = null;
    }
    String activity = values[4];
    if (isValidKey(latitude, longitude, user)) {
        Text text = (Text) key;
        Long id = Long.valueOf(text.toString());
        Long userid = Long.valueOf(user.substring(4));
        context.write(new Text(user + activity),
                new ReducerVerticaValue(new LongWritable(id), new Text(user), date, new Text(activity),
                        new DoubleWritable(latitude), new DoubleWritable(longitude), new LongWritable(userid)));
    } else {
        LOG.error("Invalid values in line: " + value.toString());
        context.getCounter(ReducerDataEnum.MALFORMED_DATA).increment(1);
    }
}