Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

This page lists example usages of the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double value).

Prototype

public DoubleWritable(double value) 
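
The examples below use this constructor to wrap a Java double for Hadoop serialization. As a minimal, hypothetical sketch (not taken from the examples below), the wrapped value can be read back with get() and the instance reused with set(double):

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
    public static void main(String[] args) {
        // Construct with an initial value; DoubleWritable is mutable.
        DoubleWritable dw = new DoubleWritable(3.14);
        System.out.println(dw.get()); // 3.14

        // Reuse the same instance instead of allocating a new one,
        // the usual pattern in Hadoop code to reduce object churn.
        dw.set(2.71);
        System.out.println(dw.get()); // 2.71
    }
}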

Usage

From source file:edu.umn.cs.spatialHadoop.operations.KNN.java

License:Open Source License

/**
 * A MapReduce version of KNN query.
 * @param inputPath path of the input file that contains the indexed data
 * @param userOutputPath path of the output file, or null to generate a temporary output path
 * @param params operation parameters, including the query point ("point") and the value of k
 * @return the job of the last executed iteration
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
private static <S extends Shape> Job knnMapReduce(Path inputPath, Path userOutputPath, OperationsParams params)
        throws IOException, ClassNotFoundException, InterruptedException {
    Job job = new Job(params, "KNN");
    job.setJarByClass(KNN.class);

    FileSystem inFs = inputPath.getFileSystem(params);
    job.setInputFormatClass(SpatialInputFormat3.class);
    SpatialInputFormat3.setInputPaths(job, inputPath);

    job.setMapperClass(KNNMap.class);
    job.setMapOutputKeyClass(NullWritable.class);
    job.setMapOutputValueClass(TextWithDistance.class);

    job.setReducerClass(KNNReduce.class);
    job.setNumReduceTasks(1);

    job.getConfiguration().setClass(SpatialSite.FilterClass, RangeFilter.class, BlockFilter.class);
    final Point queryPoint = (Point) params.getShape("point");
    final int k = params.getInt("k", 1);

    final IntWritable additional_blocks_2b_processed = new IntWritable(0);
    long resultCount;
    int iterations = 0;

    Path outputPath = userOutputPath;
    if (outputPath == null) {
        do {
            outputPath = new Path(inputPath.getName() + ".knn_" + (int) (Math.random() * 1000000));
        } while (inFs.exists(outputPath));
    }
    job.setOutputFormatClass(TextOutputFormat3.class);
    TextOutputFormat3.setOutputPath(job, outputPath);

    GlobalIndex<Partition> globalIndex = SpatialSite.getGlobalIndex(inFs, inputPath);
    Configuration templateConf = job.getConfiguration();

    FileSystem outFs = outputPath.getFileSystem(params);
    // Start with the query point to select all partitions overlapping with it
    Shape range_for_this_iteration = new Point(queryPoint.x, queryPoint.y);

    do {
        job = new Job(templateConf);
        // Delete results of the previous iteration, if any
        if (outputPath != null)
            outFs.delete(outputPath, true);

        LOG.info("Running iteration: " + (++iterations));
        // Set query range for the SpatialInputFormat
        OperationsParams.setShape(job.getConfiguration(), RangeFilter.QueryRange, range_for_this_iteration);

        // Submit the job
        if (params.getBoolean("background", false)) {
            // XXX this is incorrect because if the job needs multiple iterations,
            // it will run only the first one
            job.waitForCompletion(false);
            return job;
        }
        job.waitForCompletion(false);

        // Retrieve answers for this iteration
        Counters counters = job.getCounters();
        Counter resultSizeCounter = counters.findCounter(Task.Counter.REDUCE_OUTPUT_RECORDS);
        resultCount = resultSizeCounter.getValue();

        if (globalIndex != null) {
            Circle range_for_next_iteration;
            if (resultCount < k) {
                LOG.info("Found only " + resultCount + " results");
                // Did not find enough results in the query space
                // Increase the distance by doubling the maximum distance among all
                // partitions that were processed
                final DoubleWritable maximum_distance = new DoubleWritable(0);
                int matched_partitions = globalIndex.rangeQuery(range_for_this_iteration,
                        new ResultCollector<Partition>() {
                            @Override
                            public void collect(Partition p) {
                                double distance = p.getMaxDistanceTo(queryPoint.x, queryPoint.y);
                                if (distance > maximum_distance.get())
                                    maximum_distance.set(distance);
                            }
                        });
                if (matched_partitions == 0) {
                    // The query point is outside the search space
                    // Set the range to include the closest partition
                    globalIndex.knn(queryPoint.x, queryPoint.y, 1, new ResultCollector2<Partition, Double>() {
                        @Override
                        public void collect(Partition r, Double s) {
                            maximum_distance.set(s);
                        }
                    });
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y, maximum_distance.get() * 2);
                LOG.info("Expanding to " + maximum_distance.get() * 2);
            } else {
                // Calculate the new test range, which is a circle centered at
                // the query point with radius equal to the distance to the k-th neighbor

                // Get distance to the kth neighbor
                final DoubleWritable distance_to_kth_neighbor = new DoubleWritable();
                FileStatus[] results = outFs.listStatus(outputPath);
                for (FileStatus result_file : results) {
                    if (result_file.getLen() > 0 && result_file.getPath().getName().startsWith("part-")) {
                        // Read the last line (kth neighbor)
                        Tail.tail(outFs, result_file.getPath(), 1, new TextWithDistance(),
                                new ResultCollector<TextWithDistance>() {

                                    @Override
                                    public void collect(TextWithDistance r) {
                                        distance_to_kth_neighbor.set(r.distance);
                                    }
                                });
                    }
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y,
                        distance_to_kth_neighbor.get());
                LOG.info("Expanding to kth neighbor: " + distance_to_kth_neighbor);
            }

            // Calculate the number of additional blocks to be processed to
            // check the terminating condition
            additional_blocks_2b_processed.set(0);
            final Shape temp = range_for_this_iteration;
            globalIndex.rangeQuery(range_for_next_iteration, new ResultCollector<Partition>() {
                @Override
                public void collect(Partition p) {
                    if (!(p.isIntersected(temp))) {
                        additional_blocks_2b_processed.set(additional_blocks_2b_processed.get() + 1);
                    }
                }
            });
            range_for_this_iteration = range_for_next_iteration;
        }
    } while (additional_blocks_2b_processed.get() > 0);

    // If output file is not set by user, delete it
    if (userOutputPath == null)
        outFs.delete(outputPath, true);
    TotalIterations.addAndGet(iterations);

    return job;
}
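
Note the role of new DoubleWritable(0) above: Java anonymous inner classes can only capture (effectively) final local variables, so a plain double accumulator could not be updated from inside the ResultCollector, while a final DoubleWritable reference can have its contents changed via set(). A self-contained sketch of that holder pattern, with a made-up Collector interface and made-up distances:

import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.io.DoubleWritable;

public class MaxDistanceHolder {
    // Stand-in for the ResultCollector callback used above.
    interface Collector<T> {
        void collect(T item);
    }

    public static void main(String[] args) {
        // final reference, mutable contents: the callback may update it.
        final DoubleWritable maximumDistance = new DoubleWritable(0);

        Collector<Double> collector = new Collector<Double>() {
            @Override
            public void collect(Double distance) {
                if (distance > maximumDistance.get())
                    maximumDistance.set(distance);
            }
        };

        List<Double> distances = Arrays.asList(1.5, 7.0, 3.2);
        for (Double d : distances) {
            collector.collect(d);
        }

        System.out.println(maximumDistance.get()); // 7.0
    }
}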

From source file:edu.umn.cs.sthadoop.operations.HSPKNNQ.java

License:Open Source License

/**
 * A MapReduce version of KNN query.
 * @param inputPath path of the input file that contains the indexed data
 * @param userOutputPath path of the output file, or null to generate a temporary output path
 * @param params operation parameters, including the query point ("point") and the value of k
 * @return the job of the last executed iteration
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
private static <S extends Shape> Job knnMapReduce(Path inputPath, Path userOutputPath, OperationsParams params)
        throws IOException, ClassNotFoundException, InterruptedException {
    Job job = new Job(params, "PKNN");
    job.setJarByClass(HSPKNNQ.class);

    FileSystem inFs = inputPath.getFileSystem(params);
    job.setInputFormatClass(SpatialInputFormat3.class);
    SpatialInputFormat3.setInputPaths(job, inputPath);

    job.setMapperClass(KNNMap.class);
    job.setMapOutputKeyClass(NullWritable.class);
    job.setMapOutputValueClass(TextWithDistance.class);

    job.setReducerClass(KNNReduce.class);
    job.setNumReduceTasks(1);

    job.getConfiguration().setClass(SpatialSite.FilterClass, RangeFilter.class, BlockFilter.class);
    final Point queryPoint = (Point) params.getShape("point");
    final int k = params.getInt("k", 1);

    final IntWritable additional_blocks_2b_processed = new IntWritable(0);
    long resultCount;
    int iterations = 0;

    Path outputPath = userOutputPath;
    if (outputPath == null) {
        do {
            outputPath = new Path(inputPath.getName() + ".knn_" + (int) (Math.random() * 1000000));
        } while (inFs.exists(outputPath));
    }
    job.setOutputFormatClass(TextOutputFormat3.class);
    TextOutputFormat3.setOutputPath(job, outputPath);

    GlobalIndex<Partition> globalIndex = SpatialSite.getGlobalIndex(inFs, inputPath);
    Configuration templateConf = job.getConfiguration();

    FileSystem outFs = outputPath.getFileSystem(params);
    // Start with the query point to select all partitions overlapping with it
    Shape range_for_this_iteration = new Point(queryPoint.x, queryPoint.y);

    do {
        job = new Job(templateConf);
        // Delete results of the previous iteration, if any
        if (outputPath != null)
            outFs.delete(outputPath, true);

        LOG.info("Running iteration: " + (++iterations));
        // Set query range for the SpatialInputFormat
        OperationsParams.setShape(job.getConfiguration(), RangeFilter.QueryRange, range_for_this_iteration);

        // Submit the job
        if (params.getBoolean("background", false)) {
            // XXX this is incorrect because if the job needs multiple iterations,
            // it will run only the first one
            job.waitForCompletion(false);
            return job;
        }
        job.waitForCompletion(false);

        // Retrieve answers for this iteration
        Counters counters = job.getCounters();
        Counter resultSizeCounter = counters.findCounter(Task.Counter.REDUCE_OUTPUT_RECORDS);
        resultCount = resultSizeCounter.getValue();

        if (globalIndex != null) {
            Circle range_for_next_iteration;
            if (resultCount < k) {
                LOG.info("Found only " + resultCount + " results");
                // Did not find enough results in the query space
                // Increase the distance by doubling the maximum distance among all
                // partitions that were processed
                final DoubleWritable maximum_distance = new DoubleWritable(0);
                int matched_partitions = globalIndex.rangeQuery(range_for_this_iteration,
                        new ResultCollector<Partition>() {
                            @Override
                            public void collect(Partition p) {
                                double distance = p.getMaxDistanceTo(queryPoint.x, queryPoint.y);
                                if (distance > maximum_distance.get())
                                    maximum_distance.set(distance);
                            }
                        });
                if (matched_partitions == 0) {
                    // The query point is outside the search space
                    // Set the range to include the closest partition
                    globalIndex.knn(queryPoint.x, queryPoint.y, 1, new ResultCollector2<Partition, Double>() {
                        @Override
                        public void collect(Partition r, Double s) {
                            maximum_distance.set(s);
                        }
                    });
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y, maximum_distance.get() * 2);
                LOG.info("Expanding to " + maximum_distance.get() * 2);
            } else {
                // Calculate the new test range, which is a circle centered at
                // the query point with radius equal to the distance to the k-th neighbor

                // Get distance to the kth neighbor
                final DoubleWritable distance_to_kth_neighbor = new DoubleWritable();
                FileStatus[] results = outFs.listStatus(outputPath);
                for (FileStatus result_file : results) {
                    if (result_file.getLen() > 0 && result_file.getPath().getName().startsWith("part-")) {
                        // Read the last line (kth neighbor)
                        Tail.tail(outFs, result_file.getPath(), 1, new TextWithDistance(),
                                new ResultCollector<TextWithDistance>() {

                                    @Override
                                    public void collect(TextWithDistance r) {
                                        distance_to_kth_neighbor.set(r.distance);
                                    }
                                });
                    }
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y,
                        distance_to_kth_neighbor.get());
                LOG.info("Expanding to kth neighbor: " + distance_to_kth_neighbor);
            }

            // Calculate the number of additional blocks to be processed to
            // check the terminating condition
            additional_blocks_2b_processed.set(0);
            final Shape temp = range_for_this_iteration;
            globalIndex.rangeQuery(range_for_next_iteration, new ResultCollector<Partition>() {
                @Override
                public void collect(Partition p) {
                    if (!(p.isIntersected(temp))) {
                        additional_blocks_2b_processed.set(additional_blocks_2b_processed.get() + 1);
                    }
                }
            });
            range_for_this_iteration = range_for_next_iteration;
        }
    } while (additional_blocks_2b_processed.get() > 0);

    // If output file is not set by user, delete it
    if (userOutputPath == null)
        outFs.delete(outputPath, true);
    TotalIterations.addAndGet(iterations);

    return job;
}

From source file:edu.umn.cs.sthadoop.trajectory.KNNDTW.java

License:Open Source License

/**
 * A MapReduce version of KNN query.
 * @param inputPath path of the input file that contains the indexed data
 * @param userOutputPath path of the output file, or null to generate a temporary output path
 * @param params operation parameters, including the query point ("point") and the value of k
 * @return the job of the last executed iteration
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
private static <S extends Shape> Job knnMapReduce(Path inputPath, Path userOutputPath, OperationsParams params)
        throws IOException, ClassNotFoundException, InterruptedException {
    Job job = new Job(params, "KNN");
    job.setJarByClass(KNNDTW.class);

    FileSystem inFs = inputPath.getFileSystem(params);
    job.setInputFormatClass(SpatialInputFormat3.class);
    SpatialInputFormat3.setInputPaths(job, inputPath);

    job.setMapperClass(KNNMap.class);
    job.setMapOutputKeyClass(NullWritable.class);
    job.setMapOutputValueClass(TextWithDistance.class);

    job.setReducerClass(KNNReduce.class);
    job.setNumReduceTasks(1);

    job.getConfiguration().setClass(SpatialSite.FilterClass, RangeFilter.class, BlockFilter.class);
    final Point queryPoint = (Point) params.getShape("point");
    final int k = params.getInt("k", 1);

    final IntWritable additional_blocks_2b_processed = new IntWritable(0);
    long resultCount;
    int iterations = 0;

    Path outputPath = userOutputPath;
    if (outputPath == null) {
        do {
            outputPath = new Path(inputPath.getName() + ".knn_" + (int) (Math.random() * 1000000));
        } while (inFs.exists(outputPath));
    }
    job.setOutputFormatClass(TextOutputFormat3.class);
    TextOutputFormat3.setOutputPath(job, outputPath);

    GlobalIndex<Partition> globalIndex = SpatialSite.getGlobalIndex(inFs, inputPath);
    Configuration templateConf = job.getConfiguration();

    FileSystem outFs = outputPath.getFileSystem(params);
    // Start with the query point to select all partitions overlapping with it
    Shape range_for_this_iteration = new Point(queryPoint.x, queryPoint.y);

    do {
        job = new Job(templateConf);
        // Delete results of the previous iteration, if any
        if (outputPath != null)
            outFs.delete(outputPath, true);

        LOG.info("Running iteration: " + (++iterations));
        // Set query range for the SpatialInputFormat
        OperationsParams.setShape(job.getConfiguration(), RangeFilter.QueryRange, range_for_this_iteration);

        // Submit the job
        if (params.getBoolean("background", false)) {
            // XXX this is incorrect because if the job needs multiple iterations,
            // it will run only the first one
            job.waitForCompletion(false);
            return job;
        }
        job.waitForCompletion(false);

        // Retrieve answers for this iteration
        Counters counters = job.getCounters();
        Counter resultSizeCounter = counters.findCounter(Task.Counter.REDUCE_OUTPUT_RECORDS);
        resultCount = resultSizeCounter.getValue();

        if (globalIndex != null) {
            Circle range_for_next_iteration;
            if (resultCount < k) {
                LOG.info("Found only " + resultCount + " results");
                // Did not find enough results in the query space
                // Increase the distance by doubling the maximum distance
                // among all partitions that were processed
                final DoubleWritable maximum_distance = new DoubleWritable(0);
                int matched_partitions = globalIndex.rangeQuery(range_for_this_iteration,
                        new ResultCollector<Partition>() {
                            @Override
                            public void collect(Partition p) {
                                double distance = p.getMaxDistanceTo(queryPoint.x, queryPoint.y);
                                if (distance > maximum_distance.get())
                                    maximum_distance.set(distance);
                            }
                        });
                if (matched_partitions == 0) {
                    // The query point is outside the search space
                    // Set the range to include the closest partition
                    globalIndex.knn(queryPoint.x, queryPoint.y, 1, new ResultCollector2<Partition, Double>() {
                        @Override
                        public void collect(Partition r, Double s) {
                            maximum_distance.set(s);
                        }
                    });
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y, maximum_distance.get() * 2);
                LOG.info("Expanding to " + maximum_distance.get() * 2);
            } else {
                // Calculate the new test range, which is a circle centered at
                // the query point with radius equal to the distance to the k-th neighbor

                // Get distance to the kth neighbor
                final DoubleWritable distance_to_kth_neighbor = new DoubleWritable();
                FileStatus[] results = outFs.listStatus(outputPath);
                for (FileStatus result_file : results) {
                    if (result_file.getLen() > 0 && result_file.getPath().getName().startsWith("part-")) {
                        // Read the last line (kth neighbor)
                        Tail.tail(outFs, result_file.getPath(), 1, new TextWithDistance(),
                                new ResultCollector<TextWithDistance>() {

                                    @Override
                                    public void collect(TextWithDistance r) {
                                        distance_to_kth_neighbor.set(r.distance);
                                    }
                                });
                    }
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y,
                        distance_to_kth_neighbor.get());
                LOG.info("Expanding to kth neighbor: " + distance_to_kth_neighbor);
            }

            // Calculate the number of additional blocks to be processed to
            // check the terminating condition
            additional_blocks_2b_processed.set(0);
            final Shape temp = range_for_this_iteration;
            globalIndex.rangeQuery(range_for_next_iteration, new ResultCollector<Partition>() {
                @Override
                public void collect(Partition p) {
                    if (!(p.isIntersected(temp))) {
                        additional_blocks_2b_processed.set(additional_blocks_2b_processed.get() + 1);
                    }
                }
            });
            range_for_this_iteration = range_for_next_iteration;
        }
    } while (additional_blocks_2b_processed.get() > 0);

    // If output file is not set by user, delete it
    if (userOutputPath == null)
        outFs.delete(outputPath, true);
    TotalIterations.addAndGet(iterations);

    return job;
}

From source file:eu.stratosphere.hadoopcompatibility.datatypes.DefaultStratosphereTypeConverter.java

License:Apache License

@SuppressWarnings("unchecked")
private <T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
    if (hadoopType == LongWritable.class) {
        return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.Text.class) {
        return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.IntWritable.class) {
        return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.FloatWritable.class) {
        return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
        return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
        return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.ByteWritable.class) {
        return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
    }

    throw new RuntimeException("Unable to convert Stratosphere type ("
            + stratosphereType.getClass().getCanonicalName() + ") to Hadoop.");
}
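
The DoubleWritable branch above simply unwraps the Stratosphere DoubleValue and wraps the primitive in a new DoubleWritable. A hedged, self-contained analogue of the same dispatch idea without the Stratosphere types (the convert helper below is illustrative, not part of either API):

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Writable;

public class ConvertSketch {
    // Wrap a boxed Java value in the requested Writable type.
    // Only the double case is shown; the cast is unchecked, as above.
    @SuppressWarnings("unchecked")
    static <T extends Writable> T convert(Object value, Class<T> hadoopType) {
        if (hadoopType == DoubleWritable.class) {
            return (T) new DoubleWritable((Double) value);
        }
        throw new RuntimeException(
                "Unable to convert " + value.getClass().getCanonicalName() + " to Hadoop.");
    }

    public static void main(String[] args) {
        DoubleWritable dw = convert(42.0, DoubleWritable.class);
        System.out.println(dw.get()); // 42.0
    }
}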

From source file:full_MapReduce.FindBestAttributeMapper.java

License:Open Source License

public void map(Text key, MapWritable value, Context context) throws IOException, InterruptedException {
    TextArrayWritable values = getValues(value);
    Map<Text, Integer> tuple_per_split = getTuplePerSplit(value);

    int tot_tuple = 0;
    for (Integer i : tuple_per_split.values()) {
        tot_tuple += i;
    }

    double global_entropy = global_entropy(value, tot_tuple);
    double gain = gain(global_entropy, tuple_per_split, value, tot_tuple);
    DoubleWritable gain_ratio = new DoubleWritable(gainRatio(gain, tuple_per_split, tot_tuple));

    context.write(NullWritable.get(), new AttributeGainRatioWritable(key, values, gain_ratio));
}

From source file:full_MapReduce.FindBestAttributeReducer.java

License:Open Source License

public void reduce(NullWritable key, Iterable<AttributeGainRatioWritable> values, Context context)
        throws IOException, InterruptedException {
    int nb_attributes_left = -1;
    TextArrayWritable DEBUG = new TextArrayWritable();
    DEBUG.set(new Text[] { new Text("ERROR") });
    AttributeGainRatioWritable best_attribute = new AttributeGainRatioWritable(new Text("ERROR"), DEBUG,
            new DoubleWritable(0.0));
    DoubleWritable best_gain_ratio = new DoubleWritable(-Double.MAX_VALUE);

    for (AttributeGainRatioWritable value : values) {
        ++nb_attributes_left;
        if (value.getGainRatio().compareTo(best_gain_ratio) > 0) {
            best_gain_ratio = new DoubleWritable(value.getGainRatio().get());
            best_attribute.set(new Text(value.getname()), new TextArrayWritable(value.getValues()),
                    new DoubleWritable(value.getGainRatio().get()));
        }
    }

    context.write(key, new Text(best_attribute.toString() + "," + nb_attributes_left));
}
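
The reducer works because DoubleWritable implements WritableComparable, so compareTo orders by the wrapped double, and seeding the running best with -Double.MAX_VALUE guarantees the first real gain ratio wins. A minimal sketch of that selection pattern with made-up candidate values:

import org.apache.hadoop.io.DoubleWritable;

public class BestValueSelection {
    public static void main(String[] args) {
        // Start below any possible gain ratio, as the reducer above does.
        DoubleWritable best = new DoubleWritable(-Double.MAX_VALUE);

        for (double candidate : new double[] { 0.12, 0.47, 0.31 }) {
            DoubleWritable value = new DoubleWritable(candidate);
            // compareTo compares the wrapped doubles.
            if (value.compareTo(best) > 0) {
                best = value;
            }
        }

        System.out.println(best.get()); // 0.47
    }
}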

From source file:graphvis.engine.FruchtermanReingoldGraphVis.java

License:MIT License

/**
* Apply the Fruchterman-Reingold algorithm on every vertex. Divided into 6 supersteps.
* @param vertex the vertex to calculate on
* @param messages messages received from the previous superstep
* @throws IOException
*/
@Override
public void compute(Vertex<LongWritable, VertexValueWritable, EdgeValueWritable> vertex,
        Iterable<MessageWritable> messages) throws IOException {
    // Super Step 0
    if (getSuperstep() % SUPERSTEPS == 0) {
        // Everybody is awake
        //Get default aggregator values.
        double T = ((DoubleWritable) aggregator.getAggregatedValue("T")).get();
        double k = ((DoubleWritable) aggregator.getAggregatedValue("k")).get();

        // Very first superstep: init in random position
        if (getSuperstep() == 0) {
            Random random = new Random();
            VectorWritable pos = new VectorWritable((random.nextDouble() - 0.5) * 100.0,
                    (random.nextDouble() - 0.5) * 100.0);

            VectorWritable disp = new VectorWritable(0.0, 0.0);
            VertexValueWritable vertexValue = new VertexValueWritable(pos, disp);
            vertex.setValue(vertexValue);

            if (vertex.getId().get() == 1) {
                //Initialize aggregator values.
                aggregator.aggregate("k", new DoubleWritable(-k + Math.sqrt(AREA / getTotalNumVertices())));
                aggregator.aggregate("T", new DoubleWritable(-T + W / 10));
                T = W / 10;
            }
        } else {
            // If it's not the very first superstep, let's chill!
            if (vertex.getId().get() == 1) {
                //cool
                aggregator.aggregate("T", new DoubleWritable(-SPEED));
                T = T - SPEED;
            }
        }

        // If we're not frozen yet, wake everyone up and send own position to everyone for next superstep
        if (T > 0 || (T < MIN_DIST && T > -MIN_DIST && vertex.getId().get() == 1)) {

            LongWritable ownId = vertex.getId();
            long intOwnId = ownId.get();

            VectorWritable ownPos = new VectorWritable(vertex.getValue().getPos().getX(),
                    vertex.getValue().getPos().getY());

            long totalVertices = getTotalNumVertices();

            // We assume that vertices are numbered 1..n where n is the number
            // of vertices
            for (long i = 1; i <= totalVertices; i++) {
                // Send position messages to everyone except self
                if (i != intOwnId) {
                    sendMessage(new LongWritable(i), new MessageWritable(ownId, ownPos));
                }
            }
        }
    } else if (getSuperstep() % SUPERSTEPS == 1) {
        // Everybody is awake

        // calculate repulsive forces between everyone
        VertexValueWritable vertexValue = vertex.getValue();
        VectorWritable pos = vertexValue.getPos();
        // We start with zero displacement
        VectorWritable disp = new VectorWritable(0.0, 0.0);

        for (MessageWritable messageWritable : messages) {

            VectorWritable otherPos = messageWritable.getPos();
            VectorWritable delta = pos.subtract(otherPos);
            double deltaLength = delta.length();

            // if dots are in the same place, let's try to separate them
            if (deltaLength < MIN_DIST) {
                delta = makeUpDelta();
                deltaLength = delta.length();
            }

            // Update displacement
            disp = disp.add(delta.multiply(fr(deltaLength) / deltaLength));
        }

        // set new disp
        vertex.setValue(new VertexValueWritable(pos, disp));

        // Send position to neighbors
        VectorWritable ownPos = new VectorWritable(pos.getX(), pos.getY());
        LongWritable ownId = vertex.getId();

        for (Edge<LongWritable, EdgeValueWritable> edge : vertex.getEdges()) {

            sendMessage(edge.getTargetVertexId(), new MessageWritable(ownId, ownPos));
        }

    } else if (getSuperstep() % SUPERSTEPS == 2) {
        // Vertices with in-edges are awake

        // anyone who received a message, calculate displacement, then
        // reply and wait, because in the next step they might get more messages
        // from vertices which are connected by out-edges

        // param3 true: send a message back with position
        applyAttractiveForces(vertex, messages, true);

        // if there are no out-edges, move, otherwise wait until next step and move then
        if (vertex.getNumEdges() == 0) {
            move(vertex);
        }
    } else if (getSuperstep() % SUPERSTEPS == 3) {
        // Vertices with out-edges are awake

        // param3 true: no need to send another message back
        applyAttractiveForces(vertex, messages, false);

        // move, those who don't have out-edges have already moved
        move(vertex);

        // Wake up vertices with in-edges
        for (Edge<LongWritable, EdgeValueWritable> edge : vertex.getEdges()) {

            sendMessage(edge.getTargetVertexId(), new MessageWritable(vertex.getId(), new VectorWritable()));
        }
    } else if (getSuperstep() % SUPERSTEPS == 4) {
        // Vertices with in-edges are awake

        LongWritable ownId = new LongWritable(vertex.getId().get());

        VectorWritable ownPos = new VectorWritable(vertex.getValue().getPos().getX(),
                vertex.getValue().getPos().getY());

        // Send the new position back to everyone from whom a message was received
        for (MessageWritable messageWritable : messages) {

            sendMessage(messageWritable.getSrcId(), new MessageWritable(ownId, ownPos));
        }
    } else if (getSuperstep() % SUPERSTEPS == 5) {
        // Vertices with out-edges are awake

        // Set neighbor's position in edge value
        for (MessageWritable messageWritable : messages) {

            long srcId = messageWritable.getSrcId().get();

            VectorWritable pos = new VectorWritable(messageWritable.getPos().getX(),
                    messageWritable.getPos().getY());

            for (Edge<LongWritable, EdgeValueWritable> edge : vertex.getEdges()) {

                long targetId = edge.getTargetVertexId().get();

                if (targetId == srcId) {

                    // Keep weight
                    LongWritable weight = new LongWritable(edge.getValue().getWeight().get());

                    vertex.setEdgeValue(new LongWritable(targetId), new EdgeValueWritable(weight, pos));
                }
            }
        }

        // Wake everyone up for the next superstep, including self
        long totalVertices = getTotalNumVertices();

        for (int i = 1; i <= totalVertices; i++) {
            sendMessage(new LongWritable(i), new MessageWritable());
        }
    }

    vertex.voteToHalt();
}
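
The aggregator calls above rely on sum semantics: aggregating new DoubleWritable(-SPEED) lowers T by SPEED, and aggregating -k + newValue effectively replaces k. A toy stand-in for such a sum aggregator that only illustrates this delta arithmetic (the real Giraph aggregator API differs):

import org.apache.hadoop.io.DoubleWritable;

public class SumAggregatorSketch {
    private final DoubleWritable value = new DoubleWritable(0);

    // Each call adds the delta to the running value, so a negative
    // delta decreases it and (-old + new) replaces it.
    void aggregate(DoubleWritable delta) {
        value.set(value.get() + delta.get());
    }

    public static void main(String[] args) {
        SumAggregatorSketch t = new SumAggregatorSketch();
        t.aggregate(new DoubleWritable(100.0)); // e.g. initialize T = W / 10
        t.aggregate(new DoubleWritable(-10.0)); // cool: T -= SPEED
        System.out.println(t.value.get());      // 90.0
    }
}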

From source file:graphvis.test.FruchtermanReingoldGraphVisTest.java

License:MIT License

/**
 * Test superstep6
 */
@Test
public void testSuperstep6() throws Exception {

    Vertex<LongWritable, VertexValueWritable, EdgeValueWritable> vertex = new DefaultVertex<LongWritable, VertexValueWritable, EdgeValueWritable>();

    FruchtermanReingoldGraphVis computation = mock(FruchtermanReingoldGraphVis.class);

    when(computation.getAggregatedValue("T")).thenReturn(new DoubleWritable(1000));
    when(computation.getAggregatedValue("k")).thenReturn(new DoubleWritable(4000000));

    //double tBefore = computation.getT();

    MockUtils.prepareVertexAndComputation(vertex, new LongWritable(1L), new VertexValueWritable(), false,
            computation, 6L);

    vertex.setValue(new VertexValueWritable());

    vertex.addEdge(EdgeFactory.create(new LongWritable(2), new EdgeValueWritable()));
    vertex.addEdge(EdgeFactory.create(new LongWritable(3), new EdgeValueWritable()));

    computation.compute(vertex, new ArrayList<MessageWritable>());
    //double tAfter = computation.getT();

    //assertTrue(vertex.isHalted());
    //cool should be called in superstep6
    //assertEquals(10,tBefore-tAfter,0);

}

From source file:graphvis.test.FruchtermanReingoldGraphVisTest.java

License:MIT License

/**
 * Test superstep606
 */
@Test
public void testSuperstep606() throws Exception {

    Vertex<LongWritable, VertexValueWritable, EdgeValueWritable> vertex = new DefaultVertex<LongWritable, VertexValueWritable, EdgeValueWritable>();

    FruchtermanReingoldGraphVis computation = mock(FruchtermanReingoldGraphVis.class);

    when(computation.getAggregatedValue("T")).thenReturn(new DoubleWritable(1000));
    when(computation.getAggregatedValue("k")).thenReturn(new DoubleWritable(4000000));

    //double tBefore = computation.getT();

    MockUtils.MockedEnvironment<LongWritable, VertexValueWritable, EdgeValueWritable, MessageWritable> env = MockUtils
            .prepareVertexAndComputation(vertex, new LongWritable(1L), new VertexValueWritable(), false,
                    computation, 606L);

    vertex.setValue(new VertexValueWritable());

    vertex.addEdge(EdgeFactory.create(new LongWritable(2L), new EdgeValueWritable()));
    vertex.addEdge(EdgeFactory.create(new LongWritable(3L), new EdgeValueWritable()));

    computation.compute(vertex, new ArrayList<MessageWritable>());
    //double tAfter = computation.getT();

    //assertTrue(vertex.isHalted());
    //cool should be called in superstep6
    //assertEquals(10,tBefore-tAfter,0);
    //computation should stop after this superstep, so no messages should be sent
    env.verifyNoMessageSent();

}

From source file:hadoop.twitter.mapreduce.UserWritable.java

public UserWritable() {
    pageRank = new DoubleWritable(1);
    followee = new Text();
}
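
The constructor initializes pageRank to new DoubleWritable(1), a conventional starting rank before iterative updates. For context, a hedged sketch of how such a Writable usually completes, delegating serialization to its DoubleWritable and Text fields in a fixed order (the write/readFields bodies are assumptions, not shown in the source):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class UserWritableSketch implements Writable {
    private DoubleWritable pageRank = new DoubleWritable(1);
    private Text followee = new Text();

    @Override
    public void write(DataOutput out) throws IOException {
        // Fields must be written and read in the same order.
        pageRank.write(out);
        followee.write(out);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        pageRank.readFields(in);
        followee.readFields(in);
    }
}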