Example usage for org.apache.hadoop.io.DoubleWritable.get()

List of usage examples for org.apache.hadoop.io.DoubleWritable.get()

Introduction

On this page you can find usage examples for org.apache.hadoop.io.DoubleWritable.get().

Prototype

public double get() 
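
get() returns the primitive double wrapped by the DoubleWritable. Below is a minimal sketch of get() together with its counterpart set(double); the class name DoubleWritableGetExample is illustrative and does not come from the source files listed under Usage.

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableGetExample {
    public static void main(String[] args) {
        // Wrap a primitive double in a Writable
        DoubleWritable writable = new DoubleWritable(3.14);

        // get() unwraps the stored primitive value
        double value = writable.get();
        System.out.println("Stored value: " + value);            // 3.14

        // set() replaces the stored value; a later get() reflects the change
        writable.set(value * 2);
        System.out.println("Updated value: " + writable.get());  // 6.28
    }
}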

Usage

From source file: edu.umn.cs.sthadoop.operations.HSPKNNQ.java

License: Open Source License

/**
 * A MapReduce version of KNN query.
 * @param inputPath path of the spatially indexed input file
 * @param userOutputPath path of the output file, or null to generate a temporary output path
 * @param params operation parameters, including the query point ("point") and k
 * @return the last submitted job
 * @throws IOException
 * @throws InterruptedException 
 * @throws ClassNotFoundException 
 */
private static <S extends Shape> Job knnMapReduce(Path inputPath, Path userOutputPath, OperationsParams params)
        throws IOException, ClassNotFoundException, InterruptedException {
    Job job = new Job(params, "PKNN");
    job.setJarByClass(HSPKNNQ.class);

    FileSystem inFs = inputPath.getFileSystem(params);
    job.setInputFormatClass(SpatialInputFormat3.class);
    SpatialInputFormat3.setInputPaths(job, inputPath);

    job.setMapperClass(KNNMap.class);
    job.setMapOutputKeyClass(NullWritable.class);
    job.setMapOutputValueClass(TextWithDistance.class);

    job.setReducerClass(KNNReduce.class);
    job.setNumReduceTasks(1);

    job.getConfiguration().setClass(SpatialSite.FilterClass, RangeFilter.class, BlockFilter.class);
    final Point queryPoint = (Point) params.getShape("point");
    final int k = params.getInt("k", 1);

    final IntWritable additional_blocks_2b_processed = new IntWritable(0);
    long resultCount;
    int iterations = 0;

    Path outputPath = userOutputPath;
    if (outputPath == null) {
        do {
            outputPath = new Path(inputPath.getName() + ".knn_" + (int) (Math.random() * 1000000));
        } while (inFs.exists(outputPath));
    }
    job.setOutputFormatClass(TextOutputFormat3.class);
    TextOutputFormat3.setOutputPath(job, outputPath);

    GlobalIndex<Partition> globalIndex = SpatialSite.getGlobalIndex(inFs, inputPath);
    Configuration templateConf = job.getConfiguration();

    FileSystem outFs = outputPath.getFileSystem(params);
    // Start with the query point to select all partitions overlapping with it
    Shape range_for_this_iteration = new Point(queryPoint.x, queryPoint.y);

    do {
        job = new Job(templateConf);
        // Delete results of last iteration if not first iteration
        if (outputPath != null)
            outFs.delete(outputPath, true);

        LOG.info("Running iteration: " + (++iterations));
        // Set query range for the SpatialInputFormat
        OperationsParams.setShape(job.getConfiguration(), RangeFilter.QueryRange, range_for_this_iteration);

        // Submit the job
        if (params.getBoolean("background", false)) {
            // XXX this is incorrect because if the job needs multiple iterations,
            // it will run only the first one
            job.waitForCompletion(false);
            return job;
        }
        job.waitForCompletion(false);

        // Retrieve answers for this iteration
        Counters counters = job.getCounters();
        Counter resultSizeCounter = counters.findCounter(Task.Counter.REDUCE_OUTPUT_RECORDS);
        resultCount = resultSizeCounter.getValue();

        if (globalIndex != null) {
            Circle range_for_next_iteration;
            if (resultCount < k) {
                LOG.info("Found only " + resultCount + " results");
                // Did not find enough results in the query space
                // Increase the distance by doubling the maximum distance among all
                // partitions that were processed
                final DoubleWritable maximum_distance = new DoubleWritable(0);
                int matched_partitions = globalIndex.rangeQuery(range_for_this_iteration,
                        new ResultCollector<Partition>() {
                            @Override
                            public void collect(Partition p) {
                                double distance = p.getMaxDistanceTo(queryPoint.x, queryPoint.y);
                                if (distance > maximum_distance.get())
                                    maximum_distance.set(distance);
                            }
                        });
                if (matched_partitions == 0) {
                    // The query point is outside the search space
                    // Set the range to include the closest partition
                    globalIndex.knn(queryPoint.x, queryPoint.y, 1, new ResultCollector2<Partition, Double>() {
                        @Override
                        public void collect(Partition r, Double s) {
                            maximum_distance.set(s);
                        }
                    });
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y, maximum_distance.get() * 2);
                LOG.info("Expanding to " + maximum_distance.get() * 2);
            } else {
                // Calculate the new test range which is a circle centered at the
                // query point and distance to the k^{th} neighbor

                // Get distance to the kth neighbor
                final DoubleWritable distance_to_kth_neighbor = new DoubleWritable();
                FileStatus[] results = outFs.listStatus(outputPath);
                for (FileStatus result_file : results) {
                    if (result_file.getLen() > 0 && result_file.getPath().getName().startsWith("part-")) {
                        // Read the last line (kth neighbor)
                        Tail.tail(outFs, result_file.getPath(), 1, new TextWithDistance(),
                                new ResultCollector<TextWithDistance>() {

                                    @Override
                                    public void collect(TextWithDistance r) {
                                        distance_to_kth_neighbor.set(r.distance);
                                    }
                                });
                    }
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y,
                        distance_to_kth_neighbor.get());
                LOG.info("Expanding to kth neighbor: " + distance_to_kth_neighbor);
            }

            // Calculate the number of blocks to be processed to check the
            // terminating condition;
            additional_blocks_2b_processed.set(0);
            final Shape temp = range_for_this_iteration;
            globalIndex.rangeQuery(range_for_next_iteration, new ResultCollector<Partition>() {
                @Override
                public void collect(Partition p) {
                    if (!(p.isIntersected(temp))) {
                        additional_blocks_2b_processed.set(additional_blocks_2b_processed.get() + 1);
                    }
                }
            });
            range_for_this_iteration = range_for_next_iteration;
        }
    } while (additional_blocks_2b_processed.get() > 0);

    // If output file is not set by user, delete it
    if (userOutputPath == null)
        outFs.delete(outputPath, true);
    TotalIterations.addAndGet(iterations);

    return job;
}

From source file: edu.umn.cs.sthadoop.trajectory.KNNDTW.java

License: Open Source License

/**
 * A MapReduce version of KNN query.
 * 
 * @param inputPath path of the spatially indexed input file
 * @param userOutputPath path of the output file, or null to generate a temporary output path
 * @param params operation parameters, including the query point ("point") and k
 * @return the last submitted job
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
private static <S extends Shape> Job knnMapReduce(Path inputPath, Path userOutputPath, OperationsParams params)
        throws IOException, ClassNotFoundException, InterruptedException {
    Job job = new Job(params, "KNN");
    job.setJarByClass(KNNDTW.class);

    FileSystem inFs = inputPath.getFileSystem(params);
    job.setInputFormatClass(SpatialInputFormat3.class);
    SpatialInputFormat3.setInputPaths(job, inputPath);

    job.setMapperClass(KNNMap.class);
    job.setMapOutputKeyClass(NullWritable.class);
    job.setMapOutputValueClass(TextWithDistance.class);

    job.setReducerClass(KNNReduce.class);
    job.setNumReduceTasks(1);

    job.getConfiguration().setClass(SpatialSite.FilterClass, RangeFilter.class, BlockFilter.class);
    final Point queryPoint = (Point) params.getShape("point");
    final int k = params.getInt("k", 1);

    final IntWritable additional_blocks_2b_processed = new IntWritable(0);
    long resultCount;
    int iterations = 0;

    Path outputPath = userOutputPath;
    if (outputPath == null) {
        do {
            outputPath = new Path(inputPath.getName() + ".knn_" + (int) (Math.random() * 1000000));
        } while (inFs.exists(outputPath));
    }
    job.setOutputFormatClass(TextOutputFormat3.class);
    TextOutputFormat3.setOutputPath(job, outputPath);

    GlobalIndex<Partition> globalIndex = SpatialSite.getGlobalIndex(inFs, inputPath);
    Configuration templateConf = job.getConfiguration();

    FileSystem outFs = outputPath.getFileSystem(params);
    // Start with the query point to select all partitions overlapping with
    // it
    Shape range_for_this_iteration = new Point(queryPoint.x, queryPoint.y);

    do {
        job = new Job(templateConf);
        // Delete results of last iteration if not first iteration
        if (outputPath != null)
            outFs.delete(outputPath, true);

        LOG.info("Running iteration: " + (++iterations));
        // Set query range for the SpatialInputFormat
        OperationsParams.setShape(job.getConfiguration(), RangeFilter.QueryRange, range_for_this_iteration);

        // Submit the job
        if (params.getBoolean("background", false)) {
            // XXX this is incorrect because if the job needs multiple
            // iterations,
            // it will run only the first one
            job.waitForCompletion(false);
            return job;
        }
        job.waitForCompletion(false);

        // Retrieve answers for this iteration
        Counters counters = job.getCounters();
        Counter resultSizeCounter = counters.findCounter(Task.Counter.REDUCE_OUTPUT_RECORDS);
        resultCount = resultSizeCounter.getValue();

        if (globalIndex != null) {
            Circle range_for_next_iteration;
            if (resultCount < k) {
                LOG.info("Found only " + resultCount + " results");
                // Did not find enough results in the query space
                // Increase the distance by doubling the maximum distance
                // among all
                // partitions that were processed
                final DoubleWritable maximum_distance = new DoubleWritable(0);
                int matched_partitions = globalIndex.rangeQuery(range_for_this_iteration,
                        new ResultCollector<Partition>() {
                            @Override
                            public void collect(Partition p) {
                                double distance = p.getMaxDistanceTo(queryPoint.x, queryPoint.y);
                                if (distance > maximum_distance.get())
                                    maximum_distance.set(distance);
                            }
                        });
                if (matched_partitions == 0) {
                    // The query point is outside the search space
                    // Set the range to include the closest partition
                    globalIndex.knn(queryPoint.x, queryPoint.y, 1, new ResultCollector2<Partition, Double>() {
                        @Override
                        public void collect(Partition r, Double s) {
                            maximum_distance.set(s);
                        }
                    });
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y, maximum_distance.get() * 2);
                LOG.info("Expanding to " + maximum_distance.get() * 2);
            } else {
                // Calculate the new test range which is a circle centered
                // at the
                // query point and distance to the k^{th} neighbor

                // Get distance to the kth neighbor
                final DoubleWritable distance_to_kth_neighbor = new DoubleWritable();
                FileStatus[] results = outFs.listStatus(outputPath);
                for (FileStatus result_file : results) {
                    if (result_file.getLen() > 0 && result_file.getPath().getName().startsWith("part-")) {
                        // Read the last line (kth neighbor)
                        Tail.tail(outFs, result_file.getPath(), 1, new TextWithDistance(),
                                new ResultCollector<TextWithDistance>() {

                                    @Override
                                    public void collect(TextWithDistance r) {
                                        distance_to_kth_neighbor.set(r.distance);
                                    }
                                });
                    }
                }
                range_for_next_iteration = new Circle(queryPoint.x, queryPoint.y,
                        distance_to_kth_neighbor.get());
                LOG.info("Expanding to kth neighbor: " + distance_to_kth_neighbor);
            }

            // Calculate the number of blocks to be processed to check the
            // terminating condition;
            additional_blocks_2b_processed.set(0);
            final Shape temp = range_for_this_iteration;
            globalIndex.rangeQuery(range_for_next_iteration, new ResultCollector<Partition>() {
                @Override
                public void collect(Partition p) {
                    if (!(p.isIntersected(temp))) {
                        additional_blocks_2b_processed.set(additional_blocks_2b_processed.get() + 1);
                    }
                }
            });
            range_for_this_iteration = range_for_next_iteration;
        }
    } while (additional_blocks_2b_processed.get() > 0);

    // If output file is not set by user, delete it
    if (userOutputPath == null)
        outFs.delete(outputPath, true);
    TotalIterations.addAndGet(iterations);

    return job;
}

From source file: hadoop.mongo.treasury.TreasuryYieldReducer.java

License: Apache License

@Override
public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
        throws IOException, InterruptedException {

    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }

    final double avg = sum / count;

    if (LOG.isDebugEnabled()) {
        LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
    }
    System.out.println("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
    BasicBSONObject output = new BasicBSONObject();
    output.put("count", count);
    output.put("avg", avg);
    output.put("sum", sum);
    reduceResult.setDoc(output);
    pContext.write(pKey, reduceResult);
}

From source file: hadoop.mongo.treasury.TreasuryYieldUpdateReducer.java

License: Apache License

@Override
public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
        throws IOException, InterruptedException {

    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }

    final double avg = sum / count;

    if (LOG.isDebugEnabled()) {
        LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);
    }

    BasicBSONObject query = new BasicBSONObject("_id", pKey.get());
    BasicBSONObject modifiers = new BasicBSONObject();
    modifiers.put("$set",
            BasicDBObjectBuilder.start().add("count", count).add("avg", avg).add("sum", sum).get());

    modifiers.put("$push", new BasicBSONObject("calculatedAt", new Date()));
    modifiers.put("$inc", new BasicBSONObject("numCalculations", 1));
    reduceResult.setQuery(query);
    reduceResult.setModifiers(modifiers);
    pContext.write(null, reduceResult);
}

From source file: Hama_MMMAS.HAMA_TEST.java

private double[][] handletheMessage(DoubleTwoDArrayWritable In) throws IOException {
    int col, row;
    DoubleWritable mid;
    Writable[][] temp;
    temp = In.get();
    col = temp.length;
    row = temp[0].length;
    double[][] Out = new double[col][row];
    for (int k1 = 0; k1 < col; k1++) {
        for (int j1 = 0; j1 < row; j1++) {
            mid = (DoubleWritable) temp[k1][j1];
            Out[k1][j1] = mid.get();
        }
    }
    return Out;
}

From source file: io.apigee.lembos.mapreduce.converters.input.DoubleWritableConverter.java

License: Apache License

/**
 * Takes in a {@link DoubleWritable} and returns a {@link Double}.
 *
 * @param scope the JavaScript scope
 * @param writable the value to convert
 *
 * @return the {@link Double} equivalent
 */
@Override
public Object toJavaScript(final Scriptable scope, final DoubleWritable writable) {
    return writable.get();
}

From source file: it.uniroma1.bdc.piccioli.tesi.SimpleShortestPathsComputationTextValue.java

License: Apache License

@Override
public void compute(Vertex<Text, DoubleWritable, DoubleWritable> vertex, Iterable<DoubleWritable> messages)
        throws IOException {
    if (getSuperstep() == 0) {
        vertex.setValue(new DoubleWritable(Double.MAX_VALUE));
    }
    double minDist = isSource(vertex) ? 0d : Double.MAX_VALUE;
    for (DoubleWritable message : messages) {
        minDist = Math.min(minDist, message.get());
    }
    if (LOG.isDebugEnabled()) {
        System.out.println("Vertex " + vertex.getId() + " got minDist = " + minDist + " vertex value = "
                + vertex.getValue());
    }
    if (minDist < new Double(vertex.getValue().toString())) {
        vertex.setValue(new DoubleWritable(minDist));
        for (Edge<Text, DoubleWritable> edge : vertex.getEdges()) {

            double distance = minDist + edge.getValue().get();

            if (LOG.isDebugEnabled()) {
                System.out.println(
                        "Vertex " + vertex.getId() + " sent to " + edge.getTargetVertexId() + " = " + distance);
            }
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(distance));
        }
    }
    vertex.voteToHalt();
}

From source file: it.uniroma1.bdc.tesi.piccioli.giraphstandalone.sssp.text.SimpleShortestPathsComputationTextValue.java

License: Apache License

@Override
public void compute(Vertex<Text, DoubleWritable, DoubleWritable> vertex, Iterable<DoubleWritable> messages)
        throws IOException {

    if (getSuperstep() == 0) {
        vertex.setValue(new DoubleWritable(Double.MAX_VALUE));
    }
    double minDist = isSource(vertex) ? 0d : Double.MAX_VALUE;
    for (DoubleWritable message : messages) {
        minDist = Math.min(minDist, message.get());
    }
    if (LOG.isDebugEnabled()) {
        System.out.println("Vertex " + vertex.getId() + " got minDist = " + minDist + " vertex value = "
                + vertex.getValue());
    }
    if (minDist < new Double(vertex.getValue().toString())) {
        vertex.setValue(new DoubleWritable(minDist));
        for (Edge<Text, DoubleWritable> edge : vertex.getEdges()) {

            double distance = minDist + edge.getValue().get();

            if (LOG.isDebugEnabled()) {
                System.out.println(
                        "Vertex " + vertex.getId() + " sent to " + edge.getTargetVertexId() + " = " + distance);
            }
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(distance));
        }
    }
    vertex.voteToHalt();
}

From source file: main.okapi.graphs.similarity.Jaccard.java

License: Apache License

/**
  * Converts the [0,1] similarity value to a distance
  * which takes values in [0, INF].
  * 
  */
private static DoubleWritable covertToDistance(DoubleWritable value) {
    if (Math.abs(value.get()) > 0) {
        value.set((1.0 / value.get()) - 1.0);
    } else {
        value.set(Double.MAX_VALUE);
    }
    return value;
}
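
For example, with this conversion a similarity of 0.5 becomes a distance of (1.0 / 0.5) - 1.0 = 1.0, a similarity of 1.0 becomes 0.0, and a similarity of exactly 0 is mapped to Double.MAX_VALUE, so completely dissimilar vertices end up effectively infinitely far apart.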

From source file: main.okapi.graphs.SimplePageRank.java

License: Apache License

@Override
public void compute(Vertex<LongWritable, DoubleWritable, FloatWritable> vertex,
        Iterable<DoubleWritable> messages) {
    if (getSuperstep() == 0) {
        vertex.setValue(new DoubleWritable(1f / getTotalNumVertices()));
    }
    if (getSuperstep() >= 1) {
        double sum = 0;
        for (DoubleWritable message : messages) {
            sum += message.get();
        }
        DoubleWritable vertexValue = new DoubleWritable((0.15f / getTotalNumVertices()) + 0.85f * sum);
        vertex.setValue(vertexValue);
    }

    if (getSuperstep() < getContext().getConfiguration().getInt(MAX_SUPERSTEPS, MAX_SUPERSTEPS_DEFAULT)) {

        long edges = vertex.getNumEdges();
        sendMessageToAllEdges(vertex, new DoubleWritable(vertex.getValue().get() / edges));
    } else {
        vertex.voteToHalt();
    }
}