Example usage for org.apache.hadoop.io.DoubleWritable.get()

Introduction

This page collects example usages of org.apache.hadoop.io.DoubleWritable.get() drawn from open-source projects.

Prototype

public double get() 
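
A minimal sketch of the accessor in isolation; the literal values here are illustrative and are not taken from the examples below:

import org.apache.hadoop.io.DoubleWritable;

DoubleWritable dw = new DoubleWritable(3.14);
double d = dw.get();   // returns the wrapped primitive, here 3.14
dw.set(2.71);          // the wrapper is mutable; get() now returns 2.71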

Usage

From source file:org.apache.giraph.types.ops.DoubleTypeOps.java

License:Apache License

@Override
public DoubleWritable createCopy(DoubleWritable from) {
    return new DoubleWritable(from.get());
}

From source file:org.apache.giraph.types.ops.DoubleTypeOps.java

License:Apache License

@Override
public void set(DoubleWritable to, DoubleWritable from) {
    to.set(from.get());
}

From source file:org.apache.giraph.writable.kryo.DirectWritableSerializerCopyTest.java

License:Apache License

@Test
public void test1() {
    DoubleWritable value = new DoubleWritable(5.9999);
    DirectWritableSerializer<DoubleWritable> serializer = new DirectWritableSerializer<>();
    Kryo kryo = new Kryo();
    DoubleWritable copy = serializer.copy(kryo, value);
    Assert.assertEquals(value.get(), copy.get(), 0);
}

From source file:org.apache.giraph.writable.kryo.DirectWritableSerializerCopyTest.java

License:Apache License

@Test
public void test2() {
    WDoubleArrayList list = new WDoubleArrayList();
    list.addW(new DoubleWritable(0.11111111));
    list.addW(new DoubleWritable(1000.9));
    list.addW(new DoubleWritable(99999999.99999999));
    DirectWritableSerializer<WDoubleArrayList> serializer = new DirectWritableSerializer<>();
    Kryo kryo = new Kryo();
    WDoubleArrayList copy = serializer.copy(kryo, list);
    DoubleWritable reusable = new DoubleWritable();
    copy.getIntoW(0, reusable);
    Assert.assertEquals(0.11111111, reusable.get(), 0);
    copy.getIntoW(1, reusable);
    Assert.assertEquals(1000.9, reusable.get(), 0);
    copy.getIntoW(2, reusable);
    Assert.assertEquals(99999999.99999999, reusable.get(), 0);
}

From source file:org.apache.hama.examples.SpMV.java

License:Apache License

/**
 * SpMV produces a file containing the result dense vector as pairs of an
 * integer index and a double value. This method converts that output into a
 * dense vector usable in subsequent computation, for example by iterative
 * solvers. IMPORTANT: currently it is only used in SpMV, and it can become a
 * bottleneck because the entire input must be held in memory.
 *
 * @param SpMVoutputPathString output path, which represents the directory
 *          containing the part files.
 * @param conf configuration
 * @return path to the output vector.
 * @throws IOException
 */
public static String convertSpMVOutputToDenseVector(String SpMVoutputPathString, HamaConfiguration conf)
        throws IOException {
    List<Integer> indeces = new ArrayList<Integer>();
    List<Double> values = new ArrayList<Double>();

    FileSystem fs = FileSystem.get(conf);
    Path SpMVOutputPath = new Path(SpMVoutputPathString);
    Path resultOutputPath = SpMVOutputPath.getParent().suffix("/result");
    FileStatus[] stats = fs.listStatus(SpMVOutputPath);
    for (FileStatus stat : stats) {
        String filePath = stat.getPath().toUri().getPath();
        SequenceFile.Reader reader = null;
        try {
            reader = new SequenceFile.Reader(fs, new Path(filePath), conf);
            IntWritable key = new IntWritable();
            DoubleWritable value = new DoubleWritable();
            while (reader.next(key, value)) {
                indeces.add(key.get());
                values.add(value.get());
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            if (reader != null)
                reader.close();
        }
    }
    DenseVectorWritable result = new DenseVectorWritable();
    result.setSize(indeces.size());
    for (int i = 0; i < indeces.size(); i++)
        result.addCell(indeces.get(i), values.get(i));
    writeToFile(resultOutputPath.toString(), result, conf);
    return resultOutputPath.toString();
}
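
A hypothetical invocation of the helper above; the HDFS path is an assumption used only for illustration and does not appear in the original source:

HamaConfiguration conf = new HamaConfiguration();
// Converts the SpMV part files under the given directory into a single dense vector file,
// written as a sibling path named "result", and returns that path.
String resultPath = SpMV.convertSpMVOutputToDenseVector("/user/hama/spmv/output", conf);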

From source file:org.apache.hama.examples.util.WritableUtil.java

License:Apache License

/**
 * SpMV produces a file containing the result dense vector as pairs of an
 * integer index and a double value. This method converts that output into a
 * dense vector usable in subsequent computation, for example by iterative
 * solvers. IMPORTANT: currently it is only used in SpMV, and it can become a
 * bottleneck because the entire input must be held in memory.
 *
 * @param SpMVoutputPathString
 *          output path, which represents the directory containing the part files.
 * @param conf
 *          configuration
 * @return path to the output vector.
 * @throws IOException
 */
public static String convertSpMVOutputToDenseVector(String SpMVoutputPathString, Configuration conf)
        throws IOException {
    List<Integer> indeces = new ArrayList<Integer>();
    List<Double> values = new ArrayList<Double>();

    FileSystem fs = FileSystem.get(conf);
    Path SpMVOutputPath = new Path(SpMVoutputPathString);
    Path resultOutputPath = SpMVOutputPath.getParent().suffix("/result");
    FileStatus[] stats = fs.listStatus(SpMVOutputPath);
    for (FileStatus stat : stats) {
        String filePath = stat.getPath().toUri().getPath();
        SequenceFile.Reader reader = null;
        try {
            reader = new SequenceFile.Reader(fs, new Path(filePath), conf);
            IntWritable key = new IntWritable();
            DoubleWritable value = new DoubleWritable();
            while (reader.next(key, value)) {
                indeces.add(key.get());
                values.add(value.get());
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            if (reader != null)
                reader.close();
        }
    }
    DenseVectorWritable result = new DenseVectorWritable();
    result.setSize(indeces.size());
    for (int i = 0; i < indeces.size(); i++)
        result.addCell(indeces.get(i), values.get(i));
    writeToFile(resultOutputPath.toString(), result, conf);
    return resultOutputPath.toString();
}

From source file:org.apache.hama.graph.AbsDiffAggregator.java

License:Apache License

@Override
public void aggregate(DoubleWritable oldValue, DoubleWritable newValue) {
    // oldValue may be null on the first aggregation, so guard before computing the difference
    if (oldValue != null) {
        absoluteDifference += Math.abs(oldValue.get() - newValue.get());
    }
}

From source file:org.apache.hama.graph.AbsDiffAggregator.java

License:Apache License

@Override
public void aggregate(DoubleWritable value) {
    absoluteDifference += value.get();
}

From source file:org.apache.hama.graph.TestAverageAggregator.java

License:Apache License

@Test
public void testAggregator() {
    AverageAggregator diff = new AverageAggregator();
    diff.aggregate(new DoubleWritable(5), new DoubleWritable(2));
    diff.aggregateInternal();
    diff.aggregate(new DoubleWritable(5), new DoubleWritable(2));
    diff.aggregateInternal();
    diff.aggregate(null, new DoubleWritable(5));
    diff.aggregateInternal();

    assertEquals(3, diff.getTimesAggregated().get());
    DoubleWritable x = diff.finalizeAggregation();
    assertEquals(2, (int) x.get());

}

From source file:org.apache.hama.graph.TestSubmitGraphJob.java

License:Apache License

private void verifyResult() throws IOException {
    double sum = 0.0;
    FileStatus[] globStatus = fs.globStatus(new Path(OUTPUT + "/part-*"));
    for (FileStatus fts : globStatus) {
        SequenceFile.Reader reader = new SequenceFile.Reader(fs, fts.getPath(), configuration);
        Text key = new Text();
        DoubleWritable value = new DoubleWritable();

        while (reader.next(key, value)) {
            sum += value.get();
        }
        reader.close();
    }
    LOG.info("Sum is: " + sum);
    assertTrue("Sum was: " + sum, sum > 0.9d && sum <= 1.1d);
}