Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable constructor that takes a double value.

Prototype

public DoubleWritable(double value) 
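
Before the source-file examples below, a minimal self-contained sketch of the constructor together with the companion get()/set() accessors (a hypothetical snippet, not taken from any of the source files):

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableExample {
    public static void main(String[] args) {
        // Wrap a primitive double in a Hadoop writable.
        DoubleWritable writable = new DoubleWritable(3.14159);

        // Read the value back, then overwrite it in place;
        // Hadoop code commonly reuses writable instances to avoid allocation.
        double value = writable.get();
        writable.set(2.71828);

        System.out.println(value + " -> " + writable.get());
    }
}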

Usage

From source file: bookExamples.ch4.algorithms.SimpleShortestPathsComputation.java

License: Apache License

@Override
public void compute(Vertex<LongWritable, DoubleWritable, FloatWritable> vertex,
        Iterable<DoubleWritable> messages) throws IOException {
    if (getSuperstep() == 0) {
        vertex.setValue(new DoubleWritable(Double.MAX_VALUE));
    }
    double minDist = isSource(vertex) ? 0d : Double.MAX_VALUE;
    for (DoubleWritable message : messages) {
        minDist = Math.min(minDist, message.get());
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Vertex " + vertex.getId() + " got minDist = " + minDist + " vertex value = "
                + vertex.getValue());
    }
    if (minDist < vertex.getValue().get()) {
        vertex.setValue(new DoubleWritable(minDist));
        for (Edge<LongWritable, FloatWritable> edge : vertex.getEdges()) {
            double distance = minDist + edge.getValue().get();
            if (LOG.isDebugEnabled()) {
                LOG.debug(
                        "Vertex " + vertex.getId() + " sent to " + edge.getTargetVertexId() + " = " + distance);
            }
            sendMessage(edge.getTargetVertexId(), new DoubleWritable(distance));
        }
    }
    vertex.voteToHalt();
}
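
The isSource helper is not part of this excerpt; in the Giraph shortest-paths example this class is modeled on, it is typically defined along these lines (a sketch, assuming a source vertex id supplied through the job configuration):

/** Configurable source vertex id; the default of 1 is an assumption. */
public static final LongConfOption SOURCE_ID = new LongConfOption(
        "SimpleShortestPathsComputation.sourceId", 1, "The shortest paths source id");

/** Returns true if this vertex is the configured source of the search. */
private boolean isSource(Vertex<LongWritable, ?, ?> vertex) {
    return vertex.getId().get() == SOURCE_ID.get(getConf());
}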

From source file: co.cask.cdap.dq.test.AggregationFunctionsTest.java

License: Apache License

@Test
public void averageGenerateAggregationTest() throws Exception {
    DataQualityWritable val1 = new DataQualityWritable();
    val1.set(new DoubleWritable(2.0));

    DataQualityWritable val2 = new DataQualityWritable();
    val2.set(new DoubleWritable(2.0));

    Mean mean = new Mean();
    mean.add(val1);
    mean.add(val2);

    byte[] output = mean.aggregate();
    Assert.assertEquals(2.0, Bytes.toDouble(output), 0);
}

From source file: co.cask.cdap.dq.test.AggregationFunctionsTest.java

License: Apache License

@Test
public void standardDeviationGenerateAggregationTest() throws Exception {
    DataQualityWritable val1 = new DataQualityWritable();
    val1.set(new DoubleWritable(2.0));

    DataQualityWritable val2 = new DataQualityWritable();
    val2.set(new DoubleWritable(5.0));

    DataQualityWritable val3 = new DataQualityWritable();
    val3.set(new DoubleWritable(10.0));

    DataQualityWritable val4 = new DataQualityWritable();
    val4.set(new DoubleWritable(52.0));

    StandardDeviation standardDeviation = new StandardDeviation();
    standardDeviation.add(val1);
    standardDeviation.add(val2);
    standardDeviation.add(val3);
    standardDeviation.add(val4);
    byte[] output = standardDeviation.aggregate();

    Assert.assertEquals(20.265426, Bytes.toDouble(output), 0.001);
}

From source file: co.nubetech.hiho.dedup.TestDedupJob.java

License: Apache License

@Test
public void testDedupByCustomObjectKeyWithSequenceFileInputFormat() throws Exception {
    Student student1 = setStudent(new Text("Sam"), new Text("US"), new IntWritable(1),
            new LongWritable(9999999998L), new DoubleWritable(99.12));
    Student student2 = setStudent(new Text("John"), new Text("AUS"), new IntWritable(2),
            new LongWritable(9999999999L), new DoubleWritable(90.12));
    Student student3 = setStudent(new Text("Mary"), new Text("UK"), new IntWritable(3),
            new LongWritable(9999999988L), new DoubleWritable(69.12));
    Student student4 = setStudent(new Text("Kelvin"), new Text("UK"), new IntWritable(4),
            new LongWritable(9999998888L), new DoubleWritable(59.12));

    HashMap<Student, Text> inputData1 = new HashMap<Student, Text>();
    inputData1.put(student1, new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(student2, new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(student3, new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");

    HashMap<Student, Text> inputData2 = new HashMap<Student, Text>();
    inputData2.put(student2, new Text("Austin Farley,4794 Donec Ave,1-230-823-8164,13508"));
    inputData2.put(student3, new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(student4, new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");

    String[] args = new String[] { "-inputPath", "/input1,/input2", "-outputPath", "output", "-dedupBy", "key",
            "-inputFormat", "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat",
            "-inputKeyClassName", "co.nubetech.hiho.testdata.Student", "-inputValueClassName",
            "org.apache.hadoop.io.Text" };
    DedupJob job = runDedupJob(args);
    assertEquals(6, job.getTotalRecordsRead());
    assertEquals(0, job.getBadRecords());
    assertEquals(4, job.getOutput());
    assertEquals(2, job.getDuplicateRecords());

    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "output/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<Student> expectedOutput = new ArrayList<Student>();
    expectedOutput.add(student1);
    expectedOutput.add(student2);
    expectedOutput.add(student3);
    expectedOutput.add(student4);
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
        logger.debug("key and value is: " + writableKey + ", " + writableValue);
        assertTrue("Matched output " + writableKey, expectedOutput.contains(writableKey));
        count++;
    }
    IOUtils.closeStream(reader);
    assertEquals(4, count);
}

From source file: co.nubetech.hiho.dedup.TestHashUtility.java

License: Apache License

@Test
public void testMD5HashForDoubleWritableKey() throws IOException {
    DoubleWritable key = new DoubleWritable(123d);
    MD5Hash md5HashKey1 = HashUtility.getMD5Hash(key);
    MD5Hash md5HashKey2 = HashUtility.getMD5Hash(key);
    assertEquals(md5HashKey1, md5HashKey2);
}

From source file: co.nubetech.hiho.merge.TestMergeJob.java

License: Apache License

@Test
public void testMergeByCustomObjectKeyWithSequenceFileInputFormat() throws Exception {
    Student student1 = setStudent(new Text("Sam"), new Text("US"), new IntWritable(1),
            new LongWritable(9999999998L), new DoubleWritable(99.12));
    Student student2 = setStudent(new Text("John"), new Text("AUS"), new IntWritable(2),
            new LongWritable(9999999999L), new DoubleWritable(90.12));
    Student student3 = setStudent(new Text("Mary"), new Text("UK"), new IntWritable(3),
            new LongWritable(9999999988L), new DoubleWritable(69.12));
    Student student4 = setStudent(new Text("Kelvin"), new Text("UK"), new IntWritable(4),
            new LongWritable(9999998888L), new DoubleWritable(59.12));

    HashMap<Student, Text> inputData1 = new HashMap<Student, Text>();
    inputData1.put(student1, new Text("Macon Kent,6269 Aenean St.,1-247-399-1051,08253"));
    inputData1.put(student2, new Text("Dale Zamora,521-7792 Mauris Rd.,1-214-625-6970,90510"));
    inputData1.put(student3, new Text("Charles Wood,525-9709 In Rd.,1-370-528-4758,62714"));
    createSequenceFileInHdfs(inputData1, "/input1", "testFile1.seq");

    HashMap<Student, Text> inputData2 = new HashMap<Student, Text>();
    inputData2.put(student2, new Text("Austin Farley,4794 Donec Ave,1-230-823-8164,13508"));
    inputData2.put(student3, new Text("Macaulay Jackson,5435 Dui. Avenue,1-770-395-6446,31584"));
    inputData2.put(student4, new Text("Timon Leonard,716 Ac Ave,1-857-935-3882,62240"));
    createSequenceFileInHdfs(inputData2, "/input2", "testFile2.seq");

    String[] args = new String[] { "-newPath", "/input1", "-oldPath", "/input2", "-mergeBy", "key",
            "-outputPath", "output", "-inputFormat",
            "org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat", "-inputKeyClassName",
            "co.nubetech.hiho.testdata.Student", "-inputValueClassName", "org.apache.hadoop.io.Text" };
    MergeJob job = runMergeJobs(args);
    assertEquals(3, job.getTotalRecordsNew());
    assertEquals(3, job.getTotalRecordsOld());
    assertEquals(0, job.getBadRecords());
    assertEquals(4, job.getOutput());

    FileSystem outputFS = getFileSystem();
    Path outputPath = new Path(outputFS.getHomeDirectory(), "output/part-r-00000");
    Configuration conf = new Configuration();
    SequenceFile.Reader reader = new SequenceFile.Reader(outputFS, outputPath, conf);
    Writable writableKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable writableValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    List<Student> expectedOutput = new ArrayList<Student>();
    expectedOutput.add(student1);
    expectedOutput.add(student2);
    expectedOutput.add(student3);
    expectedOutput.add(student4);
    int count = 0;
    while (reader.next(writableKey, writableValue)) {
        logger.debug("key and value is: " + writableKey + ", " + writableValue);
        assertTrue("Matched output " + writableKey, expectedOutput.contains(writableKey));
        count++;
    }
    IOUtils.closeStream(reader);
    assertEquals(4, count);
}

From source file: com.axiomine.largecollections.kryo.serializers.DoubleWritableSerializer.java

License: Apache License

public DoubleWritable read(Kryo kryo, Input input, Class<DoubleWritable> type) {
    return new DoubleWritable(input.readDouble());
}
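
Only the read side appears in this excerpt; the matching write method of a Kryo Serializer<DoubleWritable> would plausibly look like this (a sketch, not shown in the source file):

public void write(Kryo kryo, Output output, DoubleWritable object) {
    // Mirror of read(): persist only the wrapped primitive double.
    output.writeDouble(object.get());
}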

From source file: com.bark.hadoop.lab3.SortMapper.java

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    double pageRank = 0;
    // Intermediate files mark page ranks with an "_!0.0000..." pattern.
    Pattern pt = Pattern.compile("(_!\\d+\\.\\S+)");
    Matcher mt = pt.matcher(value.toString());
    if (mt.find()) {
        pageRank = Double.parseDouble(mt.group(1).substring(2));
    }
    //ignore cases with pageranks below 5/N
    double minThreshold = 5d / (context.getConfiguration().getInt("N", 0));
    if (pageRank >= minThreshold) {
        context.write(new DoubleWritable(pageRank), new Text(value.toString().split("\t")[0]));
    }
}

From source file: com.cloudera.crunch.type.writable.WritablesTest.java

License: Open Source License

@Test
public void testDoubles() throws Exception {
    double j = 55.5d;
    DoubleWritable w = new DoubleWritable(j);
    testInputOutputFn(Writables.doubles(), j, w);
}

From source file: com.dasasian.chok.util.WritableType.java

License: Apache License

/**
 * Converts a Java wrapper object (String, Integer, Float, etc.) to the
 * corresponding Hadoop {@link WritableComparable}.
 * @param object the object to convert
 * @return the writable comparable
 */
public WritableComparable convertComparable(Object object) {
    switch (this) {
    case TEXT:
        return new Text((String) object);
    case BYTE:
        return new ByteWritable((Byte) object);
    case INT:
        return new IntWritable((Integer) object);
    case LONG:
        return new LongWritable(((Long) object));
    case FLOAT:
        return new FloatWritable((Float) object);
    case DOUBLE:
        return new DoubleWritable((Double) object);
    }
    throw getUnhandledTypeException();
}
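
A brief usage sketch of convertComparable (hypothetical; assumes the DOUBLE enum constant shown in the switch above):

import org.apache.hadoop.io.WritableComparable;

import com.dasasian.chok.util.WritableType;

public class WritableTypeExample {
    public static void main(String[] args) {
        // 42.5 is autoboxed to Double, which the DOUBLE case wraps in a DoubleWritable.
        WritableComparable comparable = WritableType.DOUBLE.convertComparable(42.5);
        System.out.println(comparable); // prints 42.5
    }
}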