Example usage of the org.apache.hadoop.io.DoubleWritable(double) constructor

List of usage examples for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable(double) constructor.

Prototype

public DoubleWritable(double value) 

Source Link

Usage

From source file:org.goldenorb.types.message.SampleDoubleMessageTest.java

License:Apache License

/**
 * Verifies a DoubleMessage RPC round trip: the value and destination vertex
 * recorded by the server must match what was sent, and the returned message
 * must carry the expected destination and payload.
 */
@Test
public void testRPC() {
    DoubleMessage dm1 = client.sendAndReceiveMessage(dm0, DESTINATION_VALUE, new DoubleWritable(MESSAGE_VALUE));
    // assertEquals with an explicit delta reports both values on failure,
    // unlike assertTrue(a == b) which only reports "expected true".
    assertEquals(dm0.get(), client.getMessage().get(), 0.0);
    assertEquals(dm0.getDestinationVertex(), client.getMessage().getDestinationVertex());
    // JUnit convention: expected value first, actual second (was reversed).
    assertEquals(DESTINATION_VALUE, dm1.getDestinationVertex());
    assertEquals(MESSAGE_VALUE, ((DoubleWritable) dm1.getMessageValue()).get(), 0.0);
}

From source file:org.huahinframework.core.io.RecordTest.java

License:Apache License

/**
 * A grouping entry stored as a DoubleWritable must be readable back through
 * getGroupingDoubleWritable, an absent key must yield null, and reading the
 * entry with a mismatched type accessor must raise ClassCastException.
 */
@Test
public void testGroupingDoubleWritable() {
    Record record = new Record();
    DoubleWritable o = new DoubleWritable(10.0);
    record.addGrouping("Object", o);
    // JUnit convention: expected value first, actual second (was reversed).
    assertEquals(o, record.getGroupingDoubleWritable("Object"));
    // A key that was never added yields null.
    assertNull(record.getGroupingDoubleWritable("Object2"));

    try {
        record.getGroupingInteger("Object");
        fail("fail ClassCastException");
    } catch (ClassCastException ignored) {
        // expected: the value was stored as a DoubleWritable, not an Integer.
        // Catching the exact type (instead of Exception + instanceof) lets any
        // unexpected exception surface as a test error with its own trace.
    }
}

From source file:org.huahinframework.core.io.RecordTest.java

License:Apache License

/**
 * A value entry stored as a DoubleWritable must be readable back through
 * getValueDoubleWritable, an absent key must yield null, and reading the
 * entry with a mismatched type accessor must raise ClassCastException.
 */
@Test
public void testValueDoubleWritable() {
    Record record = new Record();
    DoubleWritable o = new DoubleWritable(10.0);
    record.addValue("Object", o);
    // JUnit convention: expected value first, actual second (was reversed).
    assertEquals(o, record.getValueDoubleWritable("Object"));
    // A key that was never added yields null.
    assertNull(record.getValueDoubleWritable("Object2"));

    try {
        record.getValueIntWritable("Object");
        fail("fail ClassCastException");
    } catch (ClassCastException ignored) {
        // expected: the value was stored as a DoubleWritable, not an IntWritable.
        // Catching the exact type (instead of Exception + instanceof) lets any
        // unexpected exception surface as a test error with its own trace.
    }
}

From source file:org.huahinframework.core.util.ObjectUtil.java

License:Apache License

/**
 * Convert the HadoopObject from Java primitive.
 * @param object Java primitive object/*from   www  .ja  va2 s.  co m*/
 * @return HadoopObject
 */
public static HadoopObject primitive2Hadoop(Object object) {
    if (object == null) {
        return new HadoopObject(NULL, NullWritable.get());
    }

    if (object instanceof Byte) {
        return new HadoopObject(BYTE, new ByteWritable((Byte) object));
    } else if (object instanceof Integer) {
        return new HadoopObject(INTEGER, new IntWritable((Integer) object));
    } else if (object instanceof Long) {
        return new HadoopObject(LONG, new LongWritable((Long) object));
    } else if (object instanceof Double) {
        return new HadoopObject(DOUBLE, new DoubleWritable((Double) object));
    } else if (object instanceof Float) {
        return new HadoopObject(FLOAT, new FloatWritable((Float) object));
    } else if (object instanceof Boolean) {
        return new HadoopObject(BOOLEAN, new BooleanWritable((Boolean) object));
    } else if (object instanceof String) {
        return new HadoopObject(STRING, new Text((String) object));
    } else if (object.getClass().isArray()) {
        return arrayPrimitive2Hadoop(object);
    } else if (object instanceof Collection<?>) {
        Collection<?> collection = (Collection<?>) object;
        return arrayPrimitive2Hadoop(collection.toArray());
    } else if (object instanceof Map<?, ?>) {
        Map<?, ?> map = (Map<?, ?>) object;
        if (map.size() == 0) {
            throw new ClassCastException("object not found");
        }

        MapWritable mapWritable = new MapWritable();
        for (Entry<?, ?> entry : map.entrySet()) {
            mapWritable.put(primitive2Hadoop(entry.getKey()).getObject(),
                    primitive2Hadoop(entry.getValue()).getObject());
        }

        return new HadoopObject(MAP, mapWritable);
    }

    throw new ClassCastException("cast object not found");
}

From source file:org.huahinframework.core.util.ObjectUtilTest.java

License:Apache License

/**
 * primitive2Hadoop must wrap a Java double in a DoubleWritable and tag the
 * result with the DOUBLE type constant.
 */
@Test
public void testPrimitive2HadoopIODouble() {
    double value = 123;
    HadoopObject converted = ObjectUtil.primitive2Hadoop(value);
    assertEquals(ObjectUtil.DOUBLE, converted.getType());
    assertEquals(new DoubleWritable(value), converted.getObject());
}

From source file:org.huahinframework.core.util.ObjectUtilTest.java

License:Apache License

/**
 * hadoop2Primitive must unwrap a DoubleWritable back into the original double
 * value, tagged with the DOUBLE type constant.
 */
@Test
public void testHadoopIO2PrimitiveDouble() {
    double value = 123;
    PrimitiveObject primitive = ObjectUtil.hadoop2Primitive(new DoubleWritable(value));
    assertEquals(ObjectUtil.DOUBLE, primitive.getType());
    assertEquals(value, primitive.getObject());
}

From source file:org.openflamingo.mapreduce.aggregator.DoubleMaxAggregator.java

License:Apache License

/**
 * Returns the current maximum as a Writable. A fresh DoubleWritable is built
 * on every call, so mutating the returned object cannot affect the aggregator.
 */
@Override
public DoubleWritable getAggregatedValue() {
    DoubleWritable aggregated = new DoubleWritable(max);
    return aggregated;
}

From source file:org.openflamingo.mapreduce.aggregator.DoubleMinAggregator.java

License:Apache License

/**
 * Returns the current minimum as a Writable. A fresh DoubleWritable is built
 * on every call, so mutating the returned object cannot affect the aggregator.
 */
@Override
public DoubleWritable getAggregatedValue() {
    DoubleWritable aggregated = new DoubleWritable(min);
    return aggregated;
}

From source file:org.openflamingo.mapreduce.aggregator.DoubleOverwriteAggregator.java

License:Apache License

/**
 * Returns the most recently stored value as a Writable. A fresh DoubleWritable
 * is built on every call, so the caller cannot mutate the aggregator's state.
 */
@Override
public DoubleWritable getAggregatedValue() {
    DoubleWritable aggregated = new DoubleWritable(result);
    return aggregated;
}

From source file:org.openflamingo.mapreduce.aggregator.DoubleProductAggregator.java

License:Apache License

/**
 * Returns the running product as a Writable. A fresh DoubleWritable is built
 * on every call, so the caller cannot mutate the aggregator's state.
 */
@Override
public DoubleWritable getAggregatedValue() {
    DoubleWritable aggregated = new DoubleWritable(product);
    return aggregated;
}