Example usage for org.apache.hadoop.io FloatWritable FloatWritable

Introduction

On this page you can find example usages of the org.apache.hadoop.io.FloatWritable constructor FloatWritable(float value).

Prototype

public FloatWritable(float value) 
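
Before the project examples below, here is a minimal, self-contained sketch of the constructor in isolation (the class name and values are illustrative, not taken from any of the source files):

import org.apache.hadoop.io.FloatWritable;

public class FloatWritableSketch {
    public static void main(String[] args) {
        // Wrap a primitive float in a Hadoop Writable.
        FloatWritable rating = new FloatWritable(5.0f);

        // Read the wrapped value back as a primitive.
        float value = rating.get();

        // Writables are mutable, so instances are often reused
        // across records instead of being reallocated.
        rating.set(value + 1.0f);
        System.out.println(rating); // prints 6.0
    }
}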

Usage

From source file:ml.grafos.okapi.cf.CfLongIdFloatTextEdgeReaderTest.java

License:Apache License

@Test
public void test() throws IOException {
    Text inputLine1 = new Text("1 2 5.0");
    Text inputLine2 = new Text("1\t2\t5.000  ");

    CfLongId user = new CfLongId((byte) 0, 1);
    CfLongId item = new CfLongId((byte) 1, 2);
    FloatWritable rating = new FloatWritable(5f);

    String[] tokens = this.preprocessLine(inputLine1);
    assertEquals(user, this.getSourceVertexId(tokens));
    assertEquals(item, this.getTargetVertexId(tokens));
    assertEquals(rating, this.getValue(tokens));

    tokens = this.preprocessLine(inputLine2);
    assertEquals(user, this.getSourceVertexId(tokens));
    assertEquals(item, this.getTargetVertexId(tokens));
    assertEquals(rating, this.getValue(tokens));
}

From source file:net.sf.katta.util.WritableType.java

License:Apache License

/**
 * Convert a Java wrapper object (such as String, Integer, Float, etc.)
 * to the corresponding Hadoop {@link WritableComparable}.
 */
public WritableComparable convertComparable(Object comparable) {
    switch (this) {
    case TEXT:
        return new Text((String) comparable);
    case BYTE:
        return new ByteWritable(((Byte) comparable).byteValue());
    case INT:
        return new IntWritable(((Integer) comparable).intValue());
    case LONG:
        return new LongWritable((((Long) comparable).longValue()));
    case FLOAT:
        return new FloatWritable(((Float) comparable).floatValue());
    case DOUBLE:
        return new DoubleWritable(((Double) comparable).doubleValue());
    }
    throw getUnhandledTypeException();
}
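
Assuming WritableType is an enum whose constants match the switch cases above, a call site might look like this (the invocation below is an illustrative assumption, not code from the file):

// Hypothetical call site: wrap a boxed Float as a FloatWritable.
WritableComparable wc = WritableType.FLOAT.convertComparable(Float.valueOf(167.2f));
// wc is now a FloatWritable holding 167.2f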

From source file:oracle.kv.hadoop.hive.table.TableFloatObjectInspector.java

License:Open Source License

@Override
public Object getPrimitiveWritableObject(Object o) {
    return o == null ? null : new FloatWritable(get(o));
}

From source file:org.apache.avro.hadoop.io.TestAvroDatumConverterFactory.java

License:Apache License

@Test
public void testConvertFloatWritable() {
    AvroDatumConverter<FloatWritable, Float> converter = mFactory.create(FloatWritable.class);
    assertEquals(2.2f, converter.convert(new FloatWritable(2.2f)).floatValue(), 0.00001);
}

From source file:org.apache.crunch.io.orc.OrcFileReaderFactoryTest.java

License:Apache License

@Test
public void testColumnPruning() throws IOException {
    Path path = new Path(tempPath, "test.orc");

    String typeStr = "struct<a:int,b:string,c:float>";
    TypeInfo info = TypeInfoUtils.getTypeInfoFromTypeString(typeStr);
    StructObjectInspector soi = (StructObjectInspector) OrcStruct.createObjectInspector(info);
    PType<OrcStruct> ptype = Orcs.orcs(info);

    OrcFileWriter<OrcStruct> writer = new OrcFileWriter<OrcStruct>(conf, path, ptype);
    writer.write(
            OrcUtils.createOrcStruct(info, new IntWritable(1), new Text("Alice"), new FloatWritable(167.2f)));
    writer.write(
            OrcUtils.createOrcStruct(info, new IntWritable(2), new Text("Bob"), new FloatWritable(179.7f)));
    writer.close();

    int[] readColumns = { 1 };
    OrcFileSource<OrcStruct> source = new OrcFileSource<OrcStruct>(path, ptype, readColumns);
    for (OrcStruct row : source.read(conf)) {
        List<Object> list = soi.getStructFieldsDataAsList(row);
        assertNull(list.get(0));
        assertNotNull(list.get(1));
        assertNull(list.get(2));
    }
}

From source file:org.apache.crunch.io.orc.OrcWritableTest.java

License:Apache License

@Test
public void testDeepCopy() {
    String typeStr = "struct<a:int,b:string,c:float>";
    TypeInfo info = TypeInfoUtils.getTypeInfoFromTypeString(typeStr);
    StructObjectInspector oi = (StructObjectInspector) OrcStruct.createObjectInspector(info);
    BinarySortableSerDe serde = OrcUtils.createBinarySerde(info);

    OrcStruct struct = OrcUtils.createOrcStruct(info, new IntWritable(1), new Text("Alice"),
            new FloatWritable(165.3f));
    OrcWritable writable = new OrcWritable();
    writable.set(struct);
    assertTrue(struct == writable.get());

    writable.setObjectInspector(oi);
    writable.setSerde(serde);

    WritableDeepCopier<OrcWritable> deepCopier = new WritableDeepCopier<OrcWritable>(OrcWritable.class);
    OrcWritable copied = deepCopier.deepCopy(writable);
    assertTrue(writable != copied);
    assertEquals(writable, copied);

    copied.setObjectInspector(oi);
    copied.setSerde(serde);
    OrcStruct copiedStruct = copied.get();
    assertTrue(struct != copiedStruct);
    assertEquals(struct, copiedStruct);

    List<Object> items = oi.getStructFieldsDataAsList(struct);
    List<Object> copiedItems = oi.getStructFieldsDataAsList(copiedStruct);
    for (int i = 0; i < items.size(); i++) {
        assertTrue(items.get(i) != copiedItems.get(i));
        assertEquals(items.get(i), copiedItems.get(i));
    }

    OrcWritable copied2 = deepCopier.deepCopy(copied);
    assertTrue(copied2 != copied);
    assertEquals(copied2, copied);

    copied2.setObjectInspector(oi);
    copied2.setSerde(serde);
    OrcStruct copiedStruct2 = copied2.get();
    assertTrue(copiedStruct2 != copiedStruct);
    assertEquals(copiedStruct2, copiedStruct);

    List<Object> copiedItems2 = oi.getStructFieldsDataAsList(copiedStruct2);
    for (int i = 0; i < items.size(); i++) {
        assertTrue(copiedItems2.get(i) != copiedItems.get(i));
        assertEquals(copiedItems2.get(i), copiedItems.get(i));
    }
}

From source file:org.apache.crunch.io.orc.OrcWritableTest.java

License:Apache License

@Test
public void testCompareTo() {
    String typeStr = "struct<a:int,b:string,c:float>";
    TypeInfo info = TypeInfoUtils.getTypeInfoFromTypeString(typeStr);
    StructObjectInspector oi = (StructObjectInspector) OrcStruct.createObjectInspector(info);
    BinarySortableSerDe serde = OrcUtils.createBinarySerde(info);

    OrcStruct struct1 = OrcUtils.createOrcStruct(info, new IntWritable(1), new Text("AAA"),
            new FloatWritable(3.2f));
    OrcStruct struct2 = OrcUtils.createOrcStruct(info, new IntWritable(1), new Text("AAB"), null);
    OrcStruct struct3 = OrcUtils.createOrcStruct(info, new IntWritable(2), new Text("AAA"), null);
    OrcStruct struct4 = OrcUtils.createOrcStruct(info, new IntWritable(2), new Text("AAA"),
            new FloatWritable(3.2f));

    OrcWritable writable1 = new OrcWritable();
    writable1.set(struct1);
    OrcWritable writable2 = new OrcWritable();
    writable2.set(struct2);
    OrcWritable writable3 = new OrcWritable();
    writable3.set(struct3);
    OrcWritable writable4 = new OrcWritable();
    writable4.set(struct4);

    writable1.setObjectInspector(oi);
    writable2.setObjectInspector(oi);
    writable3.setObjectInspector(oi);
    writable4.setObjectInspector(oi);
    writable1.setSerde(serde);
    writable2.setSerde(serde);
    writable3.setSerde(serde);
    writable4.setSerde(serde);

    assertTrue(writable1.compareTo(writable2) < 0);
    assertTrue(writable2.compareTo(writable3) < 0);
    assertTrue(writable1.compareTo(writable3) < 0);
    assertTrue(writable3.compareTo(writable4) < 0);
}

From source file:org.apache.flink.hadoopcompatibility.mapred.record.datatypes.DefaultFlinkTypeConverter.java

License:Apache License

@SuppressWarnings("unchecked")
private <T> T convert(Record flinkType, int pos, Class<T> hadoopType) {
    if (hadoopType == LongWritable.class) {
        return (T) new LongWritable((flinkType.getField(pos, LongValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.Text.class) {
        return (T) new Text((flinkType.getField(pos, StringValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.IntWritable.class) {
        return (T) new IntWritable((flinkType.getField(pos, IntValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.FloatWritable.class) {
        return (T) new FloatWritable((flinkType.getField(pos, FloatValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
        return (T) new DoubleWritable((flinkType.getField(pos, DoubleValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
        return (T) new BooleanWritable((flinkType.getField(pos, BooleanValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.ByteWritable.class) {
        return (T) new ByteWritable((flinkType.getField(pos, ByteValue.class)).getValue());
    }

    throw new RuntimeException(
            "Unable to convert Flink type (" + flinkType.getClass().getCanonicalName() + ") to Hadoop.");
}

From source file:org.apache.giraph.aggregators.FloatAverageAggregator.java

License:Apache License

@Override
public FloatWritable getAggregatedValue() {
    return new FloatWritable(count > 0 ? sum / count : 0.0f);
}
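
The getter above divides a running sum by a count, so the class presumably maintains both fields in its aggregate() callback. A plausible sketch, assuming the field names from the snippet (sum, count) and Giraph's standard Aggregator interface:

// Assumed companion to getAggregatedValue(); sum and count are the
// fields referenced above, the method body is a sketch.
@Override
public void aggregate(FloatWritable value) {
    sum += value.get();
    count++;
}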

From source file:org.apache.giraph.aggregators.FloatMaxAggregator.java

License:Apache License

@Override
public FloatWritable createInitialValue() {
    return new FloatWritable(Float.NEGATIVE_INFINITY);
}
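
Float.NEGATIVE_INFINITY is the natural identity for a max aggregator: the first value aggregated always replaces it. A hedged sketch of the matching aggregate() callback, modeled on Giraph's other basic aggregators (treat it as an assumption, not the file's verbatim body):

@Override
public void aggregate(FloatWritable value) {
    // Keep the larger of the current aggregate and the incoming value.
    getAggregatedValue().set(Math.max(getAggregatedValue().get(), value.get()));
}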