Example usage for org.apache.hadoop.io FloatWritable FloatWritable

Introduction

On this page you can find example usage of the org.apache.hadoop.io.FloatWritable constructor FloatWritable(float).

Prototype

public FloatWritable(float value) 
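
Before the project examples below, here is a minimal self-contained sketch (the class name FloatWritableDemo is illustrative, not taken from any source file on this page) showing the constructor together with get/set and the Writable serialization round trip:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.FloatWritable;

public class FloatWritableDemo {
    public static void main(String[] args) throws IOException {
        // Construct with an initial value; get() reads it, set() replaces it.
        FloatWritable value = new FloatWritable(1.5f);
        value.set(2.5f);

        // Serialize through the Writable contract...
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        value.write(new DataOutputStream(bos));

        // ...and deserialize into a fresh instance.
        FloatWritable copy = new FloatWritable(0f);
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(copy.get()); // prints 2.5
    }
}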

Usage

From source file:org.apache.giraph.types.ops.FloatTypeOps.java

License:Apache License

@Override
public FloatWritable createCopy(FloatWritable from) {
    return new FloatWritable(from.get());
}
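
Returning a fresh instance matters here: Hadoop and Giraph frequently reuse Writable objects across iterations, so a copy made this way remains valid after the source object is overwritten with the next value.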

From source file:org.apache.giraph.utils.TestWritableUtils.java

License:Apache License

/**
 * Tests readList and writeList functions in writable utils.
 * @throws IOException if the serialization round trip fails
 */
@Test
public void testListSerialization() throws IOException {
    List<Writable> list = new ArrayList<>();
    list.add(new LongWritable(1));
    list.add(new LongWritable(2));
    list.add(null);
    list.add(new FloatWritable(3));
    list.add(new FloatWritable(4));
    list.add(new LongWritable(5));
    list.add(new LongWritable(6));

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(bos);
    WritableUtils.writeList(list, dos);
    dos.close();

    byte[] data = bos.toByteArray();

    DataInputStream input = new DataInputStream(new ByteArrayInputStream(data));

    List<Writable> result = (List<Writable>) WritableUtils.readList(input);

    Assert.assertEquals(list, result);

}
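
Note that the list mixes LongWritable and FloatWritable entries and contains a null, so the round trip exercises both per-element type handling and null handling in writeList/readList.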

From source file:org.apache.hama.examples.MaxFlowTest.java

License:Apache License

private void generateTestData() {

    Configuration conf = new Configuration();
    FileSystem fs;
    SequenceFile.Writer writer = null;
    try {
        fs = FileSystem.get(conf);
        Path path = new Path(INPUT);
        writer = SequenceFile.createWriter(fs, conf, path, FloatWritable.class, FloatArrayWritable.class);

        for (String s : input) {
            FloatArrayWritable value = new FloatArrayWritable();
            FloatWritable[] valueArray = new FloatWritable[2];
            value.set(valueArray);
            String[] array = s.split("\t");
            FloatWritable key = new FloatWritable(Float.valueOf(array[0]));
            valueArray[0] = new FloatWritable(Float.valueOf(array[1])); // store v2.
            valueArray[1] = new FloatWritable(Float.valueOf(array[2]));
            System.out.println(" " + Float.valueOf(array[0]) + " " + Float.valueOf(array[1]) + " "
                    + Float.valueOf(array[2]));
            writer.append(key, value);
        }
    } catch (IOException e1) {
        // Log and continue; the finally block still closes the writer.
        e1.printStackTrace();
    } finally {
        // Close in finally so the writer is released even if appending fails.
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:org.apache.hawq.pxf.plugins.hdfs.utilities.RecordkeyAdapter.java

License:Apache License

private ValConverter initializeConverter(Object key) {

    if (key instanceof Integer) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new IntWritable((Integer) key));
            }
        };
    } else if (key instanceof Byte) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new ByteWritable((Byte) key));
            }
        };
    } else if (key instanceof Boolean) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new BooleanWritable((Boolean) key));
            }
        };
    } else if (key instanceof Double) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new DoubleWritable((Double) key));
            }
        };
    } else if (key instanceof Float) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new FloatWritable((Float) key));
            }
        };
    } else if (key instanceof Long) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new LongWritable((Long) key));
            }
        };
    } else if (key instanceof String) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new Text((String) key));
            }
        };
    } else {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                throw new UnsupportedOperationException(
                        "Unsupported recordkey data type " + key.getClass().getName());
            }
        };
    }
}
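
Selecting the converter once, based on the key's runtime type, avoids re-running the instanceof chain for every record; the fallback converter defers the unsupported-type error until a key is actually converted.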

From source file:org.apache.hawq.pxf.plugins.hdfs.utilities.RecordkeyAdapterTest.java

License:Apache License

/**
 * Test convertKeyValue for Float type
 */
@Test
public void convertKeyValueFloat() {
    float key = 2.3f;
    initRecordkeyAdapter();
    runConvertKeyValue(key, new FloatWritable(key));
}

From source file:org.apache.mahout.cf.taste.hadoop.slopeone.SlopeOneDiffsToAveragesReducer.java

License:Apache License

@Override
protected void reduce(EntityEntityWritable key, Iterable<FloatWritable> values, Context context)
        throws IOException, InterruptedException {
    int count = 0;
    double total = 0.0;
    for (FloatWritable value : values) {
        total += value.get();
        count++;
    }
    context.write(key, new FloatWritable((float) (total / count)));
}
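
The running total is accumulated as a double and cast to float only for the emitted average, which limits rounding error when many diffs are summed.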

From source file:org.apache.mahout.cf.taste.hadoop.slopeone.SlopeOnePrefsToDiffsReducer.java

License:Apache License

@Override
protected void reduce(VarLongWritable key, Iterable<EntityPrefWritable> values, Context context)
        throws IOException, InterruptedException {
    List<EntityPrefWritable> prefs = new ArrayList<EntityPrefWritable>();
    for (EntityPrefWritable writable : values) {
        prefs.add(new EntityPrefWritable(writable));
    }
    Collections.sort(prefs, ByItemIDComparator.getInstance());
    int size = prefs.size();
    for (int i = 0; i < size; i++) {
        EntityPrefWritable first = prefs.get(i);
        long itemAID = first.getID();
        float itemAValue = first.getPrefValue();
        for (int j = i + 1; j < size; j++) {
            EntityPrefWritable second = prefs.get(j);
            long itemBID = second.getID();
            float itemBValue = second.getPrefValue();
            context.write(new EntityEntityWritable(itemAID, itemBID),
                    new FloatWritable(itemBValue - itemAValue));
        }
    }
}
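
Because the inner loop starts at j = i + 1 over the sorted preference list, each item pair is emitted exactly once, keyed consistently by (itemAID, itemBID).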

From source file:org.apache.mahout.cf.taste.hadoop.SlopeOneDiffsToAveragesReducer.java

License:Apache License

@Override
protected void reduce(ItemItemWritable key, Iterable<FloatWritable> values, Context context)
        throws IOException, InterruptedException {
    int count = 0;
    double total = 0.0;
    for (FloatWritable value : values) {
        total += value.get();
        count++;
    }
    context.write(key, new FloatWritable((float) (total / count)));
}

From source file:org.apache.mahout.cf.taste.hadoop.SlopeOnePrefsToDiffsReducer.java

License:Apache License

@Override
protected void reduce(Text key, Iterable<ItemPrefWritable> values, Context context)
        throws IOException, InterruptedException {
    List<ItemPrefWritable> prefs = new ArrayList<ItemPrefWritable>();
    for (ItemPrefWritable value : values) {
        prefs.add(new ItemPrefWritable(value));
    }
    Collections.sort(prefs, ByItemIDComparator.getInstance());
    int size = prefs.size();
    for (int i = 0; i < size; i++) {
        ItemPrefWritable first = prefs.get(i);
        long itemAID = first.getItemID();
        float itemAValue = first.getPrefValue();
        for (int j = i + 1; j < size; j++) {
            ItemPrefWritable second = prefs.get(j);
            long itemBID = second.getItemID();
            float itemBValue = second.getPrefValue();
            context.write(new ItemItemWritable(itemAID, itemBID), new FloatWritable(itemBValue - itemAValue));
        }
    }
}

From source file:org.apache.nutch.crawl.UrlWithScore.java

License:Apache License

/**
 * Creates an instance from the provided non-writable types.
 *
 * @param url the URL, wrapped as a Text
 * @param score the score, wrapped as a FloatWritable
 */
public UrlWithScore(String url, float score) {
    this.url = new Text(url);
    this.score = new FloatWritable(score);
}