Example usage for the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double)

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double).

Prototype

public DoubleWritable(double value) 
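
Before the examples from real projects, here is a minimal, self-contained sketch of the constructor in isolation (the DoubleWritableDemo class is hypothetical, for illustration only): it wraps a primitive double, round-trips it through the Writable serialization contract, and reads the value back.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.io.DoubleWritable;

// Hypothetical demo class, not part of Hadoop itself.
public class DoubleWritableDemo {
    public static void main(String[] args) throws Exception {
        // Wrap a primitive double in a Writable box.
        DoubleWritable original = new DoubleWritable(3.14);

        // Serialize through the Writable contract.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance created with the no-arg constructor.
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy.get());               // 3.14
        System.out.println(original.compareTo(copy)); // 0, the values are equal
    }
}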

Usage

From source file: org.apache.hama.ml.regression.VectorDoubleFileInputFormatTest.java

License: Apache License

@Test
public void testFileRead() throws Exception {
    VectorDoubleFileInputFormat inputFormat = new VectorDoubleFileInputFormat();
    Path file = new Path("src/test/resources/vd_file_sample.txt");
    InputSplit split = new FileSplit(file, 0, 1000, new String[] { "localhost" });
    BSPJob job = new BSPJob();
    RecordReader<VectorWritable, DoubleWritable> recordReader = inputFormat.getRecordReader(split, job);
    assertNotNull(recordReader);
    VectorWritable key = recordReader.createKey();
    assertNotNull(key);
    DoubleWritable value = recordReader.createValue();
    assertNotNull(value);
    assertTrue(recordReader.next(key, value));
    assertEquals(new DenseDoubleVector(new double[] { 2d, 3d, 4d }), key.getVector());
    assertEquals(new DoubleWritable(1d), value);
}

From source file: org.apache.hama.ml.semiclustering.SemiClusterTextReader.java

License: Apache License

@Override
public boolean parseVertex(LongWritable key, Text value, Vertex<Text, DoubleWritable, Text> vertex) {

    String line = value.toString();
    // Skip comment lines; expected format: vertexID<TAB>neighbor1-weight1,neighbor2-weight2,...
    if (line.startsWith("#")) {
        return false;
    }
    String[] lineSplit = line.split("\t");
    lastVertexId = lineSplit[0];
    adjacents = Arrays.asList(lineSplit[1].split(","));
    vertex.setVertexID(new Text(lastVertexId));
    for (String adjacent : adjacents) {
        String[] valueSplit = adjacent.split("-");
        vertex.addEdge(new Edge<Text, DoubleWritable>(new Text(valueSplit[0]),
                new DoubleWritable(Double.parseDouble(valueSplit[1]))));
    }
    return true;
}

From source file: org.apache.hama.util.TestKryoSerializer.java

License: Apache License

public void testSerialization() throws Exception {
    Kryo kryo = new Kryo();
    kryo.register(DoubleWritable.class);

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    Output out = new Output(outputStream, 4096);

    for (int i = 0; i < 10; i++) {
        DoubleWritable a = new DoubleWritable(i + 0.123);
        kryo.writeClassAndObject(out, a);
        out.flush();
    }

    System.out.println(outputStream.size()); // total number of bytes serialized so far

    ByteArrayInputStream bin = new ByteArrayInputStream(outputStream.toByteArray());
    Input in = new Input(bin, 4096);

    for (int i = 0; i < 10; i++) {
        DoubleWritable b = (DoubleWritable) kryo.readClassAndObject(in);
        System.out.println(bin.available() + ", " + b);
    }
}

From source file: org.apache.hawq.pxf.plugins.hdfs.utilities.RecordkeyAdapter.java

License: Apache License

private ValConverter initializeConverter(Object key) {

    if (key instanceof Integer) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new IntWritable((Integer) key));
            }
        };
    } else if (key instanceof Byte) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new ByteWritable((Byte) key));
            }
        };
    } else if (key instanceof Boolean) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new BooleanWritable((Boolean) key));
            }
        };
    } else if (key instanceof Double) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new DoubleWritable((Double) key));
            }
        };
    } else if (key instanceof Float) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new FloatWritable((Float) key));
            }
        };
    } else if (key instanceof Long) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new LongWritable((Long) key));
            }
        };
    } else if (key instanceof String) {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                return (new Text((String) key));
            }
        };
    } else {
        return new ValConverter() {
            @Override
            public Writable get(Object key) {
                throw new UnsupportedOperationException(
                        "Unsupported recordkey data type " + key.getClass().getName());
            }
        };
    }
}

From source file: org.apache.hawq.pxf.plugins.hdfs.utilities.RecordkeyAdapterTest.java

License: Apache License

/**
 * Test convertKeyValue for Double type.
 */
@Test
public void convertKeyValueDouble() {
    double key = 2.3;
    initRecordkeyAdapter();
    runConvertKeyValue(key, new DoubleWritable(key));
}

From source file: org.apache.jena.grande.giraph.pagerank.PageRankMasterCompute.java

License: Apache License

@Override
public void compute() {
    log.debug("compute()");
    if (log.isDebugEnabled())
        logAggregators("compute", "Aggregators before:");
    setAggregatedValue("dangling-previous", getAggregatedValue("dangling-current"));
    setAggregatedValue("dangling-current", new DoubleWritable(0));
    setAggregatedValue("error-previous", getAggregatedValue("error-current"));
    setAggregatedValue("error-current", new DoubleWritable(0));
    if (log.isDebugEnabled())
        logAggregators("compute", "Aggregators after:");
}

From source file: org.apache.jena.grande.giraph.pagerank.PageRankVertex.java

License: Apache License

private void sendMessages() {
    if ((getSuperstep() - 3 < numIterations)
            && (((DoubleWritable) getAggregatedValue("error-previous")).get() > tolerance)) {
        long edges = getNumEdges();
        if (edges > 0) {
            sendMessageToAllEdges(new DoubleWritable(getValue().get() / edges));
        } else {
            aggregate("dangling-current", getValue());
        }
    } else {
        aggregate("pagerank-sum", getValue());
        voteToHalt();
        log.debug("{}#{} compute() --> halt", getId(), getSuperstep());
    }
}

From source file: org.apache.jena.grande.giraph.pagerank.SimplePageRankVertex.java

License: Apache License

@Override
public void compute(Iterable<DoubleWritable> msgIterator) {
    log.debug("{}#{} - compute(...) vertexValue={}", new Object[] { getId(), getSuperstep(), getValue() });

    if (getSuperstep() >= 1) {
        double sum = 0;
        for (DoubleWritable msg : msgIterator) {
            sum += msg.get();
        }
        DoubleWritable vertexValue = new DoubleWritable((0.15f / getTotalNumVertices()) + 0.85f * sum);
        setValue(vertexValue);
    }

    if (getSuperstep() < NUM_ITERATIONS) {
        long edges = getNumEdges();
        sendMessageToAllEdges(new DoubleWritable(getValue().get() / edges));
    } else {
        voteToHalt();
    }
}

From source file: org.apache.mahout.cf.taste.hadoop.similarity.item.ItemSimilarityJobTest.java

License: Apache License

/**
 * Tests {@link ItemSimilarityJob.MostSimilarItemPairsMapper}.
 */
@Test
public void testMostSimilarItemsPairsMapper() throws Exception {

    OpenIntLongHashMap indexItemIDMap = new OpenIntLongHashMap();
    indexItemIDMap.put(12, 12L);
    indexItemIDMap.put(34, 34L);
    indexItemIDMap.put(56, 56L);

    Mapper<IntWritable, VectorWritable, EntityEntityWritable, DoubleWritable>.Context context = EasyMock
            .createMock(Mapper.Context.class);

    context.write(new EntityEntityWritable(34L, 56L), new DoubleWritable(0.9));

    EasyMock.replay(context);

    Vector vector = new RandomAccessSparseVector(Integer.MAX_VALUE);
    vector.set(12, 0.2);
    vector.set(56, 0.9);

    ItemSimilarityJob.MostSimilarItemPairsMapper mapper = new ItemSimilarityJob.MostSimilarItemPairsMapper();
    setField(mapper, "indexItemIDMap", indexItemIDMap);
    setField(mapper, "maxSimilarItemsPerItem", 1);

    mapper.map(new IntWritable(34), new VectorWritable(vector), context);

    EasyMock.verify(context);
}

From source file: org.apache.mahout.cf.taste.hadoop.similarity.item.ItemSimilarityJobTest.java

License: Apache License

/**
 * Tests {@link ItemSimilarityJob.MostSimilarItemPairsReducer}.
 */
@Test
public void testMostSimilarItemPairsReducer() throws Exception {
    Reducer<EntityEntityWritable, DoubleWritable, EntityEntityWritable, DoubleWritable>.Context context = EasyMock
            .createMock(Reducer.Context.class);

    context.write(new EntityEntityWritable(123L, 456L), new DoubleWritable(0.5));

    EasyMock.replay(context);

    new ItemSimilarityJob.MostSimilarItemPairsReducer().reduce(new EntityEntityWritable(123L, 456L),
            Arrays.asList(new DoubleWritable(0.5), new DoubleWritable(0.5)), context);

    EasyMock.verify(context);
}