Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double).

Prototype

public DoubleWritable(double value) 
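
Before the full examples, here is a minimal, self-contained sketch of the constructor in isolation: it wraps a primitive double, reads it back with get(), and round-trips it through Hadoop's DataOutputBuffer/DataInputBuffer serialization helpers. Only standard org.apache.hadoop.io classes are used; the DoubleWritableDemo class name is just for illustration.

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
    public static void main(String[] args) throws IOException {
        // Wrap a primitive double in a Writable.
        DoubleWritable dw = new DoubleWritable(40.48989);
        System.out.println(dw.get()); // 40.48989

        // Serialize, then deserialize into a fresh instance.
        DataOutputBuffer out = new DataOutputBuffer();
        dw.write(out);

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());

        DoubleWritable copy = new DoubleWritable(); // no-arg constructor, value 0.0
        copy.readFields(in);
        System.out.println(copy.get()); // 40.48989
    }
}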

Usage

From source file:com.tfm.utad.reducerdata.ReducerDataVerticaMapperReducerTest.java

@Test
public void testMapper() throws IOException, ParseException {
    String key = "123456";
    Date date = sdf.parse("2014-12-06 17:43:21");
    mapDriver.withInput(new Text(key),
            new Text("40.48989;-3.65754;User189;2014-12-06 17:43:21;20141206-34567-189"));
    mapDriver.withOutput(new Text("User189" + "20141206-34567-189"),
            new ReducerVerticaValue(new LongWritable(123456L), new Text("User189"), date,
                    new Text("20141206-34567-189"), new DoubleWritable(40.48989),
                    new DoubleWritable(-3.65754), new LongWritable(189L)));
    mapDriver.runTest();
}

From source file:com.tfm.utad.reducerdata.ReducerDataVerticaMapperReducerTest.java

@Test
public void testReducer() throws IOException, ParseException {
    List<ReducerVerticaValue> values = new ArrayList<>();
    Date date = sdf.parse("2014-12-06 17:43:21");
    ReducerVerticaValue verticaValue = new ReducerVerticaValue(new LongWritable(123456L),
            new Text("User189"), date, new Text("20141206-34567-189"),
            new DoubleWritable(40.48989), new DoubleWritable(-3.65754),
            new LongWritable(189L));
    values.add(verticaValue);
    reduceDriver.withInput(new Text("User189" + "20141206-34567-189"), values);
    reduceDriver.withOutput(new Text(values.get(0).toString()), NullWritable.get());
    reduceDriver.runTest();
}
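
Both tests rely on mapDriver, reduceDriver, and sdf fields whose initialization is not shown on this page. A setup along the following lines would be typical with MRUnit; the ReducerDataVerticaMapper and ReducerDataVerticaReducer class names are assumptions, since only the test methods appear above.

// Hypothetical MRUnit wiring for the two tests above. The mapper and
// reducer class names are assumed; the driver API itself is standard
// (org.apache.hadoop.mrunit.mapreduce.MapDriver / ReduceDriver).
private MapDriver<Text, Text, Text, ReducerVerticaValue> mapDriver;
private ReduceDriver<Text, ReducerVerticaValue, Text, NullWritable> reduceDriver;
private SimpleDateFormat sdf;

@Before
public void setUp() {
    mapDriver = MapDriver.newMapDriver(new ReducerDataVerticaMapper());
    reduceDriver = ReduceDriver.newReduceDriver(new ReducerDataVerticaReducer());
    sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
}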

From source file:com.veera.secondarysort.demo2.SsMapper.java

License:Apache License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    // Each input line is CSV: symbol,timestamp,value.
    String[] tokens = value.toString().split(",");

    String symbol = tokens[0].trim();
    long timestamp = Long.parseLong(tokens[1].trim());
    double v = Double.parseDouble(tokens[2].trim());

    // The composite key carries symbol and timestamp for secondary sort;
    // the value is just the number, wrapped in a DoubleWritable.
    StockKey stockKey = new StockKey(symbol, timestamp);
    DoubleWritable stockValue = new DoubleWritable(v);

    context.write(stockKey, stockValue);
    _log.debug(stockKey.toString() + " => " + stockValue.toString());
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.mapreduce.DoubleSumReducer.java

License:Apache License

protected void reduce(Text key, Iterable<DoubleWritable> valueSet, Context context)
        throws IOException, InterruptedException {
    // Sum every value emitted for this key and write a single total.
    double sum = 0;
    for (DoubleWritable value : valueSet) {
        sum += value.get();
    }
    context.write(key, new DoubleWritable(sum));
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.mapreduce.MeanMapper.java

License:Apache License

protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
    // Emit value/n under a single key; summing these partial terms in the
    // reducer yields the mean (n is a field of this mapper).
    double doubleValue = Double.parseDouble(value.toString());
    double divValue = doubleValue / n;
    context.write(new Text("Mean"), new DoubleWritable(divValue));
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SigmaMapper.java

License:Apache License

protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
    // Emit each raw value under a single key so the reducer can compute sigma(x).
    double doubleValue = Double.parseDouble(value.toString());
    context.write(new Text("sigmax"), new DoubleWritable(doubleValue));
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SigmaSqMapper.java

License:Apache License

protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
    // Emit x^2 under a single key so the reducer can compute sigma(x^2).
    double doubleValue = Double.parseDouble(value.toString());
    double square = doubleValue * doubleValue;
    context.write(new Text("SigmaxSquare"), new DoubleWritable(square));
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SigmaXYMapper.java

License:Apache License

protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
    // Each input line holds x and y separated by a tab.
    String[] values = value.toString().split("\t");

    double x = Double.parseDouble(values[0]);
    double y = Double.parseDouble(values[1]);

    // Emit x*y under a single key so the reducer can compute sigma(xy).
    double product = x * y;
    context.write(new Text("sigmaxy"), new DoubleWritable(product));
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.mapreduce.SortMapper.java

License:Apache License

protected void map(K key, Text value, Context context) throws IOException, InterruptedException {
    // Use the value itself as the key so the shuffle sorts the doubles.
    double doubleValue = Double.parseDouble(value.toString());
    context.write(new DoubleWritable(doubleValue), new DoubleWritable(doubleValue));
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.mapreduce.YDiffMapper.java

License:Apache License

protected void map(K key, V value, Context context) throws IOException, InterruptedException {
    // Each line holds the x columns followed by y, tab-separated.
    String[] columnValues = value.toString().split("\t");

    // yCap = b0 + b1*x1 + ... + bk*xk, using the fitted coefficients
    // held in the paramValues field.
    double yCapValue = Double.parseDouble(paramValues[0]);
    for (int i = 0; i < columnValues.length - 1; i++) {
        double val = Double.parseDouble(columnValues[i]);
        double paramValue = Double.parseDouble(paramValues[i + 1]);
        yCapValue += (val * paramValue);
    }

    // Emit the squared residual (yCap - y)^2 for the reducer to sum.
    double yValue = Double.parseDouble(columnValues[columnValues.length - 1]);
    double yDiffValue = yCapValue - yValue;
    double yDiffSquare = yDiffValue * yDiffValue;
    context.write(new Text("ydiff"), new DoubleWritable(yDiffSquare));
}