Example usage for org.apache.hadoop.io DoubleWritable DoubleWritable

Introduction

On this page you can find example usages of the org.apache.hadoop.io.DoubleWritable constructor DoubleWritable(double), collected from open source projects.

Prototype

public DoubleWritable(double value) 
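Before the project examples, here is a minimal self-contained sketch of the constructor in use (the class name DoubleWritableDemo is invented for illustration); it shows construction, get/set, and a Writable serialization round trip, which is all the examples below build on:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
    public static void main(String[] args) throws IOException {
        DoubleWritable dw = new DoubleWritable(3.14); // the DoubleWritable(double) constructor
        dw.set(dw.get() * 2); // Writables are mutable, so instances can be reused

        // Round trip through Hadoop's Writable serialization
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        dw.write(new DataOutputStream(bytes));

        DoubleWritable copy = new DoubleWritable(); // no-arg constructor; value filled in by readFields
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.get()); // prints 6.28
    }
}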

Usage

From source file:com.zinnia.nectar.util.hadoop.DoubleReducer.java

License:Apache License

protected void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {

    double sum = 0;
    // Sum all values grouped under this key
    for (DoubleWritable value : values) {
        sum += value.get();
    }

    context.write(key, new DoubleWritable(sum));
}
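The reducer above sums the DoubleWritable values grouped under each key. For context, a minimal mapper that could feed it might look like the following sketch; the class name DoubleSumMapper, the tab-separated input format, and the field layout are assumptions for illustration, not part of the original project:

import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper emitting (label, value) pairs for DoubleReducer to sum
public class DoubleSumMapper extends Mapper<LongWritable, Text, Text, DoubleWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Assumed input: "label<TAB>number" lines
        String[] fields = value.toString().split("\t");
        if (fields.length == 2) {
            context.write(new Text(fields[0]), new DoubleWritable(Double.parseDouble(fields[1])));
        }
    }
}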

From source file:com.zqh.giraph.standalone.TriangleCounting.java

License:Apache License

@Override
public void compute(Vertex<Text, Text, NullWritable> vertex, Iterable<Text> messages) throws IOException {
    if (getSuperstep() == 0) {
        // Build a Text holding the "-"-separated list of this vertex's neighbor ids
        Text neighborhood = new Text();
        Iterable<Edge<Text, NullWritable>> edges = vertex.getEdges();
        for (Edge<Text, NullWritable> edge : edges) {
            neighborhood.set(neighborhood.toString() + "-" + edge.getTargetVertexId().toString());
        }
        // Send the full neighbor list to every neighbor
        for (Edge<Text, NullWritable> edge : edges) {
            this.sendMessage(edge.getTargetVertexId(), neighborhood);
        }
    } else if (getSuperstep() == 1) {
        Double T = 0.0;
        // Compare the "missing" edges each neighbor sent against this vertex's own neighbor list
        for (Text message : messages) {
            String[] msgSplit = message.toString().split("-"); // the sender's neighbor list
            Iterable<Edge<Text, NullWritable>> edges = vertex.getEdges();
            for (Edge<Text, NullWritable> edge : edges) {
                for (String missEdge : msgSplit) {
                    if (missEdge.equals(edge.getTargetVertexId().toString())) {
                        T++;
                    }
                }
            }
        }

        // Each triangle is counted twice at each of its three vertices, so the
        // per-vertex counts sum to six times the global triangle count
        T = T / 6;
        vertex.setValue(new Text(T.toString()));
        vertex.voteToHalt();

        aggregate(SOMMA, new DoubleWritable(T));

    }

}

From source file:com.zqh.hadoop.mr.Financial.HighLowDayMapper.java

License:Apache License

/**
 * Expected input:
 * 
 * <pre>
 * exchange,stock_symbol,date,stock_price_open,stock_price_high,stock_price_low,stock_price_close,stock_volume,stock_price_adj_close
 * NASDAQ,XING,2010-02-08,1.73,1.76,1.71,1.73,147400,1.73
 * </pre>
 */
@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    String inputLine = value.toString();

    if (inputLine.startsWith("exchange,")) {
        // Line is the header, ignore it
        return;
    }

    String[] columns = inputLine.split(",");

    if (columns.length != 9) {
        // Line doesn't have the expected number of columns; skip it
        return;
    }

    // You'd normally represent money as a BigDecimal, but we're using doubles
    // to make things easier to understand
    double close = Double.parseDouble(columns[6]); // stock_price_close
    context.write(new Text(columns[2]), new DoubleWritable(close)); // keyed by date
}

From source file:com.zqh.hadoop.mr.Financial.HighLowStockMapper.java

License:Apache License

/**
 * Expected input:
 * 
 * <pre>
 * exchange,stock_symbol,date,stock_price_open,stock_price_high,stock_price_low,stock_price_close,stock_volume,stock_price_adj_close
 * NASDAQ,XING,2010-02-08,1.73,1.76,1.71,1.73,147400,1.73
 * </pre>
 */
@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    String inputLine = value.toString();

    if (inputLine.startsWith("exchange,")) {
        // Line is the header, ignore it
        return;
    }

    String[] columns = inputLine.split(",");

    if (columns.length != 9) {
        // Line doesn't have the expected number of columns; skip it
        return;
    }

    // You'd normally represent money as a BigDecimal, but we're using doubles
    // to make things easier to understand
    double close = Double.parseDouble(columns[8]); // stock_price_adj_close
    context.write(new Text(columns[1]), new DoubleWritable(close)); // keyed by stock_symbol
}

From source file:computation.test.SimpleComputationTest.java

License:MIT License

/**
 * Tests the messages sent after superstep 1 finishes.
 */
@Test
public void testSuperstep1() throws Exception {

    // messages: positions of other vertices
    ArrayList<MessageWritable> messages = new ArrayList<MessageWritable>();
    messages.add(new MessageWritable(new LongWritable(2), new CoordinateWritable(), new BooleanWritable()));
    messages.add(new MessageWritable(new LongWritable(3), new CoordinateWritable(), new BooleanWritable()));

    Vertex<LongWritable, VertexValuesWritable, EdgeValuesWritable> vertex =
            new DefaultVertex<LongWritable, VertexValuesWritable, EdgeValuesWritable>();

    SimpleComputation computation = new SimpleComputation();
    SimpleMasterCompute master = new SimpleMasterCompute();
    master.initialize();

    MockUtils.MockedEnvironment<LongWritable, VertexValuesWritable, EdgeValuesWritable, MessageWritable> env =
            MockUtils.prepareVertexAndComputation(vertex, new LongWritable(1L),
                    new VertexValuesWritable(), true, computation, 1L);

    vertex.setValue(new VertexValuesWritable(new CoordinateWritable(), new DoubleWritable(0)));
    vertex.addEdge(EdgeFactory.create(new LongWritable(2L), new EdgeValuesWritable()));
    vertex.addEdge(EdgeFactory.create(new LongWritable(3L), new EdgeValuesWritable()));

    computation.compute(vertex, messages);

    assertTrue(vertex.isHalted());

    env.verifyMessageSent(new LongWritable(2),
            new MessageWritable(new LongWritable(1), vertex.getValue().getCoordinate(), new BooleanWritable()));
    env.verifyMessageSent(new LongWritable(3),
            new MessageWritable(new LongWritable(1), vertex.getValue().getCoordinate(), new BooleanWritable()));
}

From source file:de.kp.core.spade.hadoop.ExtEquivalenceClassListWritable.java

License:Open Source License

public ExtEquivalenceClassListWritable(Integer sequences, Double minsupp,
        List<EquivalenceClass> equivClasList) {

    List<EquivalenceClass> sortedEquivClasList = new ArrayList<EquivalenceClass>(equivClasList);
    Collections.sort(sortedEquivClasList);

    this.sequences = new IntWritable(sequences);
    this.size = new IntWritable(sortedEquivClasList.size());

    this.minsupp = new DoubleWritable(minsupp);
    this.eqClasList = new EquivalenceClassListWritable(sortedEquivClasList);

}

From source file:de.l3s.common.features.hadoop.TimeSeriesReducer.java

License:Apache License

// Note: the parameter is an Iterator rather than an Iterable, so this method
// does not override the new-API Reducer#reduce and must be invoked explicitly.
public void reduce(Text key, Iterator<Timeseries> values, Context context)
        throws IOException, InterruptedException {
    // To load JRI library
    // Set -Djava.library.path = /usr/lib64/R/library/rJava/jri
    Timeseries timeSeries;
    List<Integer> ts_list = Lists.newArrayList();
    while (values.hasNext()) {

        timeSeries = values.next();
        for (KeyData keydata : timeSeries.ts_points) {
            ts_list.add((int) keydata.dataPoint.fValue);
        }
        // Calculate the auto-correlation score; ts_list is never cleared, so each
        // score covers all points accumulated so far for this key
        double[] acf_score = eval.computeAutoCorrel(ts_list.size(),
                ArrayUtils.toPrimitive(ts_list.toArray(new Integer[ts_list.size()])));

        context.write(key, new DoubleWritable(acf_score[0]));
    }

}

From source file:de.tudarmstadt.ukp.dkpro.bigdata.collocations.AssocReducer.java

License:Apache License

/**
 * Performs the assoc calculation. Input is k: ngram:ngramFreq,
 * v: (h_|t_)subgram:subgramFreq, with N the ngram total.
 *
 * Each ngram has two subgrams, a head and a tail, referred to as A and B
 * respectively below:
 *
 *  A+ B: number of times A and B appear together: ngramFreq
 *  A+!B: number of times A appears without B: hSubgramFreq - ngramFreq
 * !A+ B: number of times B appears without A: tSubgramFreq - ngramFreq
 * !A+!B: number of times neither A nor B appears (in that order):
 *        N - (subgramFreqA + subgramFreqB - ngramFreq)
 */
@Override
protected void reduce(Gram ngram, Iterable<Gram> values, Context context)
        throws IOException, InterruptedException {

    int[] gramFreq = { -1, -1 };

    int frequency = ngram.getFrequency();
    if (ngram.getType() == Gram.Type.UNIGRAM && emitUnigrams) {
        DoubleWritable dd = new DoubleWritable(frequency);
        Text t = new Text(ngram.getString());
        context.getCounter(Count.EMITTED_UNIGRAM).increment(1);
        context.write(t, dd);
        return;
    }
    // TODO better way to handle errors? Wouldn't an exception thrown here
    // cause hadoop to re-try the job?
    String[] gram = new String[2];
    for (Gram value : values) {

        int pos = value.getType() == Gram.Type.HEAD ? 0 : 1;

        if (gramFreq[pos] != -1) {
            log.warn("Extra {} for {}, skipping", value.getType(), ngram);
            if (value.getType() == Gram.Type.HEAD) {
                context.getCounter(Skipped.EXTRA_HEAD).increment(1);
            } else {
                context.getCounter(Skipped.EXTRA_TAIL).increment(1);
            }
            return;
        }

        gram[pos] = value.getString();
        gramFreq[pos] = value.getFrequency();
    }

    if (gramFreq[0] == -1) {
        log.warn("Missing head for {}, skipping.", ngram);
        context.getCounter(Skipped.MISSING_HEAD).increment(1);
        return;
    }
    if (gramFreq[1] == -1) {
        log.warn("Missing tail for {}, skipping", ngram);
        context.getCounter(Skipped.MISSING_TAIL).increment(1);
        return;
    }

    double value;
    // build contingency table
    long k11 = frequency; /* a&b */
    long k12 = gramFreq[0] - frequency; /* a&!b */
    long k21 = gramFreq[1] - frequency; /* !a&b */
    long k22 = ngramTotal - (gramFreq[0] + gramFreq[1] - frequency); /* !a&!b */

    try {

        value = assocCalculator.assoc(k11, k12, k21, k22);

    } catch (IllegalArgumentException ex) {
        context.getCounter(Skipped.LLR_CALCULATION_ERROR).increment(1);
        log.warn(
                "Problem calculating assoc metric for ngram {}, HEAD {}:{}, TAIL {}:{}, k11/k12/k21/k22: {}/{}/{}/{}",
                new Object[] { ngram, gram[0], gramFreq[0], gram[1], gramFreq[1], k11, k12, k21, k22 }, ex);
        return;
    }
    if (value < minValue) {
        context.getCounter(Skipped.LESS_THAN_MIN_VALUE).increment(1);
    } else {

        ass.init(k11, k12, k21, k22);
        mos.write("llr", new Text(ngram.getString()), new DoubleWritable(value));
        try {
            double pmi = ass.mutual_information(); // pmiCalculator.assoc(k11, k12, k21, k22)
            mos.write("pmi", new Text(ngram.getString()), new DoubleWritable(pmi));
        } catch (Exception e) {
            context.getCounter(Skipped.PMI_CALCULATION_ERROR).increment(1);

        }
        try {

            double chi = ass.chisquared(); // chiCalculator.assoc(k11, k12, k21, k22)
            mos.write("chi", new Text(ngram.getString()), new DoubleWritable(chi));
        } catch (Exception e) {
            context.getCounter(Skipped.CHI_CALCULATION_ERROR).increment(1);

        }
        try {

            double dice = ass.dice(); // diceCalculator.assoc(k11, k12, k21, k22)
            mos.write("dice", new Text(ngram.getString()), new DoubleWritable(dice));
        } catch (Exception e) {
            context.getCounter(Skipped.DICE_CALCULATION_ERROR).increment(1);

        }

        context.getCounter("assoctest", "EMITTED NGRAM").increment(1);

        mos.write("contingency", new Text(ngram.getString()),
                new Text("" + k11 + "\t" + k12 + "\t" + k21 + "\t" + k22));

    }
}
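To make the contingency cells above concrete, here is a small worked example with invented frequencies (none of these numbers come from the original corpus): suppose a bigram occurs 10 times, its head A occurs 50 times, its tail B occurs 12 times, and there are N = 1000 ngrams in total:

long k11 = 10;                    // A&B  : the ngram itself
long k12 = 50 - 10;               // A&!B : head without tail = 40
long k21 = 12 - 10;               // !A&B : tail without head = 2
long k22 = 1000 - (50 + 12 - 10); // !A&!B: neither = 948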

From source file:demo.SsMapper.java

License:Apache License

@Override
public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
    String[] tokens = value.toString().split(",");

    System.out.println("Array contents " + Arrays.toString(tokens));
    String symbol = tokens[0].trim();
    System.out.println("Symbol " + symbol);
    Long timestamp = Long.parseLong(tokens[1].trim());
    System.out.println("timestamp " + timestamp);

    Double v = Double.parseDouble(tokens[2].trim());

    System.out.println("double " + v);

    StockKey stockKey = new StockKey(symbol, timestamp);
    DoubleWritable stockValue = new DoubleWritable(v);

    context.write(stockKey, stockValue);
    _log.debug(stockKey.toString() + " => " + stockValue.toString());
}

From source file:eagle.query.aggregate.raw.GroupbyValue.java

License:Apache License

public void add(Double value) {
    this.value.add(new DoubleWritable(value));
}