Example usage for org.apache.hadoop.io DoubleWritable get

Introduction

This page collects example usages of org.apache.hadoop.io.DoubleWritable.get().

Prototype

public double get() 
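
The method unwraps the primitive double held by the writable. A minimal sketch of the call, using made-up values:

DoubleWritable dw = new DoubleWritable(3.14);
double d = dw.get();   // d == 3.14
dw.set(2.71);          // writables are mutable, so instances are often reused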

Usage

From source file:com.linkedin.cubert.io.CompactWritablesDeserializer.java

License:Open Source License

@SuppressWarnings("unchecked")
@Override
public K deserialize(K object) throws IOException {
    if (in.available() == 0)
        throw new IOException("No bytes available to deserialize");

    Tuple t = (Tuple) object;

    if (t == null) {
        t = TupleFactory.getInstance().newTuple(datatypes.length);
    }

    for (int i = 0; i < datatypes.length; i++) {
        Object field = null;

        switch (datatypes[i]) {
        case BOOLEAN: {
            IntWritable iw = VariableLengthEncoder.decodeInteger(in);
            if (iw != null) {
                ((BooleanWritable) writables[i]).set(iw.get() == 1);
                field = writables[i];
            }
            break;
        }
        case BYTE: {
            IntWritable iw = VariableLengthEncoder.decodeInteger(in);
            if (iw != null) {
                ((ByteWritable) writables[i]).set((byte) iw.get());
                field = writables[i];
            }
            break;
        }
        case DOUBLE: {
            DoubleWritable dw = VariableLengthEncoder.decodeDouble(in);
            if (dw != null) {
                ((DoubleWritable) writables[i]).set(dw.get());
                field = writables[i];
            }
            break;
        }
        case FLOAT: {
            FloatWritable fw = VariableLengthEncoder.decodeFloat(in);
            if (fw != null) {
                ((FloatWritable) writables[i]).set(fw.get());
                field = writables[i];
            }
            break;
        }
        case INT: {
            IntWritable iw = VariableLengthEncoder.decodeInteger(in);
            if (iw != null) {
                ((IntWritable) writables[i]).set(iw.get());
                field = writables[i];
            }
            break;
        }
        case LONG: {
            LongWritable lw = VariableLengthEncoder.decodeLong(in);
            if (lw != null) {
                ((LongWritable) writables[i]).set(lw.get());
                field = writables[i];
            }
            break;
        }
        case STRING: {
            IntWritable iw = VariableLengthEncoder.decodeInteger(in);
            if (iw != null) {
                int length = iw.get();

                if (length > buffer.length)
                    buffer = new byte[Math.max(length, 2 * buffer.length)];

                // a single read() may return fewer bytes than requested, so loop until done
                for (int off = 0; off < length;) {
                    int n = in.read(buffer, off, length - off);
                    if (n < 0)
                        throw new IOException("Unexpected end of stream");
                    off += n;
                }
                ((Text) writables[i]).set(new String(buffer, 0, length));
                field = writables[i];
            }
            break;
        }
        default:
            throw new RuntimeException("Cannot deserialize column of type " + datatypes[i]);
        }

        t.set(i, field);

    }

    return (K) t;
}

From source file:com.marcolotz.MRComponents.SerializerConverter.java

License:Creative Commons License

/**
 * Reads a double from the given input.
 * @param datainput the stream to read from
 * @return the double that was read
 * @throws IOException if reading from the stream fails
 */
public static double readDouble(DataInput datainput) throws IOException {
    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(datainput);
    return doubleWritable.get();
}
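
The same pattern works in the other direction. A minimal companion sketch (writeDouble is a hypothetical helper named for symmetry, not code from the source file):

public static void writeDouble(DataOutput dataoutput, double value) throws IOException {
    // DoubleWritable.write() emits the same wire format that readDouble() expects
    new DoubleWritable(value).write(dataoutput);
}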

From source file:com.mongodb.hadoop.examples.lolmatches.LOLMatchesdReducer.java

License:Apache License

@Override
public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
        throws IOException, InterruptedException {

    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }

    final double avg = sum / count;

    if (LOG.isDebugEnabled()) {
        LOG.debug("Average Kills for hour " + pKey.get() + " was " + avg);
    }

    BasicBSONObject output = new BasicBSONObject();
    output.put("count", count);
    output.put("avg", avg);
    output.put("sum", sum);
    reduceResult.setDoc(output);
    pContext.write(pKey, reduceResult);
}

From source file:com.mongodb.hadoop.examples.treasury.TreasuryYieldReducer.java

License:Apache License

@Override
public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
        throws IOException, InterruptedException {

    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }

    final double avg = sum / count;

    LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);

    BasicBSONObject output = new BasicBSONObject();
    output.put("count", count);
    output.put("avg", avg);
    output.put("sum", sum);
    pContext.write(pKey, new BSONWritable(output));
}

From source file:com.mongodb.hadoop.examples.treasury.TreasuryYieldUpdateReducer.java

License:Apache License

@Override
public void reduce(final IntWritable pKey, final Iterable<DoubleWritable> pValues, final Context pContext)
        throws IOException, InterruptedException {

    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }

    final double avg = sum / count;

    LOG.debug("Average 10 Year Treasury for " + pKey.get() + " was " + avg);

    BasicBSONObject query = new BasicBSONObject("_id", pKey.get());
    BasicBSONObject modifiers = new BasicBSONObject();
    modifiers.put("$set",
            BasicDBObjectBuilder.start().add("count", count).add("avg", avg).add("sum", sum).get());

    modifiers.put("$push", new BasicBSONObject("calculatedAt", new Date()));
    modifiers.put("$inc", new BasicBSONObject("numCalculations", 1));
    pContext.write(null, new MongoUpdateWritable(query, modifiers, true, false));
}

From source file:com.mongodb.hadoop.examples.TreasuryYieldReducer.java

License:Apache License

public void reduce(IntWritable key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {

    int count = 0;
    double sum = 0;
    for (final DoubleWritable value : values) {
        log.debug("Key: " + key + " Value: " + value);
        sum += value.get();
        count++;
    }

    double avg = sum / count;

    log.info("Average 10 Year Treasury for " + key.get() + " was " + avg);

    context.write(key, new DoubleWritable(avg));
}

From source file:com.mongodb.hadoop.examples.WorldDevIndicatorReducer.java

License:Apache License

@Override
public void reduce(final Text pCountryCode, final Iterable<DoubleWritable> pValues, final Context pContext)
        throws IOException, InterruptedException {
    double count = 0;
    double sum = 0;
    for (final DoubleWritable value : pValues) {
        sum += value.get();
        count++;
    }

    final double avg = sum / count;

    pContext.write(pCountryCode, new DoubleWritable(avg));
}

From source file:com.moz.fiji.mapreduce.lib.reduce.DoubleSumReducer.java

License:Apache License

/** {@inheritDoc} */
@Override
protected void reduce(K key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    double sum = 0.0;
    for (DoubleWritable value : values) {
        sum += value.get();
    }
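    // reuse the preallocated mValue writable across reduce() calls to avoid per-key allocation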
    mValue.set(sum);
    context.write(key, mValue);
}

From source file:com.mozilla.grouperfish.mahout.clustering.display.lda.DisplayLDATopics.java

License:Apache License

public static Map<Integer, PriorityQueue<Pair<Double, String>>> getTopWordsByTopics(String stateDirPath,
        Map<Integer, String> featureIndex, int numWordsToPrint) {
    Map<Integer, Double> expSums = new HashMap<Integer, Double>();
    Map<Integer, PriorityQueue<Pair<Double, String>>> queues = new HashMap<Integer, PriorityQueue<Pair<Double, String>>>();
    SequenceFileDirectoryReader reader = null;
    try {
        IntPairWritable k = new IntPairWritable();
        DoubleWritable v = new DoubleWritable();
        reader = new SequenceFileDirectoryReader(new Path(stateDirPath));
        while (reader.next(k, v)) {
            int topic = k.getFirst();
            int featureId = k.getSecond();
            if (featureId >= 0 && topic >= 0) {
                double score = v.get();
                Double curSum = expSums.get(topic);
                if (curSum == null) {
                    curSum = 0.0;
                }
                expSums.put(topic, curSum + Math.exp(score));
                String feature = featureIndex.get(featureId);

                PriorityQueue<Pair<Double, String>> q = queues.get(topic);
                if (q == null) {
                    q = new PriorityQueue<Pair<Double, String>>(numWordsToPrint);
                }
                enqueue(q, feature, score, numWordsToPrint);
                queues.put(topic, q);
            }
        }
    } catch (IOException e) {
        LOG.error("Error reading LDA state dir", e);
    } finally {
        if (reader != null) {
            reader.close();
        }
    }

    for (Map.Entry<Integer, PriorityQueue<Pair<Double, String>>> entry : queues.entrySet()) {
        int topic = entry.getKey();
        for (Pair<Double, String> p : entry.getValue()) {
            double score = p.getFirst();
            p.setFirst(Math.exp(score) / expSums.get(topic));
        }
    }

    return queues;
}

From source file:com.soteradefense.dga.pr.PageRankComputation.java

License:Apache License

@Override
public void compute(Vertex<Text, DoubleWritable, Text> vertex, Iterable<DoubleWritable> messages)
        throws IOException {

    float dampingFactor = this.getConf().getFloat(DAMPING_FACTOR, DAMPING_FACTOR_DEFAULT_VALUE);

    long step = getSuperstep();

    if (step == 0) {
        //set initial value
        logger.debug("Superstep is 0: Setting the default value.");
        vertex.setValue(new DoubleWritable(1.0 / getTotalNumVertices()));
    } else { // go until no one votes to continue

        double rank = 0;
        for (DoubleWritable partial : messages) {
            rank += partial.get();
        }
        rank = ((1 - dampingFactor) / getTotalNumVertices()) + (dampingFactor * rank);
        double vertexValue = vertex.getValue().get();
        double delta = Math.abs(rank - vertexValue) / vertexValue;
        aggregate(MAX_EPSILON, new DoubleWritable(delta));
        vertex.setValue(new DoubleWritable(rank));
        logger.debug("{} is calculated {} for a PageRank.", vertex.getId(), rank);
    }
    distributeRank(vertex);
}