Example usage for org.apache.commons.lang.mutable MutableDouble add

List of usage examples for org.apache.commons.lang.mutable MutableDouble add

Introduction

On this page you can find example usages of org.apache.commons.lang.mutable.MutableDouble#add.

Prototype

public void add(Number operand) 

Source Link

Document

Adds a value.

Usage

From source file:com.datatorrent.lib.math.MarginKeyVal.java

/**
 * Accumulates the tuple's value into the running sum kept for its key.
 *
 * @param tuple key/value pair whose value is added
 * @param map   key -&gt; running sum of values seen so far
 */
public void addTuple(KeyValPair<K, V> tuple, Map<K, MutableDouble> map) {
    K key = tuple.getKey();
    V value = tuple.getValue();
    // Skip keys the operator is not configured to process and null values.
    if (!doprocessKey(key) || (value == null)) {
        return;
    }
    MutableDouble sum = map.get(key);
    if (sum == null) {
        // First time we see this key: start a fresh accumulator, stored under
        // a cloned key so the caller's key object is not retained.
        sum = new MutableDouble(0.0);
        map.put(cloneKey(key), sum);
    }
    sum.add(value.doubleValue());
}

From source file:eu.project.ttc.models.Context.java

/**
 * Records a co-occurrence count for {@code term} according to {@code mode}:
 * ADD_MODE accumulates, DEL_MODE overwrites, MAX_MODE/MIN_MODE keep the
 * larger/smaller of the stored and supplied values.
 */
public void setCoOccurrences(String term, double coOccurrences, int mode) {
    // Lazily create the accumulator for terms we have not seen before.
    if (!occurrences.containsKey(term))
        occurrences.put(term, new MutableDouble(0.0));

    MutableDouble current = occurrences.get(term);

    // DEL overwrites unconditionally; MAX/MIN overwrite only when the new
    // value wins the comparison. ADD is the only accumulating mode.
    if (mode == DEL_MODE || (mode == MAX_MODE && coOccurrences > current.doubleValue())
            || (mode == MIN_MODE && coOccurrences < current.doubleValue())) {
        current.setValue(coOccurrences);
    } else if (mode == ADD_MODE) {
        current.add(coOccurrences);
    }
}

From source file:com.datatorrent.lib.math.MarginMap.java

/**
 * Accumulates every value in the tuple map into the per-key running sums.
 *
 * @param tuple incoming key/value pairs
 * @param map   key -&gt; running sum of values seen so far
 */
public void addTuple(Map<K, V> tuple, Map<K, MutableDouble> map) {
    for (Map.Entry<K, V> entry : tuple.entrySet()) {
        K key = entry.getKey();
        V value = entry.getValue();
        // Skip keys the operator is not configured to process and null values.
        if (!doprocessKey(key) || (value == null)) {
            continue;
        }
        MutableDouble sum = map.get(key);
        if (sum == null) {
            // New key: start a fresh accumulator under a cloned key so the
            // incoming tuple's key object is not retained.
            sum = new MutableDouble(0.0);
            map.put(cloneKey(key), sum);
        }
        sum.add(value.doubleValue());
    }
}

From source file:com.datatorrent.lib.math.QuotientMap.java

/**
 * Add/Update entry to key/sum value map.
 *
 * <p>When {@code countkey} is set the map counts occurrences of the key and
 * {@code value} is ignored; otherwise the map accumulates the sum of values.
 *
 * @param key
 *          name.
 * @param value
 *          value for key.
 * @param map
 *          numerator/denominator key/sum map.
 */
public void addEntry(K key, V value, Map<K, MutableDouble> map) {
    if (!doprocessKey(key) || (value == null)) {
        return;
    }
    MutableDouble val = map.get(key);
    if (val == null) {
        // First sighting of this key: seed with 1 (count mode) or the value
        // itself, and insert under a cloned key so the caller's key object is
        // not retained. Putting only on insert avoids the redundant
        // cloneKey()+put() the old code did on every call — HashMap.put never
        // replaces an existing key object, so those calls were pure overhead.
        val = new MutableDouble(countkey ? 1.00 : value.doubleValue());
        map.put(cloneKey(key), val);
    } else if (countkey) {
        val.increment();
    } else {
        val.add(value.doubleValue());
    }
}

From source file:com.datatorrent.apps.logstream.DimensionOperator.java

/**
 * Does computations for the given dimension and its value names on the given value key name.
 *
 * <p>Maintains per-(timeBucket|type|filter|dimensionId|valueKey) aggregation maps in
 * {@code cacheObject} and folds {@code value} into the SUM/COUNT/AVERAGE aggregates
 * configured for {@code valueKeyName}.
 *
 * @param timeBucket time bucket
 * @param dimensionCombinationId id of dimension combination
 * @param dimValueName values of the dimension combination
 * @param valueKeyName name of the value key on which operations are performed
 * @param value value of the value key
 */
private void doComputations(String timeBucket, Integer dimensionCombinationId, String dimValueName,
        String valueKeyName, Number value) {
    StringBuilder sb = new StringBuilder();
    sb.append(timeBucket).append("|").append(recordType.get(LogstreamUtil.LOG_TYPE)).append("|")
            .append(recordType.get(LogstreamUtil.FILTER)).append("|").append(dimensionCombinationId).append("|")
            .append(valueKeyName);

    //final key format --> timebucket|type|filter|dimId|val
    //eg: m|201311230108|1|4|10|bytes
    String key = sb.toString();

    Map<AggregateOperation, Number> aggregations;

    // Look up (or lazily create) the aggregation map for this cache key and
    // dimension value, seeding every configured aggregate for the value key with 0.
    if (cacheObject.containsKey(key)) {
        Map<String, Map<AggregateOperation, Number>> dimValueNames = cacheObject.get(key);
        if (dimValueNames.containsKey(dimValueName)) {
            aggregations = dimValueNames.get(dimValueName);
        } else {
            aggregations = new HashMap<AggregateOperation, Number>();
            for (AggregateOperation aggregationType : valueOperationTypes.get(valueKeyName)) {
                aggregations.put(aggregationType, new MutableDouble(0));
            }

            dimValueNames.put(dimValueName, aggregations);
        }
    } else {
        // Cache miss on the outer key: create both levels of the structure.
        Map<String, Map<AggregateOperation, Number>> newDimValueNames = new HashMap<String, Map<AggregateOperation, Number>>();
        aggregations = new HashMap<AggregateOperation, Number>();
        for (AggregateOperation aggregationType : valueOperationTypes.get(valueKeyName)) {
            aggregations.put(aggregationType, new MutableDouble(0));
        }
        newDimValueNames.put(dimValueName, aggregations);
        cacheObject.put(key, newDimValueNames);
    }

    if (aggregations.containsKey(AggregateOperation.SUM)) {
        MutableDouble aggrVal = (MutableDouble) aggregations.get(AggregateOperation.SUM);
        aggrVal.add(value);
    }

    if (aggregations.containsKey(AggregateOperation.COUNT)) {
        MutableDouble aggrVal = (MutableDouble) aggregations.get(AggregateOperation.COUNT);
        aggrVal.add(1);
    }

    // Running average: fold the new value in using the COUNT updated above
    // (avg' = (avg*(n-1) + value) / n).
    // NOTE(review): this reads the COUNT aggregate unconditionally — if an
    // operation set ever contains AVERAGE without COUNT, get(COUNT) returns
    // null and this NPEs; confirm AVERAGE is always paired with COUNT.
    if (aggregations.containsKey(AggregateOperation.AVERAGE)) {
        double avgVal = aggregations.get(AggregateOperation.AVERAGE).doubleValue();
        double countVal = aggregations.get(AggregateOperation.COUNT).doubleValue();
        double newAvg = ((avgVal * (countVal - 1)) + value.doubleValue()) / countVal;
        aggregations.put(AggregateOperation.AVERAGE, new MutableDouble(newAvg));
    }

}

From source file:org.apache.mahout.classifier.bayes.algorithm.BayesAlgorithm.java

/**
 * Computes the classification weight of {@code document} for {@code label} by
 * summing, over each distinct word, its in-document frequency times its
 * feature weight from the datastore.
 */
@Override
public double documentWeight(final Datastore datastore, final String label, String[] document)
        throws InvalidDatastoreException {
    // Build a term-frequency table for the document.
    OpenObjectIntHashMap<String> termCounts = new OpenObjectIntHashMap<String>(document.length / 2);
    for (String token : document) {
        int updated = termCounts.containsKey(token) ? termCounts.get(token) + 1 : 1;
        termCounts.put(token, updated);
    }
    final MutableDouble weightSum = new MutableDouble(0.0);

    // Accumulate frequency-weighted feature weights across distinct terms.
    wordListTraversal: // no-op label removed; plain traversal below
    termCounts.forEachPair(new ObjectIntProcedure<String>() {

        @Override
        public boolean apply(String token, int frequency) {
            try {
                weightSum.add(frequency * featureWeight(datastore, label, token));
            } catch (InvalidDatastoreException e) {
                // forEachPair cannot throw checked exceptions; tunnel it out.
                throw new IllegalStateException(e);
            }
            return true;
        }
    });
    return weightSum.doubleValue();
}

From source file:org.apache.mahout.classifier.bayes.BayesAlgorithm.java

/**
 * Computes the classification weight of {@code document} for {@code label} by
 * summing, over each distinct word, its in-document frequency times its
 * feature weight from the datastore.
 */
@Override
public double documentWeight(final Datastore datastore, final String label, String[] document) {
    // Tally term frequencies for the document in one pass.
    OpenObjectIntHashMap<String> termCounts = new OpenObjectIntHashMap<String>(document.length / 2);
    for (String token : document) {
        termCounts.adjustOrPutValue(token, 1, 1);
    }
    final MutableDouble weightSum = new MutableDouble(0.0);

    // Accumulate frequency-weighted feature weights across distinct terms.
    termCounts.forEachPair(new ObjectIntProcedure<String>() {
        @Override
        public boolean apply(String token, int frequency) {
            try {
                weightSum.add(frequency * featureWeight(datastore, label, token));
            } catch (InvalidDatastoreException e) {
                // forEachPair cannot throw checked exceptions; tunnel it out.
                throw new IllegalStateException(e);
            }
            return true;
        }
    });
    return weightSum.doubleValue();
}

From source file:org.apache.mahout.classifier.bayes.mapreduce.common.BayesFeatureMapper.java

/**
 * We need to count the number of times we've seen a term with a given label and we need to output that. But
 * this Mapper does more than just outputting the count. It first does weight normalisation. Secondly, it
 * outputs for each unique word in a document value 1 for summing up as the Term Document Frequency, which
 * later is used to calculate the Idf. Thirdly, it outputs for each label the number of times a document was
 * seen (also used in Idf calculation).
 *
 * @param key
 *          The label
 * @param value
 *          the features (all unique) associated w/ this label in stringtuple format
 * @param output
 *          The OutputCollector to write the results to
 * @param reporter
 *          Not used
 */
@Override
public void map(Text key, Text value, final OutputCollector<StringTuple, DoubleWritable> output,
        Reporter reporter) throws IOException {
    final String label = key.toString();
    String[] tokens = SPACE_TAB.split(value.toString());
    OpenObjectIntHashMap<String> wordList = new OpenObjectIntHashMap<String>(tokens.length * gramSize);

    // Phase 1: build the term-frequency table, shingling into n-grams when gramSize > 1.
    if (gramSize > 1) {
        ShingleFilter sf = new ShingleFilter(new IteratorTokenStream(Iterators.forArray(tokens)), gramSize);
        // NOTE(review): this do-while reads the term attribute BEFORE the first
        // incrementToken() call, and never calls reset(); the usual TokenStream
        // contract is reset() then incrementToken() before reading attributes.
        // Confirm IteratorTokenStream tolerates this ordering.
        do {
            String term = sf.getAttribute(CharTermAttribute.class).toString();
            if (!term.isEmpty()) {
                if (wordList.containsKey(term)) {
                    wordList.put(term, 1 + wordList.get(term));
                } else {
                    wordList.put(term, 1);
                }
            }
        } while (sf.incrementToken());
    } else {
        for (String term : tokens) {
            if (wordList.containsKey(term)) {
                wordList.put(term, 1 + wordList.get(term));
            } else {
                wordList.put(term, 1);
            }
        }
    }
    // Phase 2: length normalisation factor = sqrt(sum of squared term frequencies).
    final MutableDouble lengthNormalisationMut = new MutableDouble(0.0);
    wordList.forEachPair(new ObjectIntProcedure<String>() {
        @Override
        public boolean apply(String word, int dKJ) {
            // Square in long arithmetic to avoid int overflow for large counts.
            long squared = (long) dKJ * (long) dKJ;
            lengthNormalisationMut.add(squared);
            return true;
        }
    });

    final double lengthNormalisation = Math.sqrt(lengthNormalisationMut.doubleValue());

    // Output Length Normalized + TF Transformed Frequency per Word per Class
    // Log(1 + D_ij)/SQRT( SIGMA(k, D_kj) )
    wordList.forEachPair(new ObjectIntProcedure<String>() {
        @Override
        public boolean apply(String token, int dKJ) {
            try {
                StringTuple tuple = new StringTuple();
                tuple.add(BayesConstants.WEIGHT);
                tuple.add(label);
                tuple.add(token);
                DoubleWritable f = new DoubleWritable(Math.log1p(dKJ) / lengthNormalisation);
                output.collect(tuple, f);
            } catch (IOException e) {
                // forEachPair cannot throw checked exceptions; tunnel it out.
                throw new IllegalStateException(e);
            }
            return true;
        }
    });
    reporter.setStatus("Bayes Feature Mapper: Document Label: " + label);

    // Output Document Frequency per Word per Class
    // Corpus Document Frequency (FEATURE_COUNT)
    // Corpus Term Frequency (FEATURE_TF)
    wordList.forEachPair(new ObjectIntProcedure<String>() {
        @Override
        public boolean apply(String token, int dKJ) {
            try {
                StringTuple dfTuple = new StringTuple();
                dfTuple.add(BayesConstants.DOCUMENT_FREQUENCY);
                dfTuple.add(label);
                dfTuple.add(token);
                output.collect(dfTuple, ONE);

                StringTuple tokenCountTuple = new StringTuple();
                tokenCountTuple.add(BayesConstants.FEATURE_COUNT);
                tokenCountTuple.add(token);
                output.collect(tokenCountTuple, ONE);

                StringTuple tokenTfTuple = new StringTuple();
                tokenTfTuple.add(BayesConstants.FEATURE_TF);
                tokenTfTuple.add(token);
                output.collect(tokenTfTuple, new DoubleWritable(dKJ));
            } catch (IOException e) {
                // forEachPair cannot throw checked exceptions; tunnel it out.
                throw new IllegalStateException(e);
            }
            return true;
        }
    });

    // output that we have seen the label to calculate the Count of Document per
    // class
    StringTuple labelCountTuple = new StringTuple();
    labelCountTuple.add(BayesConstants.LABEL_COUNT);
    labelCountTuple.add(label);
    output.collect(labelCountTuple, ONE);
}

From source file:org.matsim.contrib.av.robotaxi.scoring.TaxiFareHandlerTest.java

/**
 * Test method for {@link org.matsim.contrib.av.robotaxi.scoring.TaxiFareHandler#TaxiFareHandler(org.matsim.core.config.Config)}.
 *
 * <p>Runs two one-minute, one-link taxi rides and checks the accumulated
 * person-money events against the configured fare components.
 */
@Test
public void testTaxiFareHandler() {
    Network network = createNetwork();
    Config config = ConfigUtils.createConfig();
    TaxiFareConfigGroup tccg = new TaxiFareConfigGroup();
    config.addModule(tccg);
    tccg.setBasefare(1);
    tccg.setDailySubscriptionFee(1);
    tccg.setDistanceFare_m(1.0 / 1000.0);
    tccg.setTimeFare_h(36);
    DvrpConfigGroup dvrp = new DvrpConfigGroup();
    dvrp.setMode("taxi");
    config.addModule(dvrp);
    final MutableDouble fare = new MutableDouble(0);
    EventsManager events = EventsUtils.createEventsManager();
    TaxiFareHandler tfh = new TaxiFareHandler(config, events, network);
    events.addHandler(tfh);
    events.addHandler(new PersonMoneyEventHandler() {

        @Override
        public void handleEvent(PersonMoneyEvent event) {
            // Accumulate all charges (amounts are negative for fares).
            fare.add(event.getAmount());
        }

        @Override
        public void reset(int iteration) {
        }
    });
    Id<Person> p1 = Id.createPersonId("p1");
    Id<Vehicle> t1 = Id.createVehicleId("v1");
    events.processEvent(new PersonDepartureEvent(0.0, p1, Id.createLinkId("12"), dvrp.getMode()));
    events.processEvent(new PersonEntersVehicleEvent(60.0, p1, t1));
    events.processEvent(new LinkEnterEvent(61, t1, Id.createLinkId("23")));
    events.processEvent(new PersonArrivalEvent(120.0, p1, Id.createLinkId("23"), dvrp.getMode()));

    events.processEvent(new PersonDepartureEvent(180.0, p1, Id.createLinkId("12"), dvrp.getMode()));
    events.processEvent(new PersonEntersVehicleEvent(240.0, p1, t1));
    events.processEvent(new LinkEnterEvent(241, t1, Id.createLinkId("23")));
    events.processEvent(new PersonArrivalEvent(300.0, p1, Id.createLinkId("23"), dvrp.getMode()));

    // fare: 1 (daily fee) + 2*1 (basefare) + 2*1 (distance) + 2*(36/60) (time, 1 min/ride)
    //     = -(1 + 2 + 2 + 1.2) = -6.2
    // Use the delta overload: the fare is accumulated in floating point, so an
    // exact-equality comparison (deprecated assertEquals without delta) is flaky.
    Assert.assertEquals(-6.2, fare.doubleValue(), 1e-9);
}