Example usage for org.apache.commons.math3.stat.descriptive SummaryStatistics getMean

List of usage examples for org.apache.commons.math3.stat.descriptive SummaryStatistics getMean

Introduction

On this page you can find example usage for org.apache.commons.math3.stat.descriptive SummaryStatistics getMean.

Prototype

public double getMean() 

Document

Returns the mean of the values that have been added.
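Before the per-project examples below, a minimal self-contained sketch of the call (the values are illustrative only):

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class GetMeanExample {
    public static void main(String[] args) {
        SummaryStatistics stats = new SummaryStatistics(); // streaming statistics: values are not stored
        stats.addValue(1.0);
        stats.addValue(2.0);
        stats.addValue(3.0);
        System.out.println(stats.getMean()); // 2.0; getMean() returns NaN when no values have been added
    }
}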

Usage

From source file:com.sop4j.SimpleStatistics.java

public static void main(String[] args) {
    final MersenneTwister rng = new MersenneTwister(); // used for RNG... READ THE DOCS!!!
    final int[] values = new int[NUM_VALUES];

    final DescriptiveStatistics descriptiveStats = new DescriptiveStatistics(); // stores values
    final SummaryStatistics summaryStats = new SummaryStatistics(); // doesn't store values
    final Frequency frequency = new Frequency();

    // add numbers into our stats
    for (int i = 0; i < NUM_VALUES; ++i) {
        values[i] = rng.nextInt(MAX_VALUE);

        descriptiveStats.addValue(values[i]);
        summaryStats.addValue(values[i]);
        frequency.addValue(values[i]);
    }

    // print out some standard stats
    System.out.println("MIN: " + summaryStats.getMin());
    System.out.println("AVG: " + String.format("%.3f", summaryStats.getMean()));
    System.out.println("MAX: " + summaryStats.getMax());

    // get some more complex stats only offered by DescriptiveStatistics
    System.out.println("90%: " + descriptiveStats.getPercentile(90));
    System.out.println("MEDIAN: " + descriptiveStats.getPercentile(50));
    System.out.println("SKEWNESS: " + String.format("%.4f", descriptiveStats.getSkewness()));
    System.out.println("KURTOSIS: " + String.format("%.4f", descriptiveStats.getKurtosis()));

    // quick and dirty stats (need a little help from Guava to convert from int[] to double[])
    System.out.println("MIN: " + StatUtils.min(Doubles.toArray(Ints.asList(values))));
    System.out.println("AVG: " + String.format("%.4f", StatUtils.mean(Doubles.toArray(Ints.asList(values)))));
    System.out.println("MAX: " + StatUtils.max(Doubles.toArray(Ints.asList(values))));

    // some stats based upon frequencies
    System.out.println("NUM OF 7s: " + frequency.getCount(7));
    System.out.println("CUMULATIVE FREQUENCY OF 7: " + frequency.getCumFreq(7));
    System.out.println("PERCENTAGE OF 7s: " + frequency.getPct(7));
}

From source file:com.civprod.writerstoolbox.testarea.UnsupervisedDiscourseSegmentation.java

public static List<List<String>> segment(Document<?> inDocument, SentenceDetector inSentenceDetector,
        StringTokenizer inStringTokenizer) {
    List<String> concatenateTokens = concatenateTokens(inDocument, inSentenceDetector, inStringTokenizer);
    List<String> stemmAndFilterList = TokenUtil.stemmAndFilterList(concatenateTokens);
    List<List<String>> splitIntoFixLengthLists = splitIntoFixLengthLists(stemmAndFilterList, 20);
    List<Counter<String>> counters = splitIntoFixLengthLists.parallelStream()
            .map((List<String> curSentence) -> CounterUtils.count(curSentence)).collect(Collectors.toList());
    List<Double> cosineSimilarity = new ArrayList<>(counters.size() - 20);
    for (int i = 0; i < (counters.size() - 20); i++) {
        cosineSimilarity.add(cosineSimilarityStemmedAndFiltered(Counter.join(counters.subList(i, i + 10)),
                Counter.join(counters.subList(i + 11, i + 20))));
    }
    List<Double> valleys = new ArrayList<>(cosineSimilarity.size() - 2);
    for (int i = 0; i < cosineSimilarity.size() - 2; i++) { // depth of the dip at each position relative to its two successors
        double ya1 = cosineSimilarity.get(i);
        double ya2 = cosineSimilarity.get(i + 1);
        double ya3 = cosineSimilarity.get(i + 2);
        valleys.add((ya1 - ya2) + (ya3 - ya2));
    }
    SummaryStatistics valleyStatistics = valleys.parallelStream().collect(SummaryStatisticCollector.instance);
    double cutoffThreshold = valleyStatistics.getMean() - valleyStatistics.getStandardDeviation();
    int lastLocation = 0;
    List<Span> spans = new ArrayList<>(1);
    for (int i = 0; i < valleys.size(); i++) {
        double curValley = valleys.get(i);
        if (curValley < cutoffThreshold) {
            int curLocation = (i + 11) * 20;
            spans.add(new Span(lastLocation, curLocation));
            lastLocation = curLocation;
        }
    }
    spans.add(new Span(lastLocation, concatenateTokens.size()));
    return spans.parallelStream()
            .map((Span curSpan) -> concatenateTokens.subList(curSpan.getStart(), curSpan.getEnd()))
            .collect(Collectors.toList());
}

From source file:net.recommenders.rival.evaluation.statistics.EffectSize.java

/**
 *
 * Estimation of effect size based on the distribution of score differences
 * (from paired samples).
 *
 * @param <V> type of the keys of each map.
 * @param baselineMetricPerDimension map for the baseline method, one value
 * for each user (dimension)
 * @param testMetricPerDimension map for the test method, one value for each
 * user (dimension)
 * @return the effect size.
 */
public static <V> double getEffectSizePairedT(final Map<V, Double> baselineMetricPerDimension,
        final Map<V, Double> testMetricPerDimension) {
    Set<V> overlap = new HashSet<V>(baselineMetricPerDimension.keySet());
    overlap.retainAll(testMetricPerDimension.keySet());

    SummaryStatistics differences = new SummaryStatistics();
    for (V key : overlap) {
        double diff = testMetricPerDimension.get(key) - baselineMetricPerDimension.get(key);
        differences.addValue(diff);
    }

    return getEffectSizePairedT(differences.getMean(), Math.sqrt(differences.getVariance()));
}
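The two-argument getEffectSizePairedT overload called above is not listed on this page; assuming the usual paired-samples definition, it would amount to dividing the mean difference by the standard deviation of the differences, roughly:

    // hypothetical sketch of the overload used above, assuming the standard
    // paired effect size d = mean(differences) / sd(differences)
    public static double getEffectSizePairedT(final double meanOfDifferences, final double stdOfDifferences) {
        return meanOfDifferences / stdOfDifferences;
    }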

From source file:com.github.rinde.dynurg.PoissonDynamismExperiment.java

static void createDynamismHistogram(TimeSeriesGenerator generator, long seed, File file, int repetitions) {
    try {
        Files.createParentDirs(file);
    } catch (final IOException e1) {
        throw new IllegalStateException(e1);
    }
    final RandomGenerator rng = new MersenneTwister(seed);
    final List<Double> values = newArrayList();
    final SummaryStatistics ss = new SummaryStatistics();
    for (int i = 0; i < repetitions; i++) {
        final List<Double> times = generator.generate(rng.nextLong());
        ss.addValue(times.size());
        final double dynamism = Metrics.measureDynamism(times, LENGTH_OF_DAY);
        values.add(dynamism);
    }
    System.out.println(
            file.getName() + " has #events: mean: " + ss.getMean() + " +- " + ss.getStandardDeviation());

    final StringBuilder sb = new StringBuilder();
    sb.append(Joiner.on("\n").join(values));
    try {
        Files.write(sb.toString(), file, Charsets.UTF_8);
    } catch (final IOException e) {
        throw new IllegalStateException(e);
    }
}

From source file:gr.aueb.cs.nlp.wordtagger.data.structure.features.FeatureBuilder.java

/**
 * Normalizes the feature vectors of words using the mean and standard deviation
 * of each feature, provided the vector values are higher than 1.
 * @param words
 * @param indeces
 */
private static void normalize(List<Word> words, List<Integer> indeces) {
    Map<Integer, Double> means = new WeakHashMap<>();
    Map<Integer, Double> stds = new WeakHashMap<>();
    for (Integer i : indeces) {
        SummaryStatistics smt = new SummaryStatistics();
        for (Word w : words) {
            smt.addValue(w.getFeatureVec().getValues()[i]);
        }
        means.put(i, smt.getMean());
        stds.put(i, smt.getStandardDeviation());
    }
    for (Integer i : indeces) {
        for (Word w : words) {
            double value = w.getFeatureVec().getValues()[i];
            w.getFeatureVec().getValues()[i] = (value - means.get(i)) / stds.get(i);
        }
    }
}

From source file:co.turnus.common.util.CommonDataUtil.java

public static StatisticalData createFrom(SummaryStatistics summary) {
    StatisticalData data = CommonFactory.eINSTANCE.createStatisticalData();
    if (summary.getN() != 0) {
        data.setMax(summary.getMax());
        data.setMin(summary.getMin());
        data.setSamples(summary.getN());
        data.setSum(summary.getSum());
        data.setVariance(summary.getVariance());
        data.setMean(summary.getMean());
    }
    return data;
}

From source file:net.recommenders.rival.evaluation.statistics.EffectSize.java

/**
 * Computes Cohen's d, either the classical formulation (dividing the pooled
 * standard deviation by the sum of the number of samples) or using the
 * least squares estimation (subtracting 2 from the sum of the number of
 * samples when normalizing the pooled standard deviation).
 *
 * @param <V> type of the keys of each map.
 * @param baselineMetricPerDimension map for the baseline method, one value
 * for each user (dimension)
 * @param testMetricPerDimension map for the test method, one value for each
 * user (dimension)
 * @param doLeastSquares flag to use one formulation or the other (see
 * description above)
 * @return the computed Cohen's d as an estimation of the effect size.
 */
public static <V> double getCohenD(final Map<V, Double> baselineMetricPerDimension,
        final Map<V, Double> testMetricPerDimension, final boolean doLeastSquares) {
    SummaryStatistics statsBaseline = new SummaryStatistics();
    for (double d : baselineMetricPerDimension.values()) {
        statsBaseline.addValue(d);
    }
    SummaryStatistics statsTest = new SummaryStatistics();
    for (double d : testMetricPerDimension.values()) {
        statsTest.addValue(d);
    }
    if (doLeastSquares) {
        return getCohenDLeastSquares((int) statsBaseline.getN(), statsBaseline.getMean(),
                statsBaseline.getStandardDeviation(), (int) statsTest.getN(), statsTest.getMean(),
                statsTest.getStandardDeviation());
    }
    return getCohenD((int) statsBaseline.getN(), statsBaseline.getMean(), statsBaseline.getStandardDeviation(),
            (int) statsTest.getN(), statsTest.getMean(), statsTest.getStandardDeviation());
}
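The numeric getCohenD and getCohenDLeastSquares helpers are not listed on this page either; a sketch consistent with the javadoc above (an assumption for illustration, not the library's verified source) would pool the two standard deviations and normalize by n1 + n2 (classical) or n1 + n2 - 2 (least squares):

    // hypothetical sketch following the description above
    static double cohenD(int n1, double mean1, double std1, int n2, double mean2, double std2,
            boolean leastSquares) {
        double denominator = leastSquares ? (n1 + n2 - 2) : (n1 + n2);
        double pooledStd = Math.sqrt(((n1 - 1) * std1 * std1 + (n2 - 1) * std2 * std2) / denominator);
        return (mean2 - mean1) / pooledStd;
    }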

From source file:model.experiments.WhenDoesAveragingMatters.java

public static void learnedRun(int competitors, Class<? extends AskPricingStrategy> pricing,
        int weightedAverageSize) {

    final MacroII macroII = new MacroII(System.currentTimeMillis());
    final TripolistScenario scenario1 = new TripolistScenario(macroII);
    scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
    scenario1.setAskPricingStrategy(pricing);
    scenario1.setControlType(MonopolistScenario.MonopolistScenarioIntegratedControlEnum.MARGINAL_PLANT_CONTROL);
    scenario1.setAdditionalCompetitors(competitors);
    scenario1.setWorkersToBeRehiredEveryDay(true);
    scenario1.setDemandIntercept(102);

    scenario1.setSalesPricePreditorStrategy(FixedDecreaseSalesPredictor.class);

    //assign scenario
    macroII.setScenario(scenario1);

    macroII.start();

    macroII.schedule.step(macroII);
    for (Firm firm : scenario1.getCompetitors()) {
        for (HumanResources hr : firm.getHRs())
            hr.setPredictor(new FixedIncreasePurchasesPredictor(0));

        final SalesDepartment salesDepartment = firm.getSalesDepartment(UndifferentiatedGoodType.GENERIC);
        salesDepartment.setPriceAverager(new WeightedPriceAverager(weightedAverageSize));
        salesDepartment.setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
    }

    while (macroII.schedule.getTime() < 5000) {
        macroII.schedule.step(macroII);

    }

    SummaryStatistics prices = new SummaryStatistics();
    SummaryStatistics quantities = new SummaryStatistics();
    for (int j = 0; j < 500; j++) {
        macroII.schedule.step(macroII);
        assert !Float.isNaN(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
        prices.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
        quantities.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayVolume());

    }

    System.out.println(prices.getMean() + " - " + quantities.getMean() + "----" + macroII.seed() + " | "
            + macroII.getMarket(UndifferentiatedGoodType.GENERIC).getLastDaysAveragePrice());
    System.out.println("standard deviations: price : " + prices.getStandardDeviation() + " , quantity: "
            + quantities.getStandardDeviation());

}

From source file:model.experiments.tuningRuns.CompetitiveAveragingGridSearch.java

public static CompetitiveAveragingResult intervalRuns(float hrWeight, float salesWeight) {

    SummaryStatistics averageResultingPrice = new SummaryStatistics();
    SummaryStatistics averageResultingQuantity = new SummaryStatistics();
    SummaryStatistics averageStandardDeviation = new SummaryStatistics();
    for (int i = 0; i < 5; i++) {
        final MacroII macroII = new MacroII(System.currentTimeMillis());
        final TripolistScenario scenario1 = new TripolistScenario(macroII);

        scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
        scenario1.setAskPricingStrategy(SalesControlWithFixedInventoryAndPID.class);
        scenario1.setControlType(
                MonopolistScenario.MonopolistScenarioIntegratedControlEnum.MARGINAL_PLANT_CONTROL);
        scenario1.setAdditionalCompetitors(4);
        scenario1.setWorkersToBeRehiredEveryDay(true);
        scenario1.setDemandIntercept(102);

        scenario1.setSalesPricePreditorStrategy(FixedDecreaseSalesPredictor.class);

        //assign scenario
        macroII.setScenario(scenario1);

        macroII.start();

        macroII.schedule.step(macroII);
        for (Firm firm : scenario1.getCompetitors()) {
            for (HumanResources hr : firm.getHRs()) {
                hr.setPredictor(new FixedIncreasePurchasesPredictor(0));
                hr.setPriceAverager(new AveragerOverSmallIntervalOnly(hrWeight));
            }
            firm.getSalesDepartment(UndifferentiatedGoodType.GENERIC)
                    .setPriceAverager(new AveragerOverSmallIntervalOnly(salesWeight));
            firm.getSalesDepartment(UndifferentiatedGoodType.GENERIC)
                    .setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
        }

        while (macroII.schedule.getTime() < 10000) {
            macroII.schedule.step(macroII);
        }

        SummaryStatistics prices = new SummaryStatistics();
        SummaryStatistics quantities = new SummaryStatistics();
        for (int j = 0; j < 500; j++) {
            macroII.schedule.step(macroII);
            prices.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
            quantities.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayVolume());

        }

        //okay?
        averageResultingPrice.addValue(prices.getMean());
        averageResultingQuantity.addValue(quantities.getMean());
        averageStandardDeviation.addValue(prices.getStandardDeviation());

    }

    //okay?
    return new CompetitiveAveragingResult(averageResultingPrice.getMean(), averageResultingQuantity.getMean(),
            averageStandardDeviation.getMean());

}

From source file:model.experiments.tuningRuns.CompetitiveAveragingGridSearch.java

public static CompetitiveAveragingResult weightedRun(int hrDays, int salesDays, boolean decoratedHr,
        boolean decoratedSales) {

    SummaryStatistics averageResultingPrice = new SummaryStatistics();
    SummaryStatistics averageResultingQuantity = new SummaryStatistics();
    SummaryStatistics averageStandardDeviation = new SummaryStatistics();
    for (int i = 0; i < 5; i++) {
        final MacroII macroII = new MacroII(System.currentTimeMillis());
        final TripolistScenario scenario1 = new TripolistScenario(macroII);

        scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
        scenario1.setAskPricingStrategy(SalesControlWithFixedInventoryAndPID.class);
        scenario1.setControlType(
                MonopolistScenario.MonopolistScenarioIntegratedControlEnum.MARGINAL_PLANT_CONTROL);
        scenario1.setAdditionalCompetitors(4);
        scenario1.setWorkersToBeRehiredEveryDay(true);
        scenario1.setDemandIntercept(102);

        scenario1.setSalesPricePreditorStrategy(FixedDecreaseSalesPredictor.class);

        //assign scenario
        macroII.setScenario(scenario1);

        macroII.start();

        macroII.schedule.step(macroII);
        for (Firm firm : scenario1.getCompetitors()) {
            for (HumanResources hr : firm.getHRs()) {
                hr.setPredictor(new FixedIncreasePurchasesPredictor(0));
                PriceAverager priceAverager = new WeightedPriceAverager(hrDays);
                if (decoratedHr)
                    priceAverager = new NoTradingOverrideAveragerDecorator(priceAverager);
                hr.setPriceAverager(priceAverager);
            }
            PriceAverager priceAverager = new WeightedPriceAverager(salesDays);
            if (decoratedSales)
                priceAverager = new NoTradingOverrideAveragerDecorator(priceAverager);
            firm.getSalesDepartment(UndifferentiatedGoodType.GENERIC).setPriceAverager(priceAverager);
            firm.getSalesDepartment(UndifferentiatedGoodType.GENERIC)
                    .setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
        }

        while (macroII.schedule.getTime() < 10000) {
            macroII.schedule.step(macroII);
        }

        SummaryStatistics prices = new SummaryStatistics();
        SummaryStatistics quantities = new SummaryStatistics();
        for (int j = 0; j < 500; j++) {
            macroII.schedule.step(macroII);
            prices.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayAveragePrice());
            quantities.addValue(macroII.getMarket(UndifferentiatedGoodType.GENERIC).getTodayVolume());

        }

        //okay?
        averageResultingPrice.addValue(prices.getMean());
        averageResultingQuantity.addValue(quantities.getMean());
        averageStandardDeviation.addValue(prices.getStandardDeviation());

    }

    //okay?
    return new CompetitiveAveragingResult(averageResultingPrice.getMean(), averageResultingQuantity.getMean(),
            averageStandardDeviation.getMean());

}