Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics DescriptiveStatistics

Introduction

On this page you can find example usage of the org.apache.commons.math3.stat.descriptive.DescriptiveStatistics no-argument constructor, DescriptiveStatistics().

Prototype

public DescriptiveStatistics() 

Document

Construct a DescriptiveStatistics instance with an infinite window.
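
Before the per-project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing the no-argument constructor in use. With an infinite window, every value passed to addValue is retained and the summary statistics are computed over all of them.

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class DescriptiveStatisticsExample {
    public static void main(String[] args) {
        // Infinite window: all added values are kept in memory.
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (double value : new double[] { 2, 4, 4, 4, 5, 5, 7, 9 }) {
            stats.addValue(value);
        }
        System.out.println("n      = " + stats.getN());
        System.out.println("mean   = " + stats.getMean());
        System.out.println("stddev = " + stats.getStandardDeviation());
        System.out.println("median = " + stats.getPercentile(50));
    }
}

For a bounded memory footprint, the overloaded constructor DescriptiveStatistics(int window) can be used instead; it discards the oldest value once the window is full.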

Usage

From source file:com.screenslicer.core.util.StringUtil.java

public static void trimLargeItems(int[] stringLengths, List<? extends Object> originals) {
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (int i = 0; i < stringLengths.length; i++) {
        stats.addValue(stringLengths[i]);
    }
    double stdDev = stats.getStandardDeviation();
    double mean = stats.getMean();
    List<Object> toRemove = new ArrayList<Object>();
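    // Flag items whose length lies more than four standard deviations above the mean for removal.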
    for (int i = 0; i < stringLengths.length; i++) {
        double diff = stringLengths[i] - mean;
        if (diff / stdDev > 4d) {
            toRemove.add(originals.get(i));
        }
    }
    for (Object obj : toRemove) {
        originals.remove(obj);
    }
}

From source file:io.hops.experiments.stats.TransactionStatsAggregator.java

public static Map<String, DescriptiveStatistics> aggregate(File statsFile, String headerPattern,
        String transaction, boolean printSummary) throws IOException {
    if (!statsFile.exists())
        return null;

    transaction = transaction.toUpperCase();

    BufferedReader reader = new BufferedReader(new FileReader(statsFile));
    String tx = reader.readLine();
    String[] headers = null;
    Map<Integer, DescriptiveStatistics> statistics = Maps.newHashMap();
    if (tx != null) {
        headers = tx.split(",");
        for (int i = 1; i < headers.length; i++) {
            String h = headers[i].toUpperCase();
            if (h.contains(headerPattern) || headerPattern.equals(ALL)) {
                statistics.put(i, new DescriptiveStatistics());
            }
        }
    }

    int txCount = 0;
    while ((tx = reader.readLine()) != null) {
        if (tx.startsWith(transaction) || transaction.equals(ALL)) {
            txCount++;
            String[] txStats = tx.split(",");
            if (txStats.length == headers.length) {
                for (Map.Entry<Integer, DescriptiveStatistics> e : statistics.entrySet()) {
                    e.getValue().addValue(Double.valueOf(txStats[e.getKey()]));
                }
            }
        }
    }

    reader.close();

    if (headers == null)
        return null;

    if (printSummary) {
        System.out.println("Transaction: " + transaction + " " + txCount);

        List<Integer> keys = new ArrayList<Integer>(statistics.keySet());
        Collections.sort(keys);

        for (Integer i : keys) {
            DescriptiveStatistics stats = statistics.get(i);
            if (stats.getMin() == 0 && stats.getMax() == 0) {
                continue;
            }
            System.out.println(headers[i]);
            System.out.println("Min " + stats.getMin() + " Max " + stats.getMax() + " Avg " + stats.getMean()
                    + " Std " + stats.getStandardDeviation());
        }
    }

    Map<String, DescriptiveStatistics> annotatedStats = Maps.newHashMap();
    for (Map.Entry<Integer, DescriptiveStatistics> e : statistics.entrySet()) {
        annotatedStats.put(headers[e.getKey()].trim(), e.getValue());
    }
    return annotatedStats;
}

From source file:com.intuit.tank.persistence.databases.BucketDataItemTest.java

/**
 * Run the Date getStartTime() method test.
 *
 * @throws Exception
 * 
 * @generatedBy CodePro at 9/10/14 10:32 AM
 */
@Test
public void testGetStartTime_1() throws Exception {
    BucketDataItem fixture = new BucketDataItem(1, new Date(), new DescriptiveStatistics());

    Date result = fixture.getStartTime();

    assertNotNull(result);
}

From source file:fr.inria.eventcloud.benchmarks.performance_tuning.QuadrupleStatsAnalyzer.java

public QuadrupleStatsAnalyzer() {
    this.graphLength = new DescriptiveStatistics();
    this.subjectLength = new DescriptiveStatistics();
    this.predicateLength = new DescriptiveStatistics();
    this.objectLength = new DescriptiveStatistics();

    this.quadsLength = new DescriptiveStatistics();
    // SummaryStatistics
}

From source file:io.yields.math.concepts.operator.Smoothness.java

@Override
public DescriptiveStatistics apply(Collection<Tuple> tuples) {
    Validate.isTrue(tuples.size() > order);
    //first we normalize the tuples so data fits in the unit square
    List<Tuple> normalizedData = normalize(tuples);
    //calculate error (i.e. distance between tuple y and fitted polynomial)
    RealMatrix error = computeDistance(normalizedData);
    //group in stats object
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (double value : error.getColumn(0)) {
        stats.addValue(Math.abs(value));
    }
    return stats;
}

From source file:cz.cuni.mff.d3s.tools.perfdoc.server.measuring.statistics.Statistics.java

public long computeMean() {
    if (measurementResults.isEmpty()) {
        return -1;
    }

    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (Long l : measurementResults) {
        stats.addValue(l);
    }

    // Note: despite the method name, this returns the median (50th percentile) of the measurements, not the arithmetic mean.
    return (long) stats.getPercentile(50);
}

From source file:com.soulgalore.web.pagesavings.googlepagespeed.GooglePageSpeedSavingsCollector.java

@Inject
public GooglePageSpeedSavingsCollector(HTTPBodyFetcher fetcher, SiteResultCollector siteResultCollector,
        @Named("com.soulgalore.web.savings.googlekey") String key) {
    this.fetcher = fetcher;
    this.siteResultCollector = siteResultCollector;
    this.key = key;

    for (String rule : GooglePageSpeedSiteResultCollector.RULES) {
        statistics.put(rule, new DescriptiveStatistics());
    }

}

From source file:com.caseystella.analytics.outlier.streaming.mad.SketchyMovingMADTest.java

@Test
public void testSketchyMovingMAD() throws IOException {
    Random r = new Random(0);
    List<DataPoint> points = new ArrayList<>();
    DescriptiveStatistics stats = new DescriptiveStatistics();
    DescriptiveStatistics medianStats = new DescriptiveStatistics();
    OutlierConfig config = JSONUtil.INSTANCE.load(madConfig, OutlierConfig.class);
    SketchyMovingMAD madAlgo = ((SketchyMovingMAD) config.getSketchyOutlierAlgorithm()).withConfig(config);
    int i = 0;
    for (i = 0; i < 10000; ++i) {
        double val = r.nextDouble() * 1000 - 10000;
        stats.addValue(val);
        DataPoint dp = (new DataPoint(i, val, null, "foo"));
        madAlgo.analyze(dp);
        points.add(dp);
    }
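    // Collect each point's absolute deviation from the sample median; the median of these deviations is the MAD.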
    for (DataPoint dp : points) {
        medianStats.addValue(Math.abs(dp.getValue() - stats.getPercentile(50)));
    }
    double mad = medianStats.getPercentile(50);
    double median = stats.getPercentile(50);
    {
        double val = getValAtModifiedZScore(3.6, mad, median);
        System.out.println("MODERATE => " + val);
        DataPoint dp = (new DataPoint(i++, val, null, "foo"));
        Severity s = madAlgo.analyze(dp).getSeverity();
        Assert.assertTrue(s == Severity.MODERATE_OUTLIER);
    }
    {
        double val = getValAtModifiedZScore(6, mad, median);
        System.out.println("SEVERE => " + val);
        DataPoint dp = (new DataPoint(i++, val, null, "foo"));
        Severity s = madAlgo.analyze(dp).getSeverity();
        Assert.assertTrue(s == Severity.SEVERE_OUTLIER);
    }

    Assert.assertTrue(madAlgo.getMedianDistributions().get("foo").getAmount() <= 110);
    Assert.assertTrue(madAlgo.getMedianDistributions().get("foo").getChunks().size() <= 12);
}

From source file:cc.redberry.core.tensor.BulkTestsForParser.java

@Test
public void testAllExpressionsInTestDirectory() {
    File testDirectory = new File("src/test");
    Counter c = new Counter(), m = new Counter();
    DescriptiveStatistics statistics = new DescriptiveStatistics();
    testParseRecurrently(testDirectory, c, m, statistics);
    System.out.println("Total number of lines containing parse(..): " + c.counter);
    System.out.println("Total number of matched and parsed lines: " + m.counter);
    System.out.println("Strings statistics: \n\t" + statistics.toString().replace("\n", "\n\t"));
    Assert.assertTrue((c.counter - m.counter) < 2);
}

From source file:cc.kave.commons.pointsto.evaluation.PointsToSetEvaluation.java

public void run(Path contextsDir) throws IOException {
    StatementCounterVisitor stmtCounterVisitor = new StatementCounterVisitor();
    List<Context> contexts = getSamples(contextsDir).stream()
            .filter(cxt -> cxt.getSST().accept(stmtCounterVisitor, null) > 0).collect(Collectors.toList());
    log("Using %d contexts for evaluation\n", contexts.size());

    PointsToUsageExtractor extractor = new PointsToUsageExtractor();
    for (Context context : contexts) {
        PointstoSetSizeAnalysis analysis = new PointstoSetSizeAnalysis();
        extractor.extract(analysis.compute(context));
        results.addAll(analysis.getSetSizes());
    }

    DescriptiveStatistics statistics = new DescriptiveStatistics();
    for (Integer setSize : results) {
        statistics.addValue(setSize.doubleValue());
    }
    log("mean: %.2f\n", statistics.getMean());
    log("stddev: %.2f\n", statistics.getStandardDeviation());
    log("min/max: %.2f/%.2f\n", statistics.getMin(), statistics.getMax());
}