Example usage for org.apache.commons.math3.stat.descriptive SummaryStatistics getMax

Introduction

On this page you can find example usage for org.apache.commons.math3.stat.descriptive SummaryStatistics getMax.

Prototype

public double getMax() 

Document

Returns the maximum of the values that have been added.
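
Before looking at the project examples below, here is a minimal self-contained sketch (not taken from any of the projects listed on this page) showing the method in isolation: add a few values, then read back the maximum.

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class GetMaxExample {
    public static void main(String[] args) {
        SummaryStatistics stats = new SummaryStatistics();
        // SummaryStatistics keeps running aggregates, not the individual values.
        for (double value : new double[] { 3.5, 7.25, 1.0, 4.75 }) {
            stats.addValue(value);
        }
        // getMax() returns Double.NaN until at least one value has been added.
        System.out.println("max = " + stats.getMax()); // prints "max = 7.25"
    }
}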

Usage

From source file:org.apache.cassandra.dht.tokenallocator.TokenAllocation.java

public static String statToString(SummaryStatistics stat) {
    return String.format("max %.2f min %.2f stddev %.4f", stat.getMax() / stat.getMean(),
            stat.getMin() / stat.getMean(), stat.getStandardDeviation());
}

From source file:org.apache.solr.client.solrj.io.eval.HistogramEvaluator.java

@Override
public Object doWork(Object... values) throws IOException {
    if (Arrays.stream(values).anyMatch(item -> null == item)) {
        return null;
    }

    List<?> sourceValues;
    Integer bins = 10;

    if (values.length >= 1) {
        sourceValues = values[0] instanceof List<?> ? (List<?>) values[0] : Arrays.asList(values[0]);

        if (values.length >= 2) {
            if (values[1] instanceof Number) {
                bins = ((Number) values[1]).intValue();
            } else {
                throw new IOException(String.format(Locale.ROOT,
                        "Invalid expression %s - if second parameter is provided then it must be a valid number but found %s instead",
                        toExpression(constructingFactory), values[1].getClass().getSimpleName()));
            }
        }
    } else {
        throw new IOException(
                String.format(Locale.ROOT, "Invalid expression %s - expecting at least one value but found %d",
                        toExpression(constructingFactory), containedEvaluators.size()));
    }

    EmpiricalDistribution distribution = new EmpiricalDistribution(bins);
    distribution.load(
            sourceValues.stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray());

    List<Tuple> histogramBins = new ArrayList<>();
    for (SummaryStatistics binSummary : distribution.getBinStats()) {
        Map<String, Number> map = new HashMap<>();
        map.put("max", binSummary.getMax());
        map.put("mean", binSummary.getMean());
        map.put("min", binSummary.getMin());
        map.put("stdev", binSummary.getStandardDeviation());
        map.put("sum", binSummary.getSum());
        map.put("N", binSummary.getN());
        map.put("var", binSummary.getVariance());
        map.put("cumProb", distribution.cumulativeProbability(binSummary.getMean()));
        map.put("prob", distribution.probability(binSummary.getMin(), binSummary.getMax()));
        histogramBins.add(new Tuple(map));
    }

    return histogramBins;
}

From source file:org.apache.solr.cloud.autoscaling.sim.TestLargeCluster.java

public void benchmarkNodeLost() throws Exception {
    List<String> results = new ArrayList<>();
    for (int wait : renard5x) {
        for (int delay : renard5x) {
            SummaryStatistics totalTime = new SummaryStatistics();
            SummaryStatistics ignoredOurEvents = new SummaryStatistics();
            SummaryStatistics ignoredOtherEvents = new SummaryStatistics();
            SummaryStatistics startedOurEvents = new SummaryStatistics();
            SummaryStatistics startedOtherEvents = new SummaryStatistics();
            for (int i = 0; i < 5; i++) {
                if (cluster != null) {
                    cluster.close();
                }
                setupCluster();
                setUp();
                setupTest();
                long total = doTestNodeLost(wait, delay * 1000, 0);
                totalTime.addValue(total);
                // get event counts
                Map<String, Map<String, AtomicInteger>> counts = cluster.simGetEventCounts();
                Map<String, AtomicInteger> map = counts.remove("node_lost_trigger");
                startedOurEvents.addValue(map.getOrDefault("STARTED", ZERO).get());
                ignoredOurEvents.addValue(map.getOrDefault("IGNORED", ZERO).get());
                int otherStarted = 0;
                int otherIgnored = 0;
                for (Map<String, AtomicInteger> m : counts.values()) {
                    otherStarted += m.getOrDefault("STARTED", ZERO).get();
                    otherIgnored += m.getOrDefault("IGNORED", ZERO).get();
                }
                startedOtherEvents.addValue(otherStarted);
                ignoredOtherEvents.addValue(otherIgnored);
            }
            results.add(String.format(Locale.ROOT,
                    "%d\t%d\t%4.0f\t%4.0f\t%4.0f\t%4.0f\t%6.0f\t%6.0f\t%6.0f\t%6.0f\t%6.0f", wait, delay,
                    startedOurEvents.getMean(), ignoredOurEvents.getMean(), startedOtherEvents.getMean(),
                    ignoredOtherEvents.getMean(), totalTime.getMin(), totalTime.getMax(), totalTime.getMean(),
                    totalTime.getStandardDeviation(), totalTime.getVariance()));
        }
    }
    log.info("===== RESULTS ======");
    log.info("waitFor\tdelay\tSTRT\tIGN\toSTRT\toIGN\tmin\tmax\tmean\tstdev\tvar");
    results.forEach(s -> log.info(s));
}

From source file:org.apache.tika.eval.tokens.TokenStatistics.java

@Override
public boolean equals(Object o) {

    if (this == o)
        return true;
    if (o == null || getClass() != o.getClass())
        return false;

    TokenStatistics that = (TokenStatistics) o;

    if (totalTokens != that.totalTokens)
        return false;
    if (totalUniqueTokens != that.totalUniqueTokens)
        return false;
    if (!doubleEquals(that.entropy, entropy))
        return false;
    // Probably incorrect - comparing Object[] arrays with Arrays.equals
    if (!Arrays.equals(topN, that.topN))
        return false;

    SummaryStatistics thatS = ((TokenStatistics) o).summaryStatistics;
    if (summaryStatistics.getN() != thatS.getN())
        return false;

    //if both have n==0, don't bother with the stats
    if (summaryStatistics.getN() == 0L)
        return true;
    //TODO: consider adding others...
    if (!doubleEquals(summaryStatistics.getGeometricMean(), thatS.getGeometricMean()))
        return false;
    if (!doubleEquals(summaryStatistics.getMax(), thatS.getMax()))
        return false;
    if (!doubleEquals(summaryStatistics.getMean(), thatS.getMean()))
        return false;
    if (!doubleEquals(summaryStatistics.getMin(), thatS.getMin()))
        return false;
    if (!doubleEquals(summaryStatistics.getSum(), thatS.getSum()))
        return false;
    if (!doubleEquals(summaryStatistics.getStandardDeviation(), thatS.getStandardDeviation()))
        return false;
    return true;
}

From source file:org.apereo.portal.events.aggr.stat.JpaStatisticalSummary.java

/**
 * Returns true iff <code>object</code> is a
 * <code>SummaryStatistics</code> instance and all statistics have the
 * same values as this.
 * @param object the object to test equality against.
 * @return true if object equals this
 */
@Override
public boolean equals(Object object) {
    if (object == this) {
        return true;
    }
    if (object instanceof SummaryStatistics == false) {
        return false;
    }
    SummaryStatistics stat = (SummaryStatistics) object;
    return Precision.equalsIncludingNaN(stat.getGeometricMean(), getGeometricMean())
            && Precision.equalsIncludingNaN(stat.getMax(), getMax())
            && Precision.equalsIncludingNaN(stat.getMean(), getMean())
            && Precision.equalsIncludingNaN(stat.getMin(), getMin())
            && Precision.equalsIncludingNaN(stat.getN(), getN())
            && Precision.equalsIncludingNaN(stat.getSum(), getSum())
            && Precision.equalsIncludingNaN(stat.getSumsq(), getSumsq())
            && Precision.equalsIncludingNaN(stat.getVariance(), getVariance());
}

From source file:org.calrissian.accumulorecipes.metricsstore.ext.stats.impl.AccumuloStatsMetricStoreTest.java

@Test
public void testStatisticAccuracy() throws Exception {
    AccumuloStatsMetricStore metricStore = new AccumuloStatsMetricStore(getConnector());

    Random random = new Random();

    List<Long> sampleData = asList((long) random.nextInt(10000), (long) random.nextInt(10000),
            (long) random.nextInt(10000), (long) random.nextInt(10000), (long) random.nextInt(10000));

    // use commons math SummaryStatistics as a reference for the expected statistics
    SummaryStatistics sumStats = new SummaryStatistics();
    for (long num : sampleData)
        sumStats.addValue(num);

    final long timestamp = System.currentTimeMillis();
    Iterable<Metric> testData = transform(sampleData, new Function<Long, Metric>() {
        @Override
        public Metric apply(Long num) {
            return new Metric(timestamp, "group", "type", "name", "", num);
        }
    });

    metricStore.save(testData);

    List<Stats> stats = newArrayList(metricStore.queryStats(new Date(0), new Date(), "group", "type", "name",
            MetricTimeUnit.MINUTES, new Auths()));

    assertEquals(1, stats.size());
    Stats stat = stats.get(0);

    assertEquals(sumStats.getMin(), stat.getMin(), Double.MIN_NORMAL);
    assertEquals(sumStats.getMax(), stat.getMax(), Double.MIN_NORMAL);
    assertEquals(sumStats.getSum(), stat.getSum(), Double.MIN_NORMAL);
    assertEquals(sumStats.getN(), stat.getCount(), Double.MIN_NORMAL);
    assertEquals(sumStats.getMean(), stat.getMean(), Double.MIN_NORMAL);
    assertEquals(sumStats.getPopulationVariance(), stat.getVariance(), 0.00000001);
    assertEquals(sumStats.getVariance(), stat.getVariance(true), 0.00000001);
    assertEquals(sqrt(sumStats.getPopulationVariance()), stat.getStdDev(), 0.00000001);
    assertEquals(sumStats.getStandardDeviation(), stat.getStdDev(true), 0.00000001);
}

From source file:org.cloudsimplus.testbeds.linuxscheduler.CloudletSchedulerRunner.java

@Override
protected void printFinalResults(String metricName, SummaryStatistics stats) {
    System.out.printf("Results for metric %s\n", metricName);
    System.out.printf("  Mean Number of Cloudlets:         %.2f\n",
            cloudletsNumber.stream().mapToDouble(n -> n).average().orElse(0.0));
    System.out.printf("  Cloudlet Completion Time Avg:     %.2f | Std dev:      %.2f\n", stats.getMean(),
            stats.getStandardDeviation());
    System.out.printf("  Cloudlet Completion Min Avg Time: %.2f | Max avg time: %.2f\n", stats.getMin(),
            stats.getMax());
    System.out.println();
}

From source file:org.eclipse.dataset.AbstractDataset.java

/**
 * Calculate summary statistics for a dataset
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 * @param name
 */
protected void calculateSummaryStats(final boolean ignoreNaNs, final boolean ignoreInfs, final String name) {
    final IndexIterator iter = getIterator();
    final SummaryStatistics stats = new SummaryStatistics();

    if (storedValues == null || !storedValues.containsKey(STORE_HASH)) {
        boolean hasNaNs = false;
        double hash = 0;
        double pmax = Double.MIN_VALUE;
        double pmin = Double.POSITIVE_INFINITY;

        while (iter.hasNext()) {
            final double val = getElementDoubleAbs(iter.index);
            if (Double.isNaN(val)) {
                hash = (hash * 19) % Integer.MAX_VALUE;
                if (ignoreNaNs)
                    continue;
                hasNaNs = true;
            } else if (Double.isInfinite(val)) {
                hash = (hash * 19) % Integer.MAX_VALUE;
                if (ignoreInfs)
                    continue;
            } else {
                hash = (hash * 19 + val) % Integer.MAX_VALUE;
            }
            if (val > 0) {
                if (val < pmin) {
                    pmin = val;
                }
                if (val > pmax) {
                    pmax = val;
                }
            }
            stats.addValue(val);
        }

        int ihash = ((int) hash) * 19 + getDtype() * 17 + getElementsPerItem();
        setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_SHAPELESS_HASH), ihash);
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX),
                hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMax()));
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN),
                hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMin()));
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MAX),
                hasNaNs ? Double.NaN : fromDoubleToNumber(pmax));
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_POS_MIN),
                hasNaNs ? Double.NaN : fromDoubleToNumber(pmin));
        storedValues.put(name, stats);
    } else {
        while (iter.hasNext()) {
            final double val = getElementDoubleAbs(iter.index);
            if (ignoreNaNs && Double.isNaN(val)) {
                continue;
            }
            if (ignoreInfs && Double.isInfinite(val)) {
                continue;
            }

            stats.addValue(val);
        }

        storedValues.put(name, stats);
    }
}

From source file:org.eclipse.january.dataset.AbstractDataset.java

/**
 * Calculate summary statistics for a dataset
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 * @param name
 */
protected void calculateSummaryStats(final boolean ignoreNaNs, final boolean ignoreInfs, final String name) {
    final IndexIterator iter = getIterator();
    final SummaryStatistics stats = new SummaryStatistics();
    // sum of logs is slow and we don't use it, so blocking its calculation here
    stats.setSumLogImpl(new NullStorelessUnivariateStatistic());

    if (storedValues == null || !storedValues.containsKey(STORE_HASH)) {
        boolean hasNaNs = false;
        double hash = 0;

        while (iter.hasNext()) {
            final double val = getElementDoubleAbs(iter.index);
            if (Double.isNaN(val)) {
                hash = (hash * 19) % Integer.MAX_VALUE;
                if (ignoreNaNs)
                    continue;
                hasNaNs = true;
            } else if (Double.isInfinite(val)) {
                hash = (hash * 19) % Integer.MAX_VALUE;
                if (ignoreInfs)
                    continue;
            } else {
                hash = (hash * 19 + val) % Integer.MAX_VALUE;
            }
            stats.addValue(val);
        }

        int ihash = ((int) hash) * 19 + getDType() * 17 + getElementsPerItem();
        setStoredValue(storeName(ignoreNaNs, ignoreInfs, STORE_SHAPELESS_HASH), ihash);
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MAX),
                hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMax()));
        storedValues.put(storeName(ignoreNaNs, ignoreInfs, STORE_MIN),
                hasNaNs ? Double.NaN : fromDoubleToNumber(stats.getMin()));
        storedValues.put(name, stats);
    } else {
        while (iter.hasNext()) {
            final double val = getElementDoubleAbs(iter.index);
            if (ignoreNaNs && Double.isNaN(val)) {
                continue;
            }
            if (ignoreInfs && Double.isInfinite(val)) {
                continue;
            }

            stats.addValue(val);
        }

        storedValues.put(name, stats);
    }
}

From source file:org.eclipse.january.metadata.internal.StatisticsMetadataImpl.java

/**
 * Calculate summary statistics for a dataset
 * @param ignoreNaNs if true, ignore NaNs
 * @param ignoreInfs if true, ignore infinities
 */
@SuppressWarnings("unchecked")
private SummaryStatistics[] createSummaryStats(final MaxMin<T> mm, final boolean ignoreNaNs,
        final boolean ignoreInfs) {
    final IndexIterator iter = dataset.getIterator();
    SummaryStatistics[] istats = new SummaryStatistics[isize];
    for (int i = 0; i < isize; i++) {
        istats[i] = new SummaryStatistics();
        // sum of logs is slow and we don't use it, so blocking its calculation here
        istats[i].setSumLogImpl(new NullStorelessUnivariateStatistic());
    }

    SummaryStatistics stats;
    if (isize == 1) {
        boolean hasNaNs = false;
        stats = istats[0];
        if (dataset.hasFloatingPointElements() && (ignoreNaNs || ignoreInfs)) {
            while (iter.hasNext()) {
                final double val = dataset.getElementDoubleAbs(iter.index);
                hash = (int) (hash * 19 + Double.doubleToRawLongBits(val));
                if (Double.isNaN(val)) {
                    if (ignoreNaNs)
                        continue;
                    hasNaNs = true;
                } else if (Double.isInfinite(val)) {
                    if (ignoreInfs)
                        continue;
                }
                stats.addValue(val);
            }
        } else if (dataset.hasFloatingPointElements()) {
            while (iter.hasNext()) {
                final double val = dataset.getElementDoubleAbs(iter.index);
                hash = (int) (hash * 19 + Double.doubleToRawLongBits(val));
                if (Double.isNaN(val)) {
                    hasNaNs = true;
                }
                stats.addValue(val);
            }
        } else {
            while (iter.hasNext()) {
                final long val = dataset.getElementLongAbs(iter.index);
                hash = (int) (hash * 19 + val);
                stats.addValue(val);
            }
        }

        mm.maximum = (T) (hasNaNs ? Double.NaN : DTypeUtils.fromDoubleToBiggestNumber(stats.getMax(), dtype));
        mm.minimum = (T) (hasNaNs ? Double.NaN : DTypeUtils.fromDoubleToBiggestNumber(stats.getMin(), dtype));
    } else {
        double[] vals = new double[isize];
        while (iter.hasNext()) {
            boolean okay = true;
            for (int j = 0; j < isize; j++) {
                final double val = dataset.getElementDoubleAbs(iter.index + j);
                if (ignoreNaNs && Double.isNaN(val)) {
                    okay = false;
                    break;
                }
                if (ignoreInfs && Double.isInfinite(val)) {
                    okay = false;
                    break;
                }
                vals[j] = val;
            }
            if (okay) {
                for (int j = 0; j < isize; j++) {
                    double val = vals[j];
                    istats[j].addValue(val);
                    hash = (int) (hash * 19 + Double.doubleToRawLongBits(val));
                }
            }
        }

        double[] lmax = new double[isize];
        double[] lmin = new double[isize];
        for (int j = 0; j < isize; j++) {
            stats = istats[j];
            lmax[j] = stats.getMax();
            lmin[j] = stats.getMin();
        }
        mm.maximum = (T) lmax;
        mm.minimum = (T) lmin;
    }

    hash = hash * 19 + dtype * 17 + isize;
    mm.maximumPositions = null;
    mm.minimumPositions = null;
    return istats;
}