Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getN

List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getN

Introduction

On this page you can find example usages of org.apache.commons.math3.stat.descriptive DescriptiveStatistics getN.

Prototype

public long getN() 

Source Link

Document

Returns the number of available values.

Usage

From source file:com.intuit.tank.vm.common.util.ReportUtil.java

/**
 * Builds one summary table row for the given page key from the supplied statistics.
 *
 * @param key   page identifier placed in the first column
 * @param stats accumulated statistics for the page
 * @return a row sized to the summary headers plus one column per configured percentile
 */
public static final String[] getSummaryData(String key, DescriptiveStatistics stats) {
    String[] row = new String[ReportUtil.SUMMARY_HEADERS.length + PERCENTILES.length];
    int col = 0;
    row[col++] = key;                                            // Page ID
    row[col++] = INT_NF.format(stats.getN());                    // Sample Size
    row[col++] = DOUBLE_NF.format(stats.getMean());              // Mean
    row[col++] = INT_NF.format(stats.getPercentile(50));         // Median
    row[col++] = INT_NF.format(stats.getMin());                  // Min
    row[col++] = INT_NF.format(stats.getMax());                  // Max
    row[col++] = DOUBLE_NF.format(stats.getStandardDeviation()); // Std Dev
    row[col++] = DOUBLE_NF.format(stats.getKurtosis());          // Kurtosis
    row[col++] = DOUBLE_NF.format(stats.getSkewness());          // Skewness
    row[col++] = DOUBLE_NF.format(stats.getVariance());          // Variance
    // One trailing column per configured percentile bucket; the percentile value
    // is stored at index 1 of each PERCENTILES entry.
    for (int p = 0; p < PERCENTILES.length; p++) {
        row[col++] = INT_NF.format(stats.getPercentile((Integer) PERCENTILES[p][1]));
    }
    return row;
}

From source file:com.intuit.tank.service.impl.v1.report.SummaryReportRunner.java

/**
 * @param jobId/*from   w  ww. j  a  v  a  2  s. c o  m*/
 * @param key
 * @param stats
 * @return
 */
private static PeriodicData getBucketData(int jobId, String key, BucketDataItem bucketItem) {
    DescriptiveStatistics stats = bucketItem.getStats();
    PeriodicData ret = PeriodicDataBuilder.periodicData().withJobId(jobId).withMax(stats.getMax())
            .withMean(stats.getMean()).withMin(stats.getMin()).withPageId(key)
            .withSampleSize((int) stats.getN()).withPeriod(bucketItem.getPeriod())
            .withTimestamp(bucketItem.getStartTime()).build();
    return ret;
}

From source file:com.github.aptd.simulation.core.statistic.local.CStatistic.java

/**
 * write data/*  w w w .  j  a v a2s.c  o  m*/
 *
 * @param p_writer writer instance
 * @param p_name section name
 * @param p_statistic statistic value
 */
private static void apply(final IWriter p_writer, final String p_name,
        final DescriptiveStatistics p_statistic) {
    p_writer.section(1, p_name);

    p_writer.value("geometricmean", p_statistic.getGeometricMean());
    p_writer.value("kurtosis", p_statistic.getKurtosis());
    p_writer.value("max", p_statistic.getMax());
    p_writer.value("min", p_statistic.getMin());
    p_writer.value("mean", p_statistic.getMean());
    p_writer.value("count", p_statistic.getN());
    p_writer.value("25-percentile", p_statistic.getPercentile(0.25));
    p_writer.value("75-percentile", p_statistic.getPercentile(0.75));
    p_writer.value("populationvariance", p_statistic.getPopulationVariance());
    p_writer.value("quadraticmean", p_statistic.getQuadraticMean());
    p_writer.value("standdeviation", p_statistic.getStandardDeviation());
    p_writer.value("skewness", p_statistic.getSkewness());
    p_writer.value("sum", p_statistic.getSum());
    p_writer.value("sumsequared", p_statistic.getSumsq());
    p_writer.value("variance", p_statistic.getVariance());
}

From source file:com.intuit.tank.service.impl.v1.report.SummaryReportRunner.java

/**
 * @param key/*from   www.ja v a  2 s  .c  om*/
 * @param value
 * @return
 */
private static SummaryData getSummaryData(int jobId, String key, DescriptiveStatistics stats) {
    SummaryData ret = SummaryDataBuilder.summaryData().withJobId(jobId)
            .withKurtosis(!Double.isNaN(stats.getKurtosis()) ? stats.getKurtosis() : 0).withMax(stats.getMax())
            .withMean(stats.getMean()).withMin(stats.getMin()).withPageId(key)
            .withPercentile10(stats.getPercentile(10)).withPercentile20(stats.getPercentile(20))
            .withPercentile30(stats.getPercentile(30)).withPercentile40(stats.getPercentile(40))
            .withPercentile50(stats.getPercentile(50)).withPercentile60(stats.getPercentile(60))
            .withPercentile70(stats.getPercentile(70)).withPercentile80(stats.getPercentile(80))
            .withPercentile90(stats.getPercentile(90)).withPercentile95(stats.getPercentile(95))
            .withPercentile99(stats.getPercentile(99)).withSampleSize((int) stats.getN())
            .withSkewness(!Double.isNaN(stats.getSkewness()) ? stats.getSkewness() : 0)
            .withSttDev(!Double.isNaN(stats.getStandardDeviation()) ? stats.getStandardDeviation() : 0)
            .withVarience(!Double.isNaN(stats.getVariance()) ? stats.getVariance() : 0).build();
    return ret;
}

From source file:me.datamining.bandwidth.ScottsRule.java

/**
 * Delegates to the overload that takes an explicit sample count, using the
 * number of values currently held in {@code data}.
 *
 * @param variance   sample variance
 * @param dimensions number of dimensions
 * @param data       statistics whose sample count is used
 * @return the computed bandwidth
 */
public double bandWidth(double variance, int dimensions, DescriptiveStatistics data) {
    final long sampleCount = data.getN();
    return this.bandWidth(variance, dimensions, sampleCount);
}

From source file:io.atomix.cluster.impl.PhiAccrualFailureDetector.java

/**
 * Compute phi for the specified node id.
 *
 * @return phi value/* w w w.j av  a2s .  co m*/
 */
public double phi() {
    long latestHeartbeat = history.latestHeartbeatTime();
    DescriptiveStatistics samples = history.samples();
    if (samples.getN() < minSamples) {
        return 0.0;
    }
    return computePhi(samples, latestHeartbeat, System.currentTimeMillis());
}

From source file:com.insightml.data.features.stats.FeatureStatistics.java

/**
 * Returns the total observation count for a feature: recorded values plus
 * the number of null occurrences tracked separately.
 *
 * @param feature feature name to look up
 * @return observed sample count including nulls (0 values if never recorded)
 */
public int getN(final String feature) {
    final DescriptiveStatistics stat = stats.get(feature);
    final long observed = stat != null ? stat.getN() : 0;
    return (int) (observed + getNull(feature));
}

From source file:io.atomix.cluster.impl.PhiAccrualFailureDetector.java

/**
 * Computes the phi value from the given samples.
 * <p>
 * The original phi value in Hayashibara's paper is calculated based on a normal
 * distribution. Here, we calculate it based on an exponential distribution.
 *
 * @param samples       the samples from which to compute phi
 * @param lastHeartbeat the last heartbeat
 * @param currentTime   the current time
 * @return phi
 */
private double computePhi(DescriptiveStatistics samples, long lastHeartbeat, long currentTime) {
    final long sampleCount = samples.getN();
    final long elapsed = currentTime - lastHeartbeat;
    if (sampleCount <= 0) {
        // No history yet: report a suspicion level high enough to flag the node.
        return 100;
    }
    return phiFactor * elapsed / samples.getMean();
}

From source file:com.datatorrent.netlet.benchmark.util.BenchmarkResults.java

/**
 * Renders the benchmark statistics as a single pipe-delimited summary line:
 * iteration count, mean/min/max times, and a fixed set of latency percentiles.
 *
 * @return the formatted results line
 */
private String getResults() {
    final DescriptiveStatistics statistics = getDescriptiveStatistics();
    final StringBuilder report = new StringBuilder();
    report.append("Iterations: ").append(statistics.getN());
    report.append(" | Avg Time: ").append(fromNanoTime(statistics.getMean()));
    report.append(" | Min Time: ").append(fromNanoTime(statistics.getMin()));
    report.append(" | Max Time: ").append(fromNanoTime(statistics.getMax()));
    // Latency percentiles of interest, with their display labels, in ascending order.
    final String[] labels = {"75%", "90%", "99%", "99.9%", "99.99%", "99.999%"};
    final double[] quantiles = {75d, 90d, 99d, 99.9d, 99.99d, 99.999d};
    for (int i = 0; i < labels.length; i++) {
        report.append(" | ").append(labels[i]).append(" Time: ")
                .append(fromNanoTime(statistics.getPercentile(quantiles[i])));
    }
    return report.toString();
}

From source file:me.datamining.bandwidth.MesureOfSpread.java

/**
 * Silverman-style rule-of-thumb bandwidth based on the spread of the data:
 * 0.9 * min(variance, IQR / 1.34) * n^(-1/5), where the interquartile range
 * is taken between the configured q1_ and q3_ percentiles.
 *
 * @param variance   sample variance
 * @param dimensions number of dimensions (unused by this rule)
 * @param data       sample data providing the percentiles and count
 * @return the computed bandwidth
 */
public double bandWidth(double variance, int dimensions, DescriptiveStatistics data) {
    final double lowerQuartile = data.getPercentile(q1_);
    final double upperQuartile = data.getPercentile(q3_);
    final double spread = Math.min(variance, (upperQuartile - lowerQuartile) / 1.34);
    return 0.9 * spread * Math.pow(data.getN(), -0.2);
}