Example usage for org.apache.commons.math.stat.descriptive DescriptiveStatistics getN

Introduction

On this page you can find example usages of org.apache.commons.math.stat.descriptive.DescriptiveStatistics.getN().

Prototype

public long getN() 

Document

Returns the number of available values.
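
Before the real-world usage examples below, here is a minimal, self-contained sketch of getN() in isolation; the class name, sample values, and printed output are illustrative only and are not taken from any of the projects listed on this page.

import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

public class GetNExample {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();

        // add a few sample values (illustrative data only)
        double[] values = { 4, 3, 3, 2 };
        for (double value : values) {
            stats.addValue(value);
        }

        // getN() reports how many values have been added to the (unbounded) window
        long n = stats.getN(); // 4 for the data above
        System.out.println("n = " + n + ", mean = " + stats.getMean());
    }
}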

Usage

From source file:org.a3badran.platform.logging.writer.MetricsWriter.java

private Map<String, Long> getAllMetrics() {
    Map<String, Long> metrics = new HashMap<String, Long>();
    for (Entry<String, DescriptiveStatistics> entry : sampleMetrics.entrySet()) {
        // create a copy to reduce locking
        String name = entry.getKey();
        DescriptiveStatistics stats = entry.getValue().copy();
        metrics.put(name + ".sampleCount", (long) stats.getN());
        metrics.put(name + ".max", (long) stats.getMax());
        metrics.put(name + ".min", (long) stats.getMin());
        metrics.put(name + ".avg", (long) stats.getMean());
        metrics.put(name + ".50p", (long) stats.getPercentile(50));
        metrics.put(name + ".90p", (long) stats.getPercentile(90));
        metrics.put(name + ".99p", (long) stats.getPercentile(99));
    }

    for (Entry<String, DescriptiveStatistics> cEntry : sampleCounterMetrics.entrySet()) {
        // create a copy to reduce locking
        String cName = cEntry.getKey();
        DescriptiveStatistics cStats = cEntry.getValue().copy();
        metrics.put(cName + ".max", (long) cStats.getMax());
        metrics.put(cName + ".min", (long) cStats.getMin());
        metrics.put(cName + ".avg", (long) cStats.getMean());
        metrics.put(cName + ".50p", (long) cStats.getPercentile(50));
        metrics.put(cName + ".90p", (long) cStats.getPercentile(90));
        metrics.put(cName + ".99p", (long) cStats.getPercentile(99));
    }

    for (Entry<String, AtomicLong> entry : scopeTotalMetrics.entrySet()) {
        metrics.put(entry.getKey(), entry.getValue().longValue());
    }

    for (Entry<String, AtomicLong> entry : appTotalMetrics.entrySet()) {
        metrics.put(entry.getKey(), entry.getValue().longValue());
    }

    return metrics;
}

From source file:org.apache.jackrabbit.oak.benchmark.AbstractTest.java

private void runTest(RepositoryFixture fixture, Repository repository, List<Integer> concurrencyLevels)
        throws Exception {

    setUp(repository, CREDENTIALS);
    try {

        // Run a few iterations to warm up the system
        long warmupEnd = System.currentTimeMillis() + WARMUP;
        boolean stop = false;
        while (System.currentTimeMillis() < warmupEnd && !stop) {
            if (!stop) {
                // we want to execute this at least once; after that we consider the
                // `haltRequested` flag.
                stop = haltRequested;
            }
            execute();
        }

        if (concurrencyLevels == null || concurrencyLevels.isEmpty()) {
            concurrencyLevels = Arrays.asList(1);
        }

        for (Integer concurrency : concurrencyLevels) {
            // Run the test
            DescriptiveStatistics statistics = runTest(concurrency);
            if (statistics.getN() > 0) {
                System.out.format("%-28.28s  %6d  %6.0f  %6.0f  %6.0f  %6.0f  %6.0f  %6d%n", fixture.toString(),
                        concurrency, statistics.getMin(), statistics.getPercentile(10.0),
                        statistics.getPercentile(50.0), statistics.getPercentile(90.0), statistics.getMax(),
                        statistics.getN());
                if (out != null) {
                    out.format("%-28.28s, %6d, %6.0f, %6.0f, %6.0f, %6.0f, %6.0f, %6d%n", fixture.toString(),
                            concurrency, statistics.getMin(), statistics.getPercentile(10.0),
                            statistics.getPercentile(50.0), statistics.getPercentile(90.0), statistics.getMax(),
                            statistics.getN());
                }
            }

        }
    } finally {
        tearDown();
    }
}

From source file:org.apache.jackrabbit.performance.AbstractPerformanceTest.java

private void runTest(AbstractTest test, String name, byte[] conf) {
    if (repoPattern.matcher(name).matches() && testPattern.matcher(test.toString()).matches()) {
        // Create the repository directory
        File dir = new File(new File("target", "repository"), name + "-" + test);
        dir.mkdirs();

        try {
            // Copy the configuration file into the repository directory
            File xml = new File(dir, "repository.xml");
            OutputStream output = FileUtils.openOutputStream(xml);
            try {
                output.write(conf, 0, conf.length);
            } finally {
                output.close();
            }

            // Create the repository
            RepositoryImpl repository = createRepository(dir, xml);
            try {
                // Run the test
                DescriptiveStatistics statistics = runTest(test, repository);
                if (statistics.getN() > 0) {
                    writeReport(test.toString(), name, statistics);
                }
            } finally {
                repository.shutdown();
            }
        } catch (Throwable t) {
            System.out.println("Unable to run " + test + ": " + t.getMessage());
        } finally {
            FileUtils.deleteQuietly(dir);
        }
    }
}

From source file:org.apache.sling.performance.FrameworkPerformanceMethod.java

@Override
public Object invokeExplosively(Object target, Object... params) throws Throwable {
    // Executes the test method on the supplied target

    // Check if this is the first test running from this specific
    // PerformanceSuite
    // and run the BeforeSuite methods
    if ((performanceSuiteState != null) && (performanceSuiteState.getBeforeSuiteMethod() != null)
            && (performanceSuiteState.getTargetObjectSuite() != null)
            && (performanceSuiteState.getNumberOfExecutedMethods() == 0)
            && !performanceSuiteState.testSuiteName.equals(ParameterizedTestList.TEST_CASE_ONLY)) {
        performanceSuiteState.getBeforeSuiteMethod().invoke(performanceSuiteState.getTargetObjectSuite());
    }

    // In case of a PerformanceSuite we need to run the methods annotated
    // with Before and After
    // ourselves as JUnit can't find them (JUnit is looking for them in the
    // test suite class);
    // in case we don't have to deal with a PerformanceSuite just skip this
    // as JUnit will run the methods itself
    if ((performanceSuiteState != null)
            && !performanceSuiteState.testSuiteName.equals(ParameterizedTestList.TEST_CASE_ONLY)) {

        recursiveCallSpecificMethod(this.target.getClass(), this.target, Before.class);
    }

    // Need to count the number of tests run from the PerformanceSuite
    // so that we can call the AfterSuite method after the last test from
    // the suite
    // has run and the AfterSuite needs to run
    performanceSuiteState.incrementNumberOfExecutedTestMethods();

    Object response = null;

    Method testMethodToInvoke = this.getMethod();

    PerformanceTest performanceAnnotation = testMethodToInvoke.getAnnotation(PerformanceTest.class);

    // retrieve the test configuration options
    int warmuptime = performanceAnnotation.warmuptime();
    int runtime = performanceAnnotation.runtime();
    int warmupinvocations = performanceAnnotation.warmupinvocations();
    int runinvocations = performanceAnnotation.runinvocations();
    double threshold = performanceAnnotation.threshold();

    DescriptiveStatistics statistics = new DescriptiveStatistics();

    if (warmupinvocations != 0) {
        // Run the number of invocation specified in the annotation
        // for warming up the system
        for (int invocationIndex = 0; invocationIndex < warmupinvocations; invocationIndex++) {

            recursiveCallSpecificMethod(this.target.getClass(), this.target, BeforeMethodInvocation.class);

            // TODO: implement the method to run before a specific test method
            // recursiveCallSpecificMethod(this.target.getClass(),
            // this.target, BeforeSpecificTest.class);

            response = super.invokeExplosively(this.target, params);

            // TODO: implement the method to run after a specific test method
            // recursiveCallSpecificMethod(this.target.getClass(),
            // this.target, AfterSpecificTest.class);

            recursiveCallSpecificMethod(this.target.getClass(), this.target, AfterMethodInvocation.class);
        }
    } else {
        // Run a few iterations to warm up the system
        long warmupEnd = System.currentTimeMillis() + warmuptime * 1000;
        while (System.currentTimeMillis() < warmupEnd) {
            recursiveCallSpecificMethod(this.target.getClass(), this.target, BeforeMethodInvocation.class);

            // TODO: implement the method to run before a specific test method
            // recursiveCallSpecificMethod(this.target.getClass(),
            // this.target, BeforeSpecificTest.class);

            response = super.invokeExplosively(this.target, params);

            // recursiveCallSpecificMethod(this.target.getClass(),
            // this.target, AfterSpecificTest.class);
            // TODO: implement the method to run after a specific test method

            recursiveCallSpecificMethod(this.target.getClass(), this.target, AfterMethodInvocation.class);
        }
    }

    // System.out.println("Warmup ended - test :" +
    // testMethodToInvoke.getName());
    if (runinvocations != 0) {
        // Run the specified number of iterations and capture the execution
        // times
        for (int invocationIndex = 0; invocationIndex < runinvocations; invocationIndex++) {

            response = this.invokeTimedTestMethod(testMethodToInvoke, statistics, params);
        }
    } else {
        // Run test iterations and capture the execution times
        long runtimeEnd = System.currentTimeMillis() + runtime * 1000;

        while (System.currentTimeMillis() < runtimeEnd) {

            response = this.invokeTimedTestMethod(testMethodToInvoke, statistics, params);

        }
    }

    if (statistics.getN() > 0) {
        if (referenceMethod == null) {
            ReportLogger.writeReport(this.performanceSuiteState.testSuiteName, testCaseName, className,
                    getMethod().getName(), statistics, ReportLogger.ReportType.TXT, reportLevel);
        } else {
            ReportLogger reportLogger = ReportLogger.getOrCreate(this.performanceSuiteState.testSuiteName,
                    testCaseName, getMethod().getDeclaringClass().getName(), referenceMethod);
            reportLogger.recordStatistics(getMethod().getName(), statistics, threshold);
        }
    }

    // In case of a PerformanceSuite we need to run the methods annotated
    // with Before and After
    // ourselves as JUnit can't find them; in case we don't have to deal
    // with a PerformanceSuite
    // just skip this as JUnit will run the methods itself
    if ((performanceSuiteState != null)
            && !performanceSuiteState.testSuiteName.equals(ParameterizedTestList.TEST_CASE_ONLY)) {

        recursiveCallSpecificMethod(this.target.getClass(), this.target, After.class);
    }

    // Check if this is the last test running from a PerformanceSuite
    // and run the AfterSuite method
    if ((performanceSuiteState != null) && (performanceSuiteState.getAfterSuiteMethod() != null)
            && (performanceSuiteState.getTargetObjectSuite() != null)
            && (performanceSuiteState.getNumberOfExecutedMethods() == performanceSuiteState
                    .getNumberOfMethodsInSuite())
            && !performanceSuiteState.testSuiteName.equals(ParameterizedTestList.TEST_CASE_ONLY)) {
        performanceSuiteState.getAfterSuiteMethod().invoke(performanceSuiteState.getTargetObjectSuite());
    }

    return response;
}

From source file:org.bresearch.websec.test.CommonsMathTest.java

public void test1() throws Exception {

    /* min, max, mean, geometric mean, n, sum, sum of squares, 
     * standard deviation, variance, percentiles, skewness, kurtosis, median */

    // Create a DescriptiveStatistics instance (the default constructor uses an unbounded window)
    DescriptiveStatistics stats = new DescriptiveStatistics();

    final double[] inputArray = { 4, 3, 3, 2 };

    // Add the data from the array
    for (int i = 0; i < inputArray.length; i++) {
        stats.addValue(inputArray[i]);
    }

    // Compute some statistics
    double mean = stats.getMean();
    double std = stats.getStandardDeviation();
    long n = stats.getN(); // 4, since four values were added above
    assertEquals("3.0", "" + mean);
    assertEquals("0.816496580927726", "" + std);
}

From source file:org.bresearch.websec.test.CommonsMathTest.java

public void test2() {
    final String data2 = (new WordProcessor()).filterOnlyAlphaNumeric(
            " !!!   Hello my name is a person.   Hello how are you doing.  hello, this is great.  What do you think?   ");

    final BotlistStringUtils utils = new BotlistStringUtils();
    final List<String> a = utils.buildWordList(data2);
    assertEquals(27, a.size());

    DescriptiveStatistics stats = new DescriptiveStatistics();

    for (int i = 0; i < utils.mapReduceCount(a, -1).length; i++) {
        stats.addValue(utils.mapReduceCount(a, -1)[i]);
    }

    // Compute some statistics
    double mean = stats.getMean();
    double std = stats.getStandardDeviation();
    assertEquals("1.2666666666666666", "" + mean);
    assertEquals("0.5936168397046634", "" + std);

    long n = stats.getN();
    assertEquals("15", "" + n);

}

From source file:org.bresearch.websec.test.CommonsMathTest.java

public void test5() {
    final DocumentWordStats docStats = new DocumentWordStats(ConstDoc.CONST_SM);
    final DescriptiveStatistics stats = docStats.mapReduceStats();

    System.out.println("" + stats.getSum());
    System.out.println("" + stats.getMean());
    System.out.println("" + stats.getN());
    System.out.println("" + stats.getGeometricMean());
    System.out.println("" + stats.getMax());

}

From source file:org.fusesource.eca.processor.StatisticsCalculator.java

protected void process(StatisticsType type, Number value, ObjectNode statsNode) throws Exception {
    EventCache<Number> cache = this.eventCache;
    if (value != null && cache != null) {
        cache.add(value);
        if (type.equals(StatisticsType.RATE)) {
            calculateRate(statsNode);
        } else {
            List<Number> list = this.eventCache.getWindow();
            DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics();
            if (list != null && !list.isEmpty()) {
                for (Number number : list) {
                    descriptiveStatistics.addValue(number.doubleValue());
                }
                switch (type) {
                case MEAN:
                    statsNode.put("mean", descriptiveStatistics.getMean());
                    break;
                case GEOMETRIC_MEAN:
                    statsNode.put("gemetric mean", descriptiveStatistics.getGeometricMean());
                    break;
                case STDDEV:
                    statsNode.put("std-dev", descriptiveStatistics.getStandardDeviation());
                    break;
                case MIN:
                    statsNode.put("minimum", descriptiveStatistics.getMin());
                    break;
                case MAX:
                    statsNode.put("maximum", descriptiveStatistics.getMax());
                    break;
                case SKEWNESS:
                    statsNode.put("skewness", descriptiveStatistics.getSkewness());
                    break;
                case KUTOSIS:
                    statsNode.put("kurtosis", descriptiveStatistics.getKurtosis());
                    break;
                case VARIANCE:
                    statsNode.put("variance", descriptiveStatistics.getVariance());
                    break;
                case COUNT:
                    statsNode.put("count", list.size());
                default:
                    statsNode.put("number", descriptiveStatistics.getN());
                    statsNode.put("mean", descriptiveStatistics.getMean());
                    statsNode.put("gemetric mean", descriptiveStatistics.getGeometricMean());
                    statsNode.put("minimum", descriptiveStatistics.getMin());
                    statsNode.put("maximum", descriptiveStatistics.getMax());
                    statsNode.put("std-dev", descriptiveStatistics.getStandardDeviation());
                    statsNode.put("median", descriptiveStatistics.getPercentile(50));
                    statsNode.put("skewness", descriptiveStatistics.getSkewness());
                    statsNode.put("kurtosis", descriptiveStatistics.getKurtosis());
                    statsNode.put("variance", descriptiveStatistics.getVariance());
                    calculateRate(statsNode);
                    statsNode.put("count", list.size());
                }
            }
        }

    }
}

From source file:org.matsim.contrib.common.stats.StatsWriter.java

/**
 * Writes a table with columns map-key and statistical indicators mean, median, min, max and number of samples. Rows
 * are sorted according to the natural order of the map keys.
 *
 * @param statsMap a map with {@code DescriptiveStatistics} objects
 * @param keyLabel the header for the first column (containing the map keys)
 * @param file     the filename
 * @throws IOException
 */
public static void writeStatistics(TDoubleObjectHashMap<DescriptiveStatistics> statsMap, String keyLabel,
        String file) throws IOException {
    double[] keys = statsMap.keys();
    Arrays.sort(keys);

    BufferedWriter writer = new BufferedWriter(new FileWriter(file));

    writer.write(keyLabel);
    writer.write(TAB);
    writer.write("mean");
    writer.write(TAB);
    writer.write("median");
    writer.write(TAB);
    writer.write("min");
    writer.write(TAB);
    writer.write("max");
    writer.write(TAB);
    writer.write("n");
    writer.newLine();

    for (double key : keys) {
        DescriptiveStatistics stats = statsMap.get(key);

        writer.write(String.valueOf(key));
        writer.write(TAB);
        writer.write(String.valueOf(stats.getMean()));
        writer.write(TAB);
        writer.write(String.valueOf(stats.getPercentile(50)));
        writer.write(TAB);
        writer.write(String.valueOf(stats.getMin()));
        writer.write(TAB);
        writer.write(String.valueOf(stats.getMax()));
        writer.write(TAB);
        writer.write(String.valueOf(stats.getN()));
        writer.newLine();
    }

    writer.close();
}
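
The javadoc above describes the table layout written by writeStatistics. As a rough illustration, a caller might populate the map and invoke the method as sketched below; the key, sample values, output path, and the Trove import path (assumed Trove 3.x) are assumptions made for this sketch, not taken from the MatSim sources.

import gnu.trove.map.hash.TDoubleObjectHashMap; // assumed Trove 3.x import path
import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;
import org.matsim.contrib.common.stats.StatsWriter;

public class StatsWriterSketch {
    public static void main(String[] args) throws java.io.IOException {
        TDoubleObjectHashMap<DescriptiveStatistics> statsMap = new TDoubleObjectHashMap<DescriptiveStatistics>();

        // one DescriptiveStatistics object per key (here: a single key, 10.0, with two samples)
        DescriptiveStatistics binStats = new DescriptiveStatistics();
        binStats.addValue(1.5);
        binStats.addValue(2.5);
        statsMap.put(10.0, binStats);

        // writes one tab-separated row per key: key, mean, median, min, max, n (from getN())
        StatsWriter.writeStatistics(statsMap, "bin", "/tmp/stats.txt");
    }
}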

From source file:org.matsim.contrib.socnetgen.sna.graph.analysis.AnalyzerTask.java

protected void printStats(DescriptiveStatistics stats, String key) {
    logger.info(String.format(
            "Statistics for property %1$s:\n\tmean = %2$.4f, min = %3$.4f, max = %4$.4f, N = %5$s, Var = %6$.4f",
            key, stats.getMean(), stats.getMin(), stats.getMax(), stats.getN(), stats.getVariance()));
}