Example usage for org.apache.commons.math.stat.descriptive SummaryStatistics SummaryStatistics

Introduction

On this page you can find example usages of the SummaryStatistics() constructor from org.apache.commons.math.stat.descriptive, collected from open-source projects.

Prototype

public SummaryStatistics() 

Document

Construct a SummaryStatistics instance
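
The no-argument constructor creates an empty SummaryStatistics instance: observations are fed in one at a time with addValue(double), and aggregate results (count, mean, variance, standard deviation, min, max, sum) can be read back at any point; the individual values themselves are not stored. A minimal, self-contained sketch of that pattern (the class name and sample values here are illustrative, not taken from any of the projects listed under Usage):

import org.apache.commons.math.stat.descriptive.SummaryStatistics;

public class SummaryStatisticsExample {
    public static void main(String[] args) {
        // Construct an empty instance; it maintains running aggregates rather than storing values.
        SummaryStatistics stats = new SummaryStatistics();

        // Feed in observations one at a time.
        for (double value : new double[] { 12.5, 11.8, 13.1, 12.9, 12.2 }) {
            stats.addValue(value);
        }

        // Read back aggregate statistics.
        System.out.println("n       = " + stats.getN());
        System.out.println("mean    = " + stats.getMean());
        System.out.println("std dev = " + stats.getStandardDeviation());
        System.out.println("min/max = " + stats.getMin() + " / " + stats.getMax());

        // clear() resets the instance for reuse, as several of the examples below do.
        stats.clear();
    }
}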

Usage

From source file:net.shipilev.fjptrace.tasks.PrintSummaryTask.java

private void summarizeEvents(PrintWriter pw, Events events) {
    SummaryStatistics completeTimes = new SummaryStatistics();
    SummaryStatistics execTimes = new SummaryStatistics();
    Map<Integer, Long> times = new HashMap<>();

    for (Event e : events) {
        switch (e.eventType) {
        case COMPLETING:
            times.put(e.tag, e.time);
            break;
        case COMPLETED: {
            Long startTime = times.get(e.tag);
            if (startTime != null) {
                completeTimes.addValue(e.time - startTime);
            }
            break;
        }
        case EXEC:
            times.put(e.tag, e.time);
            break;
        case EXECUTED: {
            Long startTime = times.get(e.tag);
            if (startTime != null) {
                execTimes.addValue(e.time - startTime);
            }
            break;
        }
        }
    }

    pw.println();
    pw.println("EXEC -> EXECUTED: " + TimeUnit.NANOSECONDS.toMillis((long) execTimes.getSum()) + "ms");
    pw.println(
            "COMPLETING -> COMPLETED: " + TimeUnit.NANOSECONDS.toMillis((long) completeTimes.getSum()) + "ms");

}

From source file:geogebra.kernel.statistics.AlgoTMean2Estimate.java

protected final void compute() {

    try {

        // get statistics from sample data input
        if (input.length == 4) {

            size1 = geoList1.size();
            if (!geoList1.isDefined() || size1 < 2) {
                result.setUndefined();
                return;
            }

            size2 = geoList2.size();
            if (!geoList2.isDefined() || size2 < 2) {
                result.setUndefined();
                return;
            }

            val1 = new double[size1];
            for (int i = 0; i < size1; i++) {
                GeoElement geo = geoList1.get(i);
                if (geo.isNumberValue()) {
                    NumberValue num = (NumberValue) geo;
                    val1[i] = num.getDouble();

                } else {
                    result.setUndefined();
                    return;
                }
            }

            val2 = new double[size2];
            for (int i = 0; i < size2; i++) {
                GeoElement geo = geoList2.get(i);
                if (geo.isNumberValue()) {
                    NumberValue num = (NumberValue) geo;
                    val2[i] = num.getDouble();

                } else {
                    result.setUndefined();
                    return;
                }
            }

            stats = new SummaryStatistics();
            for (int i = 0; i < val1.length; i++) {
                stats.addValue(val1[i]);
            }

            n1 = stats.getN();
            var1 = stats.getVariance();
            mean1 = stats.getMean();

            stats.clear();
            for (int i = 0; i < val2.length; i++) {
                stats.addValue(val2[i]);
            }

            n2 = stats.getN();
            var2 = stats.getVariance();
            mean2 = stats.getMean();

        } else {
            mean1 = geoMean1.getDouble();
            var1 = geoSD1.getDouble() * geoSD1.getDouble();
            n1 = geoN1.getDouble();

            mean2 = geoMean2.getDouble();
            var2 = geoSD2.getDouble() * geoSD2.getDouble();
            n2 = geoN2.getDouble();
        }

        level = geoLevel.getDouble();
        pooled = geoPooled.getBoolean();

        // validate statistics
        if (level < 0 || level > 1 || var1 < 0 || n1 < 1 || var2 < 0 || n2 < 1) {
            result.setUndefined();
            return;
        }

        // get interval estimate 
        me = getMarginOfError(var1, n1, var2, n2, level, pooled);

        // return list = {low limit, high limit, difference, margin of error, df }
        difference = mean1 - mean2;
        result.clear();
        boolean oldSuppress = cons.isSuppressLabelsActive();
        cons.setSuppressLabelCreation(true);
        result.add(new GeoNumeric(cons, difference - me));
        result.add(new GeoNumeric(cons, difference + me));
        //result.add(new GeoNumeric(cons, difference));
        //result.add(new GeoNumeric(cons, me));
        //result.add(new GeoNumeric(cons, getDegreeOfFreedom(var1, var2, n1, n2, pooled)));

        cons.setSuppressLabelCreation(oldSuppress);

    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (MathException e) {
        e.printStackTrace();
    }

}

From source file:geogebra.common.kernel.statistics.AlgoTMean2Estimate.java

@Override
public final void compute() {

    try {

        // get statistics from sample data input
        if (input.length == 4) {

            size1 = geoList1.size();
            if (!geoList1.isDefined() || size1 < 2) {
                result.setUndefined();
                return;
            }

            size2 = geoList2.size();
            if (!geoList2.isDefined() || size2 < 2) {
                result.setUndefined();
                return;
            }

            val1 = new double[size1];
            for (int i = 0; i < size1; i++) {
                GeoElement geo = geoList1.get(i);
                if (geo instanceof NumberValue) {
                    NumberValue num = (NumberValue) geo;
                    val1[i] = num.getDouble();

                } else {
                    result.setUndefined();
                    return;
                }
            }

            val2 = new double[size2];
            for (int i = 0; i < size2; i++) {
                GeoElement geo = geoList2.get(i);
                if (geo instanceof NumberValue) {
                    NumberValue num = (NumberValue) geo;
                    val2[i] = num.getDouble();

                } else {
                    result.setUndefined();
                    return;
                }
            }

            stats = new SummaryStatistics();
            for (int i = 0; i < val1.length; i++) {
                stats.addValue(val1[i]);
            }

            n1 = stats.getN();
            var1 = stats.getVariance();
            mean1 = stats.getMean();

            stats.clear();
            for (int i = 0; i < val2.length; i++) {
                stats.addValue(val2[i]);
            }

            n2 = stats.getN();
            var2 = stats.getVariance();
            mean2 = stats.getMean();

        } else {
            mean1 = geoMean1.getDouble();
            var1 = geoSD1.getDouble() * geoSD1.getDouble();
            n1 = geoN1.getDouble();

            mean2 = geoMean2.getDouble();
            var2 = geoSD2.getDouble() * geoSD2.getDouble();
            n2 = geoN2.getDouble();
        }

        level = geoLevel.getDouble();
        pooled = geoPooled.getBoolean();

        // validate statistics
        if (level < 0 || level > 1 || var1 < 0 || n1 < 1 || var2 < 0 || n2 < 1) {
            result.setUndefined();
            return;
        }

        // get interval estimate
        me = getMarginOfError(var1, n1, var2, n2, level, pooled);

        // return list = {low limit, high limit, difference, margin of
        // error, df }
        difference = mean1 - mean2;
        result.clear();
        boolean oldSuppress = cons.isSuppressLabelsActive();
        cons.setSuppressLabelCreation(true);
        result.add(new GeoNumeric(cons, difference - me));
        result.add(new GeoNumeric(cons, difference + me));
        // result.add(new GeoNumeric(cons, difference));
        // result.add(new GeoNumeric(cons, me));
        // result.add(new GeoNumeric(cons, getDegreeOfFreedom(var1, var2,
        // n1, n2, pooled)));

        cons.setSuppressLabelCreation(oldSuppress);

    } catch (IllegalArgumentException e) {
        e.printStackTrace();
    } catch (MathException e) {
        e.printStackTrace();
    }

}

From source file:com.vmware.upgrade.progress.impl.SimpleAggregatingProgressReporter.java

private int calculateProgress() {
    final SummaryStatistics childProgress = new SummaryStatistics();
    for (final PropagatingListener listener : childListeners) {
        childProgress.addValue(listener.getCurrentProgressReport().getProgress());
    }

    final int roundedProgress = (int) Math.round(childProgress.getMean());
    return roundedProgress;
}

From source file:edu.scripps.fl.curves.plot.CurvePlot.java

protected YIntervalSeries getSeries(Map<Double, Collection<Double>> map, String description) {
    YIntervalSeries series = new YIntervalSeries(description);
    series.setDescription(description);
    for (Object o : map.keySet()) {
        SummaryStatistics stats = new SummaryStatistics();
        Collection<Double> values = (Collection<Double>) map.get(o);
        for (Double d : values)
            stats.addValue(d);
        double avg = stats.getMean();
        double stddev = stats.getStandardDeviation();
        //         System.out.println(String.format("Adding %e\t%.2f\t%.2f",o, avg, stddev));
        series.add((Double) o, avg, avg - stddev, avg + stddev);
    }
    return series;
}

From source file:edu.cornell.med.icb.goby.modes.CompactFileStatsMode.java

/**
 * Print statistics about an alignment file in the Goby compact form.
 *
 * @param file The file to display statistics about
 * @throws IOException if the file cannot be read
 */
private void describeCompactAlignment(final File file) throws IOException {

    final String basename = AlignmentReaderImpl.getBasename(file.toString());
    stream.printf("Compact Alignment basename = %s%n", basename);

    final AlignmentReaderImpl reader = new AlignmentReaderImpl(basename);
    reader.readHeader();
    stream.println("Info from header:");
    stream.printf("Alignment written with Goby version=%s %n", reader.getGobyVersion());
    stream.printf("Alignment produced by aligner=%s version=%s %n", reader.getAlignerName(),
            reader.getAlignerVersion());
    stream.printf("Sorted: %b%n", reader.isSorted());
    stream.printf("Indexed: %b%n", reader.isIndexed());
    stream.printf("Number of target sequences = %,d%n", reader.getNumberOfTargets());
    final int[] targetLengthsFromHeader = reader.getTargetLength();
    stream.printf("Number of target length entries = %,d%n", ArrayUtils.getLength(reader.getTargetLength()));
    stream.printf("smallestSplitQueryIndex = %d%n", reader.getSmallestSplitQueryIndex());
    stream.printf("largestSplitQueryIndex = %d%n", reader.getLargestSplitQueryIndex());

    // simple statistics for target lengths
    final SummaryStatistics targetLengthStats = new SummaryStatistics();
    if (targetLengthsFromHeader != null) {
        for (final double d : targetLengthsFromHeader) {
            targetLengthStats.addValue(d);
        }
    }
    stream.printf("Min target length = %,d%n", (int) targetLengthStats.getMin());
    stream.printf("Max target length = %,d%n", (int) targetLengthStats.getMax());
    stream.printf("Mean target length = %,.2f%n", targetLengthStats.getMean());
    stream.println();

    stream.printf("Number of query sequences = %,d%n", reader.getNumberOfQueries());

    final SummaryStatistics queryLengthStats = new SummaryStatistics();

    stream.println("Query lengths stored in entries = " + reader.isQueryLengthStoredInEntries());
    stream.println("Constant query lengths = " + reader.isConstantQueryLengths());

    stream.printf("Has query identifiers = %s%n",
            reader.getQueryIdentifiers() != null && !reader.getQueryIdentifiers().isEmpty());
    final IndexedIdentifier targetIdentifiers = reader.getTargetIdentifiers();
    final boolean hasTargetIdentifiers = targetIdentifiers != null && !targetIdentifiers.isEmpty();
    stream.printf("Has target identifiers = %s%n", hasTargetIdentifiers);
    stream.printf("Has query index permutation = %s%n", reader.getQueryIndicesWerePermuted());
    stream.printf("Has query index occurrences = %s%n", reader.hasQueryIndexOccurrences());
    stream.printf("Has all read quality scores = %s%n", reader.getHasAllReadQualityScores());
    stream.printf("Has ambiguity = %s%n", reader.hasAmbiguity());

    if (verbose) {
        if (hasTargetIdentifiers) {
            for (Map.Entry<MutableString, Integer> entry : targetIdentifiers.entrySet()) {
                stream.printf("  Target %s=%d with a length of %d%n", entry.getKey(), entry.getValue(),
                        targetLengthsFromHeader[entry.getValue()]);
            }
        } else {
            for (Map.Entry<MutableString, Integer> entry : targetIdentifiers.entrySet()) {
                stream.printf("  Target %d with a length of %d%n", entry.getValue(),
                        targetLengthsFromHeader[entry.getValue()]);
            }
        }
    }

    stream.println();

    if (reader.getReadOriginInfo().size() > 0) {
        stream.println("---- Read Origin Info ------");
        for (final Alignments.ReadOriginInfo info : reader.getReadOriginInfo().getPbList()) {
            stream.println("[");
            stream.print(info.toString());
            stream.println("]");
        }
    } else {
        stream.println("Alignment has no Read Origin Info/Read Groups");
    }
    if (headerOnly)
        return;
    // the query indices that aligned. Includes those
    final DistinctIntValueCounterBitSet alignedQueryIndices = new DistinctIntValueCounterBitSet();

    describeAmbigousReads(basename, reader.getNumberOfQueries(), alignedQueryIndices);

    int maxQueryIndex = -1;
    int maxTargetIndex = -1;
    int numEntries = 0;
    long numLogicalAlignmentEntries = 0;
    long total = 0;
    double avgScore = 0;
    int sumNumVariations = 0;
    int numPaired = 0;
    int numProperlyPaired = 0;
    int numFirstInPair = 0;
    int numSecondInPair = 0;
    boolean hasSoftClips = false;

    for (final Alignments.AlignmentEntry entry : reader) {
        numberOfReads++; // Across all files
        numEntries++; // Across this file
        numLogicalAlignmentEntries += Math.max(entry.getMultiplicity(), 1);
        total += entry.getQueryAlignedLength();
        avgScore += entry.getScore();
        maxQueryIndex = Math.max(maxQueryIndex, entry.getQueryIndex());
        maxTargetIndex = Math.max(maxTargetIndex, entry.getTargetIndex());
        cumulativeReadLength += entry.getQueryAlignedLength();
        minReadLength = Math.min(minReadLength, entry.getQueryAlignedLength());
        maxReadLength = Math.max(maxReadLength, entry.getQueryAlignedLength());
        sumNumVariations += entry.getSequenceVariationsCount();
        alignedQueryIndices.observe(entry.getQueryIndex());
        hasSoftClips |= entry.hasSoftClippedBasesLeft();
        hasSoftClips |= entry.hasSoftClippedBasesRight();
        // check entry then header for the query length

        final double queryLength = entry.getQueryLength();
        queryLengthStats.addValue(queryLength);

        numPaired += EntryFlagHelper.isPaired(entry) ? 1 : 0;
        numProperlyPaired += EntryFlagHelper.isProperlyPaired(entry) ? 1 : 0;
        numFirstInPair += EntryFlagHelper.isFirstInPair(entry) ? 1 : 0;
        numSecondInPair += EntryFlagHelper.isSecondInPair(entry) ? 1 : 0;
    }

    avgScore /= (double) numLogicalAlignmentEntries;

    final int numQuerySequences = reader.getNumberOfQueries();
    stream.printf("num query indices = %,d%n", numQuerySequences);
    final int numTargetSequences = maxTargetIndex + 1;
    final double avgNumVariationsPerQuery = ((double) sumNumVariations) / (double) numQuerySequences;
    stream.printf("num target indices = %,d%n", numTargetSequences);
    stream.printf("Number of alignment entries = %,d%n", numLogicalAlignmentEntries);
    stream.printf("Number of query indices that matched = %,d%n", alignedQueryIndices.count());
    stream.printf("Percent matched = %4.1f %% %n",
            (double) alignedQueryIndices.count() / (double) ((long) numQuerySequences) * 100.0d);
    stream.printf("Avg query alignment length = %,f%n", numEntries > 0 ? divide(total, numEntries) : -1);
    stream.printf("Avg score alignment = %f%n", avgScore);
    stream.printf("Avg number of variations per query sequence = %3.2f %n", avgNumVariationsPerQuery);
    // size, the number of bytes in the entries file.
    final long size = new File(basename + ".entries").length();
    stream.printf("Average bytes per entry = %f%n", divide(size, numLogicalAlignmentEntries));

    stream.printf("Min query length = %,d%n", (int) queryLengthStats.getMin());
    stream.printf("Max query length = %,d%n", (int) queryLengthStats.getMax());
    final double meanQueryLength = queryLengthStats.getMean();
    stream.printf("Mean query length = %,.2f%n", meanQueryLength);
    final int averageReadLength = (int) (Math.round(meanQueryLength));
    stream.printf("Average bits per read base, assuming average read length %d = %f%n", averageReadLength,
            divide(size, numLogicalAlignmentEntries * averageReadLength));

    stream.printf("Percent paired reads = %,.2f %% %n", divide(numPaired, numQuerySequences * 2) * 100d);
    stream.printf("Percent properly paired reads = %,.2f %% %n",
            divide(numProperlyPaired, numQuerySequences * 2) * 100d);
    stream.printf("Percent first in pair = %,.2f %% %n", divide(numFirstInPair, numEntries) * 100d);
    stream.printf("Percent second in pair = %,.2f %% %n", divide(numSecondInPair, numEntries) * 100d);

    stream.printf("Aligment entries have some softClips: %b %n", hasSoftClips);
}

From source file:edu.cornell.med.icb.geo.BinaryArrayProbesetMinNormalizer.java

private float[] estimateMinValueForProbesets(final double[] sumSignalBySample,
        final GEOPlatformIndexed platform, final int k, final double averageSumForSamples) {

    final float[] signal;
    final int minProbeIndex = 0;
    final int maxProbeIndex = platform.getNumProbeIds();

    try {

        final ArrayReader arrayReader = getReader(platform);
        signal = arrayReader.allocateSignalArray();
        final ObjectList<MutableString> sampleIds;
        sampleIds = arrayReader.getSampleIdList();

        final AnnotationSet annotationSet = readAnnotationSet(platform, sampleIds);

        progressLogger.expectedUpdates = sampleIds.size();
        progressLogger.start("Starting estimateMinValueForProbesets");
        final float[] minSignalAveragePerProbeset = new float[maxProbeIndex];
        // Create one queue per probeset where the signal values will be enqueued.
        // Capacity is set to k, and largest values are kept.
        final ScoredTranscriptBoundedSizeQueue[] smallestSignalValuesForProbesets = new ScoredTranscriptBoundedSizeQueue[maxProbeIndex];
        for (int i = 0; i < smallestSignalValuesForProbesets.length; i++) {
            smallestSignalValuesForProbesets[i] = new ScoredTranscriptBoundedSizeQueue(k);
        }

        for (int sampleIndex = 0; sampleIndex < sampleIds.size(); ++sampleIndex) {

            // load signal for one sample (all probesets)
            arrayReader.readNextSample(signal);

            for (int probesetIndex = minProbeIndex; probesetIndex < maxProbeIndex; ++probesetIndex) {

                final int transcriptIndex = getTranscriptIndex(probesetIndex);
                if (transcriptIndex != -1 && signal[probesetIndex] != 0) {

                    // probeset maps to a transcript in a non-ambiguous way, and was called 'present' in the sample
                    final double sampleNormalizedSignal = averageSumForSamples * (signal[probesetIndex])
                            / sumSignalBySample[sampleIndex];
                    smallestSignalValuesForProbesets[probesetIndex].enqueue(sampleIndex,
                            -sampleNormalizedSignal);
                }
            }

            progressLogger.update();
        }
        arrayReader.close();
        for (int probesetIndex = 0; probesetIndex < smallestSignalValuesForProbesets.length; probesetIndex++) {
            final SummaryStatistics helper = new SummaryStatistics();
            while (!smallestSignalValuesForProbesets[probesetIndex].isEmpty()) {
                final TranscriptScore score = smallestSignalValuesForProbesets[probesetIndex].dequeue();
                final double smallestSignalValue = -score.score;

                helper.addValue(smallestSignalValue);
            }

            minSignalAveragePerProbeset[probesetIndex] = (float) helper.getMean();
            // System.out.println("estimated "+minSignalAveragePerProbeset[probesetIndex]+" for probeset "+probesetIndex);
        }
        progressLogger.stop("Finished estimateMinValueForProbesets.");
        return minSignalAveragePerProbeset;
    } catch (IOException e) {
        System.err.println("Error reading binary information from file");
        e.printStackTrace();
        System.exit(10);
    } catch (ClassNotFoundException e) {
        System.err.println("Could not initialize an array reader.");
        e.printStackTrace();
        System.exit(10);
    }
    return null;
}

From source file:com.netflix.dyno.connectionpool.impl.lb.CircularListTest.java

private static double checkValues(List<Integer> values) {

    System.out.println("Values: " + values);
    SummaryStatistics ss = new SummaryStatistics();
    for (int i = 0; i < values.size(); i++) {
        ss.addValue(values.get(i));
    }

    double mean = ss.getMean();
    double stddev = ss.getStandardDeviation();

    double p = ((stddev * 100) / mean);
    System.out.println("Percentage diff: " + p);

    Assert.assertTrue("" + p + " " + values, p < 0.1);
    return p;
}

From source file:ddf.metrics.reporting.internal.rrd4j.RrdMetricsRetriever.java

private double cumulativeRunningAverage(List<Double> values) {
    if (values.size() == 0) {
        return 0;
    }
    SummaryStatistics summaryStatistics = new SummaryStatistics();
    for (Double value : values) {
        summaryStatistics.addValue(value);
    }
    return summaryStatistics.getMean();
}

From source file:org.apache.curator.framework.recipes.queue.TestQueueSharder.java

@Test
public void testDistribution() throws Exception {
    final int threshold = 100;
    final int factor = 10;

    Timing timing = new Timing();
    CuratorFramework client = CuratorFrameworkFactory.newClient(server.getConnectString(), timing.session(),
            timing.connection(), new RetryOneTime(1));
    QueueSharder<String, DistributedQueue<String>> sharder = null;
    try {
        client.start();

        final CountDownLatch latch = new CountDownLatch(1);
        QueueConsumer<String> consumer = new QueueConsumer<String>() {
            @Override
            public void consumeMessage(String message) throws Exception {
                latch.await();
            }

            @Override
            public void stateChanged(CuratorFramework client, ConnectionState newState) {
            }
        };
        QueueAllocator<String, DistributedQueue<String>> distributedQueueAllocator = makeAllocator(consumer);
        QueueSharderPolicies policies = QueueSharderPolicies.builder().newQueueThreshold(threshold)
                .thresholdCheckMs(1).build();
        sharder = new QueueSharder<String, DistributedQueue<String>>(client, distributedQueueAllocator,
                "/queues", "/leader", policies);
        sharder.start();

        for (int i = 0; i < (factor * threshold); ++i) {
            sharder.getQueue().put(Integer.toString(i));
            Thread.sleep(5);
        }
        timing.forWaiting().sleepABit();

        SummaryStatistics statistics = new SummaryStatistics();
        for (String path : sharder.getQueuePaths()) {
            int numChildren = client.checkExists().forPath(path).getNumChildren();
            Assert.assertTrue(numChildren > 0);
            Assert.assertTrue(numChildren >= (threshold * .1));
            statistics.addValue(numChildren);
        }
        latch.countDown();

        Assert.assertTrue(statistics.getMean() >= (threshold * .9));
    } finally {
        timing.sleepABit(); // let queue clear
        CloseableUtils.closeQuietly(sharder);
        CloseableUtils.closeQuietly(client);
    }
}