Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile

List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile

Introduction

On this page you can find example usage of org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile.

Prototype

public double getPercentile(double p) throws MathIllegalStateException, MathIllegalArgumentException 

Source Link

Document

Returns an estimate for the pth percentile of the stored values.

Usage

From source file:com.datatorrent.netlet.benchmark.util.BenchmarkResults.java

private String getResults() {
    // Render the benchmark summary as a single pipe-separated line:
    // iteration count, mean/min/max latency, then a tail of high percentiles.
    final DescriptiveStatistics stats = getDescriptiveStatistics();
    final StringBuilder report = new StringBuilder();
    report.append("Iterations: ").append(stats.getN())
            .append(" | Avg Time: ").append(fromNanoTime(stats.getMean()))
            .append(" | Min Time: ").append(fromNanoTime(stats.getMin()))
            .append(" | Max Time: ").append(fromNanoTime(stats.getMax()));

    // Percentile labels paired with the quantiles passed to getPercentile.
    final String[] labels = { "75%", "90%", "99%", "99.9%", "99.99%", "99.999%" };
    final double[] quantiles = { 75d, 90d, 99d, 99.9d, 99.99d, 99.999d };
    for (int i = 0; i < quantiles.length; i++) {
        report.append(" | ").append(labels[i]).append(" Time: ")
                .append(fromNanoTime(stats.getPercentile(quantiles[i])));
    }

    return report.toString();
}

From source file:edu.nyu.vida.data_polygamy.exp.NoiseExp.java

/**
 * Returns the interquartile range (Q3 - Q1) of the given values.
 *
 * @param arrayList the sample values (floats are widened to double)
 * @return the difference between the 75th and 25th percentiles
 */
public double getIQR(ArrayList<Float> arrayList) {
    final DescriptiveStatistics stats = new DescriptiveStatistics();
    for (final Float value : arrayList) {
        stats.addValue(value);
    }
    final double lowerQuartile = stats.getPercentile(25);
    final double upperQuartile = stats.getPercentile(75);
    return upperQuartile - lowerQuartile;
}

From source file:gr.iti.mklab.reveal.forensics.util.Util.java

/**
 * Applies a square median filter to a 2D double array and returns the
 * filtered result as floats.
 * <p>
 * The filter takes the median of |value| over each medianFilterSize x
 * medianFilterSize window (note the absolute value — negative inputs are
 * folded to positive before the median is taken). Only positions where the
 * whole window fits inside the input are computed, so the output shrinks by
 * (medianFilterSize - 1) in each dimension ("valid" convolution semantics).
 *
 * @param imIn             input image, indexed [x][y]; assumed rectangular
 *                         (all rows the same length) — TODO confirm with callers
 * @param medianFilterSize side length of the filter window; must be odd so the
 *                         window centers on a pixel
 * @return a new array of size (width - 2*offset) x (height - 2*offset)
 */
public static float[][] medianFilterSingleChannelImage(double[][] imIn, int medianFilterSize) {
    // Median filter a 2D double array
    // medianFilterSize should be odd
    int offset = (medianFilterSize - 1) / 2;
    int imWidth = imIn.length;
    int imHeight = imIn[0].length;
    DescriptiveStatistics blockValues;

    float[][] filteredImage = new float[imWidth - 2 * offset][imHeight - 2 * offset];

    // Iterate over every window-center position where the full window fits.
    for (int ii = offset; ii <= imWidth - medianFilterSize + offset; ii = ii + 1) {
        for (int jj = offset; jj <= imHeight - medianFilterSize + offset; jj = jj + 1) {
            // Fresh statistics object per window: collect |value| for each
            // pixel in the medianFilterSize x medianFilterSize neighborhood.
            blockValues = new DescriptiveStatistics();
            for (int B_ii = ii - offset; B_ii < ii + offset + 1; B_ii++) {
                for (int B_jj = jj - offset; B_jj < jj + offset + 1; B_jj++) {
                    blockValues.addValue(Math.abs(imIn[B_ii][B_jj]));
                }
            }
            // 50th percentile == median of the window.
            filteredImage[ii - offset][jj - offset] = (float) blockValues.getPercentile(50);
        }
    }
    return filteredImage;
}

From source file:edu.nyu.vida.data_polygamy.ctdata.TopologicalIndex.java

/**
 * Computes a Tukey-style IQR outlier threshold for the given values,
 * widened by epsilon.
 *
 * @param vals the sample values
 * @param min  true for the lower fence (Q1 - 1.5*IQR - epsilon),
 *             false for the upper fence (Q3 + 1.5*IQR + epsilon)
 * @return the requested outlier threshold
 */
private double iqOutlierTh(double[] vals, boolean min) {
    final DescriptiveStatistics stats = new DescriptiveStatistics(vals);
    final double q1 = stats.getPercentile(25);
    final double q3 = stats.getPercentile(75);
    final double iqr = q3 - q1;

    if (min) {
        final double th = q1 - 1.5 * iqr;
        return th - epsilon;
    }
    final double th = q3 + 1.5 * iqr;
    return th + epsilon;
}

From source file:com.github.brandtg.stl.StlDecomposition.java

/**
 * Computes robustness weights using bisquare weight function.
 *
 * @param remainder/* w w  w  .  ja  v  a  2 s.  co m*/
 *  The remainder, series - trend - seasonal.
 * @return
 *  A new array containing the robustness weights.
 */
/**
 * Computes robustness weights using the bisquare weight function.
 *
 * @param remainder the remainder, series - trend - seasonal
 * @return a new array containing the robustness weights
 */
private double[] robustnessWeights(double[] remainder) {
    final int n = remainder.length;

    // |R_v| for each point; reused both for the threshold and the weights.
    final double[] absoluteResiduals = new double[n];
    for (int i = 0; i < n; i++) {
        absoluteResiduals[i] = Math.abs(remainder[i]);
    }

    // "h" = 6 * median(|R_v|), the STL outlier threshold.
    // NOTE(review): if the median is 0 this yields Infinity/NaN ratios —
    // presumably biSquareWeight tolerates that; verify against its contract.
    final DescriptiveStatistics residualStats = new DescriptiveStatistics(absoluteResiduals);
    final double outlierThreshold = 6 * residualStats.getPercentile(50);

    final double[] weights = new double[n];
    for (int i = 0; i < n; i++) {
        weights[i] = biSquareWeight(absoluteResiduals[i] / outlierThreshold);
    }
    return weights;
}

From source file:info.financialecology.finance.utilities.datastruct.DoubleTimeSeries.java

/**
 * Returns the given percentile computed over the most recent {@code window}
 * values of this time series.
 *
 * @param percentile the percentile to estimate, in (0, 100]
 * @param window     how many trailing observations to include; must not
 *                   exceed the series length (checked via Assertion)
 * @return the percentile estimate over the trailing window
 */
public double percentile(int percentile, int window) {

    // Fix: the old message claimed the series must be "larger than" the
    // window, but the condition (window <= size) only requires it to be at
    // least as long as the window.
    Assertion.assertStrict(window <= this.values.size(), Level.ERR,
            "percentile(): Length of time series must be at least the window size (" + window + ").");

    DescriptiveStatistics stats = new DescriptiveStatistics();
    int tsLength = this.values.size();

    // Walk backwards from the end so only the last `window` values count.
    for (int i = 0; i < window; i++) {
        stats.addValue(this.values.get(tsLength - i - 1));
    }

    return stats.getPercentile(percentile);
}

From source file:com.caseystella.analytics.distribution.DistributionTest.java

@Test
public void testQuantiles() {
    // Feed 100 seeded-random values into both an exact DescriptiveStatistics
    // and the streaming Distribution, then compare their medians.
    final Random rng = new Random(0);
    final List<DataPoint> samples = new ArrayList<>();
    final DescriptiveStatistics exactStats = new DescriptiveStatistics();
    Distribution approx = null;
    for (int i = 0; i < 100; ++i) {
        final double value = rng.nextDouble() * 1000;
        final DataPoint point = new DataPoint(i, value, null, "foo");
        samples.add(point);
        exactStats.addValue(value);
        // First point seeds the distribution; the rest are streamed in.
        if (approx == null) {
            approx = new Distribution(point, ScalingFunctions.NONE, new GlobalStatistics());
        } else {
            approx.addDataPoint(point, ScalingFunctions.NONE);
        }
    }
    final double realMedian = exactStats.getPercentile(50);
    final double approxMedian = approx.getMedian();
    System.out.println("mean and std dev: " + exactStats.getMean() + ", " + Math.sqrt(exactStats.getVariance()));
    System.out.println("Real : " + realMedian + ", approx: " + approxMedian);
    // The streaming estimate is allowed to drift, but only a little.
    Assert.assertTrue(Math.abs(realMedian - approxMedian) < 5);
}

From source file:org.sakaiproject.gradebookng.tool.panels.SettingsGradingSchemaPanel.java

/**
 * Calculates the median grade for the course
 * /*from   www.  j a  v a 2s. c  o m*/
 * @return String median grade
 */
/**
 * Calculates the median grade for the course.
 *
 * @param stats the descriptive statistics over the course grades
 * @return the median formatted to two decimals, or "-" when there is no data
 */
private String getMedian(DescriptiveStatistics stats) {
    if (this.total > 0) {
        return String.format("%.2f", stats.getPercentile(50));
    }
    return "-";
}

From source file:com.iorga.webappwatcher.analyzer.model.session.RequestsGraph.java

/**
 * Builds (or returns the cached) dispersion {@code Graph} for request
 * durations, broken into per-time-slice series.
 * <p>
 * The result is memoized in {@code this.graph} and only recomputed when the
 * graph mode or the requested number of dispersion buckets changes.
 * Synchronized because the cached fields are shared mutable state.
 *
 * @param graphMode                  STATIC uses the fixed bucket boundaries in
 *                                   {@code staticDispersionTable} (plus median
 *                                   and max); otherwise buckets are evenly
 *                                   spaced percentiles
 * @param nbItemsForDispersionTables number of dispersion buckets requested
 *                                   (ignored in STATIC mode)
 * @return the computed or cached graph
 */
public synchronized Graph compute(final GraphMode graphMode, int nbItemsForDispersionTables)
        throws ClassNotFoundException, IOException {
    if (graph == null || this.nbItemsForDispersionTables != nbItemsForDispersionTables
            || this.graphMode != graphMode) {
        final Graph graph = new Graph();

        /// now let's build the json series ///

        // first, we must create the list of different Y values

        final boolean isStaticMode = graphMode == GraphMode.STATIC;
        if (isStaticMode) {
            nbItemsForDispersionTables = staticDispersionTable.size() + 2; // +2 because we will add the median, and the max
        } else {
            // Dynamic mode: ignore the argument, keep the currently stored setting.
            nbItemsForDispersionTables = this.nbItemsForDispersionTables;
        }
        final double[] yValues = new double[nbItemsForDispersionTables];

        graph.durationsFor1clickDispersionSeries = new ArrayList<Serie>(nbItemsForDispersionTables);

        final DescriptiveStatistics totalDurationsFor1click = durationPerPrincipalStats
                .computeTotalDurationsFor1click();
        if (isStaticMode) {
            // static mode : median / 1s / 2s / 3s / 5s / 10s / 20s / max (median should be ordered)
            final List<Double> yValuesList = Lists.newArrayList(staticDispersionTable);
            yValuesList.add(totalDurationsFor1click.getPercentile(50)); // Add the median
            yValuesList.add(totalDurationsFor1click.getMax()); // Add max
            final List<Double> sortedYValuesList = Ordering.natural().sortedCopy(yValuesList);
            int i = 0;
            for (final Double yValue : sortedYValuesList) {
                yValues[i++] = yValue;
            }
        } else {
            // Dynamic mode: bucket boundaries at evenly spaced percentiles,
            // 100/n, 200/n, ..., 100.
            for (int i = 0; i < yValues.length; i++) {
                yValues[i] = totalDurationsFor1click
                        .getPercentile((i + 1d) / nbItemsForDispersionTables * 100d);
            }
        }
        // compute the labels
        // Each Serie covers the half-open range (previous boundary, boundary];
        // the first one starts at 0.
        for (int i = 0; i < yValues.length; i++) {
            graph.durationsFor1clickDispersionSeries
                    .add(new Serie(i == 0 ? 0 : (int) yValues[i - 1], (int) yValues[i]));
        }

        // Now let's compute the datas for each Y values by slice

        TimeSlice previousTimeSlice = null;
        for (final TimeSlice timeSlice : durationPerPrincipalStats.computeTimeSliceList()) {
            final long endDateTime = timeSlice.getEndDate().getTime();
            final long startDateTime = timeSlice.getStartDate().getTime();
            // TODO: improve this algorithm by iterating over each value of
            // totalDurationsFor1click and, for each one, binary-searching for
            // the counter to increment that matches the right yValues bucket.
            final Date middleTimeSliceDate = new Date((endDateTime + startDateTime) / 2); // the data should be displayed in the middle of the slice
            // A gap between slices is rendered as a null point so the chart
            // breaks the line instead of interpolating across it.
            final boolean mustAppendNullForPrevious = previousTimeSlice != null
                    && previousTimeSlice.getEndDate().getTime() != startDateTime;
            for (int i = 0; i < yValues.length; i++) {
                final Serie serie = graph.durationsFor1clickDispersionSeries.get(i);
                final double maxInclude = serie.max;
                final double minExclude = serie.min;
                final double[] values = timeSlice.getDurationsFor1click().getValues();
                // Count the slice's durations falling into this bucket:
                // min < value <= max.
                int n = 0;
                for (final double value : values) {
                    if (minExclude < value && value <= maxInclude) {
                        n++;
                    }
                }
                addNewDateDoubleValueAndNullForPreviousIfNecessary(serie.data, middleTimeSliceDate, n,
                        mustAppendNullForPrevious, previousTimeSlice);
            }
            // adding cpu & memory info
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.cpuUsageMeans, middleTimeSliceDate,
                    timeSlice.getCpuUsage().getMean(), mustAppendNullForPrevious, previousTimeSlice);
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.memoryUsageMeans, middleTimeSliceDate,
                    timeSlice.getMemoryUsage().getMean(), mustAppendNullForPrevious, previousTimeSlice);
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.nbUsersMax, middleTimeSliceDate,
                    timeSlice.getStatsPerPrincipal().size(), mustAppendNullForPrevious, previousTimeSlice);
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.durationsFor1clickMedians,
                    middleTimeSliceDate, timeSlice.getDurationsFor1click().getPercentile(50),
                    mustAppendNullForPrevious, previousTimeSlice);

            previousTimeSlice = timeSlice;
        }

        // Publish the cache keys only after the graph is fully built.
        this.nbItemsForDispersionTables = nbItemsForDispersionTables;
        this.graphMode = graphMode;
        this.graph = graph; // because the changement of the statistics will reset the graph here
    }

    return graph;
}

From source file:de.iisys.schub.processMining.similarity.AlgoController.java

/**
 * Returns the PERCENTILE-th percentile of the given cosine-similarity
 * scores, rounded to three decimal places.
 *
 * @param cosineSimValues the similarity scores to aggregate
 * @return the rounded percentile estimate
 */
private double getDocsPercentile(List<Double> cosineSimValues) {
    final DescriptiveStatistics stats = new DescriptiveStatistics();
    for (final double similarity : cosineSimValues) {
        stats.addValue(similarity);
    }
    // Round to 3 decimals: scale up, round to a long, scale back down.
    return Math.round(stats.getPercentile(PERCENTILE) * 1000) / 1000.0;
}