Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile

List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile

Introduction

On this page you can find example usage of org.apache.commons.math3.stat.descriptive DescriptiveStatistics.getPercentile.

Prototype

public double getPercentile(double p) throws MathIllegalStateException, MathIllegalArgumentException 

Source Link

Document

Returns an estimate for the pth percentile of the stored values.

Usage

From source file:gov.nih.nci.caintegrator.application.study.deployment.GenomicDataHelper.java

/**
 * Computes a single gene-level expression value from a set of probe-set reporters
 * by taking the median (50th percentile) of their expression signals. While
 * iterating, propagates the high-variance flag from any probe-set reporter to the
 * gene reporter and the sample.
 */
private float computeGeneReporterValue(Collection<AbstractReporter> probeSetReporters,
        ArrayDataValues probeSetValues, ArrayData arrayData, AbstractReporter geneReporter) {
    Sample sample = arrayData.getSample();
    DescriptiveStatistics signalStats = new DescriptiveStatistics();
    for (AbstractReporter probeSetReporter : probeSetReporters) {
        signalStats.addValue(probeSetValues.getFloatValue(arrayData, probeSetReporter, EXPRESSION_SIGNAL));
        // Lift the high-variance marking from probe-set level up to gene level.
        boolean highVariance = probeSetReporter.getSamplesHighVariance().contains(sample);
        if (highVariance) {
            geneReporter.getSamplesHighVariance().add(sample);
            sample.getReportersHighVariance().add(geneReporter);
        }
    }
    return (float) signalStats.getPercentile(FIFTIETH_PERCENTILE);
}

From source file:com.caseystella.analytics.outlier.streaming.mad.SketchyMovingMADTest.java

@Test
public void testSketchyMovingMAD() throws IOException {
    Random rng = new Random(0);
    List<DataPoint> trainingPoints = new ArrayList<>();
    DescriptiveStatistics valueStats = new DescriptiveStatistics();
    DescriptiveStatistics deviationStats = new DescriptiveStatistics();
    OutlierConfig config = JSONUtil.INSTANCE.load(madConfig, OutlierConfig.class);
    SketchyMovingMAD madAlgo = ((SketchyMovingMAD) config.getSketchyOutlierAlgorithm()).withConfig(config);
    // Warm the algorithm up with 10k seeded uniform draws in [-10000, -9000).
    int ts = 0;
    while (ts < 10000) {
        double val = rng.nextDouble() * 1000 - 10000;
        valueStats.addValue(val);
        DataPoint dp = new DataPoint(ts, val, null, "foo");
        madAlgo.analyze(dp);
        trainingPoints.add(dp);
        ++ts;
    }
    // Reference MAD: median absolute deviation from the batch median.
    for (DataPoint dp : trainingPoints) {
        deviationStats.addValue(Math.abs(dp.getValue() - valueStats.getPercentile(50)));
    }
    double mad = deviationStats.getPercentile(50);
    double median = valueStats.getPercentile(50);
    {
        // A modified z-score of 3.6 should be flagged as a moderate outlier.
        double val = getValAtModifiedZScore(3.6, mad, median);
        System.out.println("MODERATE => " + val);
        DataPoint dp = new DataPoint(ts++, val, null, "foo");
        Severity s = madAlgo.analyze(dp).getSeverity();
        Assert.assertTrue(s == Severity.MODERATE_OUTLIER);
    }
    {
        // A modified z-score of 6 should be flagged as a severe outlier.
        double val = getValAtModifiedZScore(6, mad, median);
        System.out.println("SEVERE => " + val);
        DataPoint dp = new DataPoint(ts++, val, null, "foo");
        Severity s = madAlgo.analyze(dp).getSeverity();
        Assert.assertTrue(s == Severity.SEVERE_OUTLIER);
    }

    // The sketch should have bounded its internal median distribution state.
    Assert.assertTrue(madAlgo.getMedianDistributions().get("foo").getAmount() <= 110);
    Assert.assertTrue(madAlgo.getMedianDistributions().get("foo").getChunks().size() <= 12);
}

From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.diagnostics.LinePlot.java

/**
 * Generates the quantile series for the specified key.
 *
 * <p>Points are bucketed into NFE windows of width {@code RESOLUTION}; each
 * window contributes its median as the series value with the interquartile
 * range (25th/75th percentiles) as the Y interval.
 *
 * @param key the key identifying which result to plot
 * @param dataset the dataset to store the generated series
 */
protected void generateQuantileSeries(ResultKey key, YIntervalSeriesCollection dataset) {
    // Collect every (NFE, value) pair for the selected metric across all accumulators.
    List<DataPoint> dataPoints = new ArrayList<DataPoint>();
    for (Accumulator accumulator : controller.get(key)) {
        if (accumulator.keySet().contains(metric)) {
            for (int i = 0; i < accumulator.size(metric); i++) {
                Number value = (Number) accumulator.get(metric, i);
                dataPoints.add(new DataPoint((Integer) accumulator.get("NFE", i), value.doubleValue()));
            }
        }
    }

    Collections.sort(dataPoints);

    YIntervalSeries series = new YIntervalSeries(key);
    DescriptiveStatistics statistics = new DescriptiveStatistics();
    int index = 0;
    int currentNFE = RESOLUTION;

    // Walk the sorted points, accumulating each RESOLUTION-wide window and
    // flushing it when the first point beyond the window boundary is seen.
    while (index < dataPoints.size()) {
        DataPoint point = dataPoints.get(index);

        if (point.getNFE() <= currentNFE) {
            statistics.addValue(point.getValue());
            index++;
        } else {
            if (statistics.getN() > 0) {
                addQuantilePoint(series, currentNFE, statistics);
            }

            statistics.clear();
            currentNFE += RESOLUTION;
        }
    }

    // Flush the final, partially filled window.
    if (statistics.getN() > 0) {
        // If this would be the only entry, add an extra point one window
        // earlier so the series displays with non-zero width.
        if (series.isEmpty()) {
            addQuantilePoint(series, currentNFE - RESOLUTION, statistics);
        }

        addQuantilePoint(series, currentNFE, statistics);
    }

    dataset.addSeries(series);
}

/**
 * Adds one (NFE, median, Q1, Q3) point to the series from the accumulated window.
 */
private static void addQuantilePoint(YIntervalSeries series, int nfe, DescriptiveStatistics statistics) {
    series.add(nfe, statistics.getPercentile(50), statistics.getPercentile(25), statistics.getPercentile(75));
}

From source file:com.clust4j.utils.VecUtils.java

/**
 * Compute the interquartile range (Q3 - Q1) of a vector.
 *
 * @param a the input vector
 * @throws IllegalArgumentException if the input vector is empty
 * @return the interquartile range
 */
public static double iqr(final double[] a) {
    checkDims(a);
    final DescriptiveStatistics stats = new DescriptiveStatistics(a);
    final double q3 = stats.getPercentile(75);
    final double q1 = stats.getPercentile(25);
    return q3 - q1;
}

From source file:knop.psfj.BeadAverager.java

/**
 * Average bead./*from ww  w .j a  v  a 2  s .  co m*/
 *
 * @param frameList the frame list
 * @return the image plus
 */
public ImagePlus averageBead(ArrayList<BeadFrame> frameList) {

    ImagePlus result;
    DescriptiveStatistics centerX = new DescriptiveStatistics();
    DescriptiveStatistics centerY = new DescriptiveStatistics();
    DescriptiveStatistics centerZ = new DescriptiveStatistics();

    int originalStackSize = frameList.get(0).getSliceNumber();

    setTotalBeads(frameList.size());

    //frameList = filter(frameList);

    ArrayList<ImageStack> centeredStacks = new ArrayList<ImageStack>();

    for (BeadFrame frame : frameList) {

        System.out.println(frame.getCentroidXAsInt());
        centerX.addValue(frame.getCentroidXAsInt());
        centerY.addValue(frame.getCentroidYAsInt());
        centerZ.addValue(frame.getCentroidZAsInt());

    }

    int chosenCenter = MathUtils.round(centerZ.getPercentile(50));
    int maxUp = chosenCenter - MathUtils.round(chosenCenter - centerZ.getMin());
    int maxDown = originalStackSize - MathUtils.round(centerZ.getMax());

    System.out.println(maxUp);
    System.out.println(maxDown);

    for (BeadFrame frame : frameList) {
        ImageStack subStack = new ImageStack(frame.getSubstack().getWidth(), frame.getSubstack().getHeight());
        int center = frame.getCentroidZAsInt();
        int begin = center - maxUp;
        int end = center + maxDown;
        System.out.println(String.format(" Groing from %d to %d with a center at %d", begin, end, center));
        for (int i = begin; i != end; i++) {
            subStack.addSlice(frame.getSubstack().getImageStack().getProcessor(i + 1));
        }

        centeredStacks.add(subStack);
        //new ImagePlus("",subStack).show();

    }

    System.out.println(centerX);
    System.out.println(centerY);
    System.out.println(centerZ);

    return new ImagePlus("", averageStacks(centeredStacks));

}

From source file:ec.coevolve.MultiPopCoevolutionaryEvaluatorExtra.java

/**
 * Selects a set of "novel champion" individuals for a subpopulation.
 *
 * <p>Candidate points are drawn from either the hall of fame or the archive
 * (per {@code novelChampionsOrigin}), optionally capped to the top fitness
 * percentile, then k-means clustered; one representative per cluster is
 * returned according to {@code novelChampionsMode}.
 *
 * @param state  the evolution state (source of randomness for the random mode)
 * @param subpop the subpopulation index
 * @return the selected elite individuals
 */
protected Individual[] behaviourElite(EvolutionState state, int subpop) {
    // Generate the dataset
    ArrayList<IndividualClusterable> points = new ArrayList<IndividualClusterable>();
    if (novelChampionsOrigin == NovelChampionsOrigin.halloffame) {
        for (int i = 0; i < hallOfFame[subpop].size(); i++) {
            points.add(new IndividualClusterable(hallOfFame[subpop].get(i), i));
        }
    } else if (novelChampionsOrigin == NovelChampionsOrigin.archive) {
        for (ArchiveEntry ae : archives[subpop]) {
            points.add(new IndividualClusterable(ae.getIndividual(), ae.getGeneration()));
        }
    }

    // Cap -- only use the individuals with the highest fitness scores
    if (novelChampionsCap > 0) {
        // calculate the percentile
        DescriptiveStatistics ds = new DescriptiveStatistics();
        for (IndividualClusterable ic : points) {
            ds.addValue(ic.getFitness());
        }
        double percentile = ds.getPercentile(novelChampionsCap);

        // remove those below the percentile
        Iterator<IndividualClusterable> iter = points.iterator();
        while (iter.hasNext()) {
            IndividualClusterable next = iter.next();
            if (next.getFitness() < percentile) {
                iter.remove();
            }
        }
    }

    // Not enough points for clustering: return every candidate directly.
    if (points.size() <= novelChampions) {
        Individual[] elite = new Individual[points.size()];
        for (int i = 0; i < elite.length; i++) {
            elite[i] = points.get(i).getIndividual();
        }
        return elite;
    }

    // Do the k-means clustering
    KMeansPlusPlusClusterer<IndividualClusterable> clusterer = new KMeansPlusPlusClusterer<IndividualClusterable>(
            novelChampions, 100);
    List<CentroidCluster<IndividualClusterable>> clusters = clusterer.cluster(points);

    // Return one representative from each cluster
    Individual[] elite = new Individual[novelChampions];
    for (int i = 0; i < clusters.size(); i++) {
        CentroidCluster<IndividualClusterable> cluster = clusters.get(i);
        List<IndividualClusterable> clusterPoints = cluster.getPoints();
        if (novelChampionsMode == NovelChampionsMode.random) {
            int randIndex = state.random[0].nextInt(clusterPoints.size());
            elite[i] = clusterPoints.get(randIndex).getIndividual();
        } else if (novelChampionsMode == NovelChampionsMode.last) {
            IndividualClusterable oldest = null;
            for (IndividualClusterable ic : clusterPoints) {
                if (oldest == null || ic.age > oldest.age) {
                    oldest = ic;
                }
            }
            elite[i] = oldest.getIndividual();
        } else if (novelChampionsMode == NovelChampionsMode.centroid) {
            DistanceMeasure dm = clusterer.getDistanceMeasure();
            double[] centroid = cluster.getCenter().getPoint();
            IndividualClusterable closest = null;
            double closestDist = Double.MAX_VALUE;
            for (IndividualClusterable ic : clusterPoints) {
                double dist = dm.compute(centroid, ic.getPoint());
                // Null-guard (mirrors the 'last' branch): without it, a NaN or
                // Double.MAX_VALUE distance on every point would leave
                // 'closest' null and NPE below.
                if (closest == null || dist < closestDist) {
                    closestDist = dist;
                    closest = ic;
                }
            }
            elite[i] = closest.getIndividual();
        } else if (novelChampionsMode == NovelChampionsMode.best) {
            IndividualClusterable best = null;
            float highestFit = Float.NEGATIVE_INFINITY;
            for (IndividualClusterable ic : clusterPoints) {
                // Null-guard: even if every fitness is NEGATIVE_INFINITY or NaN
                // (strict '>' never fires), 'best' is never left null.
                if (best == null || ic.getFitness() > highestFit) {
                    best = ic;
                    highestFit = ic.getFitness();
                }
            }
            elite[i] = best.getIndividual();
        }
    }
    return elite;
}

From source file:gdsc.smlm.ij.plugins.SpotInspector.java

/**
 * Plugin entry point: loads localisation results from memory, ranks each spot
 * by a score, lists them in an interactive results table, optionally plots the
 * score distribution, and extracts every spot into a centred image stack for
 * visual inspection.
 *
 * @param arg the ImageJ plugin argument (not used by this method)
 */
public void run(String arg) {
    if (MemoryPeakResults.countMemorySize() == 0) {
        IJ.error(TITLE, "No localisations in memory");
        return;
    }

    if (!showDialog())
        return;

    // Load the results
    results = ResultsManager.loadInputResults(inputOption, false);
    if (results == null || results.size() == 0) {
        IJ.error(TITLE, "No results could be loaded");
        IJ.showStatus("");
        return;
    }

    // Check if the original image is open
    ImageSource source = results.getSource();
    if (source == null) {
        IJ.error(TITLE, "Unknown original source image");
        return;
    }
    source = source.getOriginal();
    if (!source.open()) {
        IJ.error(TITLE, "Cannot open original source image: " + source.toString());
        return;
    }
    // Peak-width normalisation factor for scoring; negative signals that the
    // fitting configuration (initial peak width) is unavailable.
    final float stdDevMax = getStandardDeviation(results);
    if (stdDevMax < 0) {
        // TODO - Add dialog to get the initial peak width
        IJ.error(TITLE, "Fitting configuration (for initial peak width) is not available");
        return;
    }

    // Rank spots
    rankedResults = new ArrayList<PeakResultRank>(results.size());
    final double a = results.getNmPerPixel();
    final double gain = results.getGain();
    final boolean emCCD = results.isEMCCD();

    // score[0] is the ranking score, score[1] the original (display) score.
    for (PeakResult r : results.getResults()) {
        float[] score = getScore(r, a, gain, emCCD, stdDevMax);
        rankedResults.add(new PeakResultRank(r, score[0], score[1]));
    }
    Collections.sort(rankedResults);

    // Prepare results table. Get bias if necessary
    if (showCalibratedValues) {
        // Get a bias if required
        Calibration calibration = results.getCalibration();
        if (calibration.bias == 0) {
            GenericDialog gd = new GenericDialog(TITLE);
            gd.addMessage("Calibrated results requires a camera bias");
            gd.addNumericField("Camera_bias (ADUs)", calibration.bias, 2);
            gd.showDialog();
            if (!gd.wasCanceled()) {
                calibration.bias = Math.abs(gd.getNextNumber());
            }
        }
    }

    IJTablePeakResults table = new IJTablePeakResults(false, results.getName(), true);
    table.copySettings(results);
    table.setTableTitle(TITLE);
    table.setAddCounter(true);
    table.setShowCalibratedValues(showCalibratedValues);
    table.begin();

    // Add a mouse listener to jump to the frame for the clicked line
    textPanel = table.getResultsWindow().getTextPanel();

    // We must ignore old instances of this class from the mouse listeners
    id = ++currentId;
    textPanel.addMouseListener(this);

    // Add results to the table
    int n = 0;
    for (PeakResultRank rank : rankedResults) {
        rank.rank = n++;
        PeakResult r = rank.peakResult;
        table.add(r.peak, r.origX, r.origY, r.origValue, r.error, r.noise, r.params, r.paramsStdDev);
    }
    table.end();

    if (plotScore || plotHistogram) {
        // Get values for the plots
        float[] xValues = null, yValues = null;
        double yMin, yMax;

        int spotNumber = 0;
        xValues = new float[rankedResults.size()];
        yValues = new float[xValues.length];
        for (PeakResultRank rank : rankedResults) {
            xValues[spotNumber] = spotNumber + 1;
            yValues[spotNumber++] = recoverScore(rank.score);
        }

        // Set the min and max y-values using 1.5 x IQR
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (float v : yValues)
            stats.addValue(v);
        if (removeOutliers) {
            // Clip the plotted range to [Q1 - IQR, Q3 + IQR], bounded by the
            // actual data range.
            double lower = stats.getPercentile(25);
            double upper = stats.getPercentile(75);
            double iqr = upper - lower;

            yMin = FastMath.max(lower - iqr, stats.getMin());
            yMax = FastMath.min(upper + iqr, stats.getMax());

            IJ.log(String.format("Data range: %f - %f. Plotting 1.5x IQR: %f - %f", stats.getMin(),
                    stats.getMax(), yMin, yMax));
        } else {
            yMin = stats.getMin();
            yMax = stats.getMax();

            IJ.log(String.format("Data range: %f - %f", yMin, yMax));
        }

        plotScore(xValues, yValues, yMin, yMax);
        plotHistogram(yValues, yMin, yMax);
    }

    // Extract spots into a stack
    final int w = source.getWidth();
    final int h = source.getHeight();
    final int size = 2 * radius + 1;
    ImageStack spots = new ImageStack(size, size, rankedResults.size());

    // To assist the extraction of data from the image source, process them in time order to allow
    // frame caching. Then set the appropriate slice in the result stack
    Collections.sort(rankedResults, new Comparator<PeakResultRank>() {
        public int compare(PeakResultRank o1, PeakResultRank o2) {
            if (o1.peakResult.peak < o2.peakResult.peak)
                return -1;
            if (o1.peakResult.peak > o2.peakResult.peak)
                return 1;
            return 0;
        }
    });

    for (PeakResultRank rank : rankedResults) {
        PeakResult r = rank.peakResult;

        // Extract image
        // Note that the coordinates are relative to the middle of the pixel (0.5 offset)
        // so do not round but simply convert to int
        final int x = (int) (r.params[Gaussian2DFunction.X_POSITION]);
        final int y = (int) (r.params[Gaussian2DFunction.Y_POSITION]);

        // Extract a region but crop to the image bounds
        int minX = x - radius;
        int minY = y - radius;
        int maxX = FastMath.min(x + radius + 1, w);
        int maxY = FastMath.min(y + radius + 1, h);

        // Record how far the crop was clipped at the top/left so the spot can
        // be re-centred by padding below.
        int padX = 0, padY = 0;
        if (minX < 0) {
            padX = -minX;
            minX = 0;
        }
        if (minY < 0) {
            padY = -minY;
            minY = 0;
        }
        int sizeX = maxX - minX;
        int sizeY = maxY - minY;

        float[] data = source.get(r.peak, new Rectangle(minX, minY, sizeX, sizeY));
        // Prevent errors with missing data
        if (data == null)
            data = new float[sizeX * sizeY];
        ImageProcessor spotIp = new FloatProcessor(sizeX, sizeY, data, null);

        // Pad if necessary, i.e. the crop is too small for the stack
        if (padX > 0 || padY > 0 || sizeX < size || sizeY < size) {
            ImageProcessor spotIp2 = spotIp.createProcessor(size, size);
            spotIp2.insert(spotIp, padX, padY);
            spotIp = spotIp2;
        }
        // Slices are 1-based and follow the original score ranking, not the
        // time order used for extraction.
        int slice = rank.rank + 1;
        spots.setPixels(spotIp.getPixels(), slice);
        spots.setSliceLabel(Utils.rounded(rank.originalScore), slice);
    }

    source.close();

    ImagePlus imp = Utils.display(TITLE, spots);
    imp.setRoi((PointRoi) null);

    // Make bigger
    for (int i = 10; i-- > 0;)
        imp.getWindow().getCanvas().zoomIn(imp.getWidth() / 2, imp.getHeight() / 2);
}

From source file:com.github.jessemull.microflexdouble.stat.PercentileTest.java

/**
 * Tests the plate statistics method by recomputing each well's percentile
 * independently and comparing against the returned map.
 */
@Test
public void testPlate() {

    for (Plate plate : array) {

        int inputPercentile = 1 + random.nextInt(100);

        Map<Well, Double> expected = new TreeMap<Well, Double>();
        Map<Well, Double> actual = percentile.plate(plate, inputPercentile);

        // Compute the reference percentile for every well.
        for (Well well : plate) {

            double[] values = new double[well.size()];
            int next = 0;

            for (double datum : well) {
                values[next++] = datum;
            }

            DescriptiveStatistics stats = new DescriptiveStatistics(values);
            expected.put(well, stats.getPercentile(inputPercentile));
        }

        // Compare after rounding both sides to the configured precision.
        for (Well well : plate) {

            double expectedRounded = Precision.round(expected.get(well), precision);
            double actualRounded = Precision.round(actual.get(well), precision);

            assertTrue(expectedRounded == actualRounded);
        }
    }
}

From source file:com.github.jessemull.microflex.stat.statinteger.PercentileIntegerTest.java

/**
 * Tests the plate statistics method by recomputing each well's percentile
 * independently and comparing against the returned map.
 */
@Test
public void testPlate() {

    for (PlateInteger plate : array) {

        int inputPercentile = 1 + random.nextInt(100);

        Map<WellInteger, Double> expected = new TreeMap<WellInteger, Double>();
        Map<WellInteger, Double> actual = percentile.plate(plate, inputPercentile);

        // Compute the reference percentile for every well.
        for (WellInteger well : plate) {

            double[] values = new double[well.size()];
            int next = 0;

            for (double datum : well) {
                values[next++] = datum;
            }

            DescriptiveStatistics stats = new DescriptiveStatistics(values);
            expected.put(well, stats.getPercentile(inputPercentile));
        }

        // Compare after rounding both sides to the configured precision.
        for (WellInteger well : plate) {

            double expectedRounded = Precision.round(expected.get(well), precision);
            double actualRounded = Precision.round(actual.get(well), precision);

            assertTrue(expectedRounded == actualRounded);
        }
    }
}

From source file:com.github.jessemull.microflexdouble.stat.PercentileTest.java

/**
 * Tests set calculation./*from  w w  w .  java 2  s  .  co m*/
 */
@Test
public void testSet() {

    int inputPercentile = 1 + random.nextInt(100);

    for (Plate plate : array) {

        Map<Well, Double> resultMap = new TreeMap<Well, Double>();
        Map<Well, Double> returnedMap = percentile.set(plate.dataSet(), inputPercentile);

        for (Well well : plate) {

            double[] input = new double[well.size()];
            int index = 0;

            for (double db : well) {
                input[index++] = db;
            }

            DescriptiveStatistics stat = new DescriptiveStatistics(input);
            double result = stat.getPercentile(inputPercentile);

            resultMap.put(well, result);
        }

        for (Well well : plate) {

            double result = Precision.round(resultMap.get(well), precision);
            double returned = Precision.round(returnedMap.get(well), precision);

            assertTrue(result == returned);
        }
    }

}