Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile

Introduction

This page collects example usages of getPercentile from org.apache.commons.math3.stat.descriptive.DescriptiveStatistics.

Prototype

public double getPercentile(double p) throws MathIllegalStateException, MathIllegalArgumentException 

Document

Returns an estimate for the pth percentile of the stored values.
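
Before the project examples below, here is a minimal, self-contained sketch of the call; the class name and sample values are illustrative and not taken from any of the listed projects:

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class GetPercentileSketch {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (double v : new double[] { 2.0, 4.0, 4.0, 5.0, 7.0, 9.0 }) {
            stats.addValue(v);
        }
        // getPercentile(50) estimates the median; 25 and 75 give the quartiles.
        double median = stats.getPercentile(50);
        double iqr = stats.getPercentile(75) - stats.getPercentile(25);
        System.out.println("median = " + median + ", IQR = " + iqr);
    }
}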

Usage

From source file:knop.psfj.exporter.HTMLDataSetExporter.java

/**
 * Gets the statistics.
 *
 * @param bead the bead
 * @return the statistics
 */
public String getStatistics(int bead) {

    StringBuffer result = new StringBuffer(1000);

    boolean isSingleChannel = (manager.getAnalysisType() == BeadImageManager.SINGLE_CHANNEL);

    HashMap<String, String> valueToReplace = new HashMap<String, String>();
    Microscope m = manager.getMicroscope(bead);

    valueToReplace.put("%microscope_id", m.getIdentifier());
    valueToReplace.put("%bead_number", "" + manager.getKeptBeadsCount());
    valueToReplace.put("%totalBeads", "" + (manager.getKeptBeadsCount() + manager.getDeletedBeads()));
    valueToReplace.put("%wave_length", m.getWaveLengthAsString());
    valueToReplace.put("%NA", m.getNAAsString());
    valueToReplace.put("%refraction", m.getRefractionIndexAsString());
    valueToReplace.put("%bead_size", m.getBeadSizeAsString());
    valueToReplace.put("%voxel_size", m.getVoxelSizeAsString());
    valueToReplace.put("%deletedBeads", "" + manager.getDeletedBeads());
    valueToReplace.put("%threshold%", "" + manager.getBeadImage(0).getThresholdValue());
    valueToReplace.put("%substack_size%", "" + manager.getBeadImage(0).getFrameSize());

    FovDataSet dataSet;
    if (isSingleChannel) {
        dataSet = manager.getDataSet();
    } else {
        dataSet = manager.getBeadImage(bead).getBeadFrameList().getDataSet();

    }

    DescriptiveStatistics zProfileStats = dataSet.getColumnStatistics("z_profile");

    valueToReplace.put("%fwhm_X", manager.getHeatmapStatisticsAsString(
            PSFj.getHeatmapName(PSFj.FWHM_KEY, 0, -1), PSFj.NOT_NORMALIZED, m.getUnit()));
    valueToReplace.put("%fwhm_th_X", "" + MathUtils.formatDouble(m.getXYTheoreticalResolution(), m.getUnit()));
    //String encoded=ImageProcessorUtils.getEncodedPNGBase64(manager.getGraphList().get(0).getGraph(1).resize(100));
    //System.out.println(encoded);
    //valueToReplace.put("%heatmap%",encoded);
    valueToReplace.put("%fwhm_Y", manager.getHeatmapStatisticsAsString(
            PSFj.getHeatmapName(PSFj.FWHM_KEY, 1, -1), PSFj.NOT_NORMALIZED, m.getUnit()));
    valueToReplace.put("%fwhm_th_Y", "" + MathUtils.formatDouble(m.getXYTheoreticalResolution(), m.getUnit()));

    valueToReplace.put("%fwhm_Z", manager.getHeatmapStatisticsAsString(
            PSFj.getHeatmapName(PSFj.FWHM_KEY, 2, -1), PSFj.NOT_NORMALIZED, m.getUnit()));
    valueToReplace.put("%fwhm_th_Z", "" + MathUtils.formatDouble(m.getZTheoreticalResolution(), m.getUnit()));

    valueToReplace.put(("%z_profile"),
            "" + MathUtils.formatDouble(zProfileStats.getPercentile(50), m.getUnit()));

    String planarity = manager.getHeatmapStatisticsAsString(PSFj.getHeatmapName(PSFj.Z_PROFILE, -1),
            PSFj.NOT_NORMALIZED, m.getUnit());
    String asymmetry = manager.getHeatmapStatisticsAsString(PSFj.getHeatmapName(PSFj.ASYMMETRY_KEY, -1),
            PSFj.NOT_NORMALIZED, "");

    valueToReplace.put("%asymmetry%", asymmetry);
    valueToReplace.put("%planarity%", planrity);

    result.append(TextUtils.readTextRessource(valueToReplace, "/mini-report.html", valueToReplace));

    TextUtils.writeStringToFile("/home/cyril/test.html", result.toString(), false);

    return result.toString();
}

From source file:com.fpuna.preproceso.PreprocesoTS.java

private static TrainingSetFeature calculoFeaturesMagnitud(List<Registro> muestras, String activity) {

    TrainingSetFeature Feature = new TrainingSetFeature();
    DescriptiveStatistics stats_m = new DescriptiveStatistics();

    double[] fft_m;
    double[] AR_4;

    muestras = Util.calcMagnitud(muestras);

    for (int i = 0; i < muestras.size(); i++) {
        stats_m.addValue(muestras.get(i).getM_1());
    }

    //********* FFT *********
    //fft_m = Util.transform(stats_m.getValues());
    fft_m = FFTMixedRadix.fftPowerSpectrum(stats_m.getValues());

    //******************* Magnitude calculations *******************//
    //mean(s) - Arithmetic mean
    System.out.print(stats_m.getMean() + ",");
    Feature.setMeanX((float) stats_m.getMean());

    //std(s) - Standard deviation
    System.out.print(stats_m.getStandardDeviation() + ",");
    Feature.setStdX((float) stats_m.getStandardDeviation());

    //mad(s) - Median absolute deviation (not computed here)
    //max(s) - Largest values in array
    System.out.print(stats_m.getMax() + ",");
    Feature.setMaxX((float) stats_m.getMax());

    //min(s) - Smallest value in array
    System.out.print(stats_m.getMin() + ",");
    Feature.setMinX((float) stats_m.getMin());

    //skewness(s) - Frequency signal Skewness
    System.out.print(stats_m.getSkewness() + ",");
    Feature.setSkewnessX((float) stats_m.getSkewness());

    //kurtosis(s) - Frequency signal Kurtosis
    System.out.print(stats_m.getKurtosis() + ",");
    Feature.setKurtosisX((float) stats_m.getKurtosis());

    //energy(s) - Average sum of the squares
    System.out.print(stats_m.getSumsq() / stats_m.getN() + ",");
    Feature.setEnergyX((float) (stats_m.getSumsq() / stats_m.getN()));

    //entropy(s) - Signal Entropy
    System.out.print(Util.calculateShannonEntropy(fft_m) + ",");
    Feature.setEntropyX(Util.calculateShannonEntropy(fft_m).floatValue());

    //iqr (s) Interquartile range
    System.out.print(stats_m.getPercentile(75) - stats_m.getPercentile(25) + ",");
    Feature.setIqrX((float) (stats_m.getPercentile(75) - stats_m.getPercentile(25)));

    try {
        //autoregression (s) -4th order Burg Autoregression coefficients
        AR_4 = AutoRegression.calculateARCoefficients(stats_m.getValues(), 4, true);
        System.out.print(AR_4[0] + ",");
        System.out.print(AR_4[1] + ",");
        System.out.print(AR_4[2] + ",");
        System.out.print(AR_4[3] + ",");
        Feature.setArX1((float) AR_4[0]);
        Feature.setArX2((float) AR_4[1]);
        Feature.setArX3((float) AR_4[2]);
        Feature.setArX4((float) AR_4[3]);
    } catch (Exception ex) {
        Logger.getLogger(PreprocesoTS.class.getName()).log(Level.SEVERE, null, ex);
    }
    //meanFreq(s) - Frequency signal weighted average
    System.out.print(Util.meanFreq(fft_m, stats_m.getValues()) + ",");
    Feature.setMeanFreqx((float) Util.meanFreq(fft_m, stats_m.getValues()));

    //******************* Activity *******************/
    System.out.print(activity);
    System.out.print("\n");
    Feature.setEtiqueta(activity);

    return Feature;
}

From source file:com.itemanalysis.jmetrik.stats.descriptives.DescriptiveAnalysis.java

public void publishTable(VariableAttributes v) {
    TextTable table = null;
    TextTableColumnFormat[] cformats = new TextTableColumnFormat[2];
    cformats[0] = new TextTableColumnFormat();
    cformats[0].setStringFormat(15, TextTableColumnFormat.OutputAlignment.LEFT);
    cformats[1] = new TextTableColumnFormat();
    cformats[1].setDoubleFormat(10, 4, TextTableColumnFormat.OutputAlignment.RIGHT);

    DescriptiveStatistics temp = data.get(v);
    table = new TextTable();
    table.addAllColumnFormats(cformats, 17);
    table.getRowAt(0).addHeader(0, 2, v.getName().toString(), TextTablePosition.CENTER);
    table.getRowAt(1).addHorizontalRule(0, 2, "=");
    table.getRowAt(2).addHeader(0, 1, "Statistic", TextTablePosition.CENTER);
    table.getRowAt(2).addHeader(1, 1, "Value", TextTablePosition.CENTER);
    table.getRowAt(3).addHorizontalRule(0, 2, "-");

    table.addStringAt(4, 0, "N");
    table.addDoubleAt(4, 1, maxProgress);
    table.addStringAt(5, 0, "Valid N");
    table.addDoubleAt(5, 1, temp.getN());
    table.addStringAt(6, 0, "Min");
    table.addDoubleAt(6, 1, temp.getMin());
    table.addStringAt(7, 0, "Max");
    table.addDoubleAt(7, 1, temp.getMax());
    table.addStringAt(8, 0, "Mean");
    table.addDoubleAt(8, 1, temp.getMean());
    table.addStringAt(9, 0, "Std. Dev.");
    table.addDoubleAt(9, 1, temp.getStandardDeviation());
    table.addStringAt(10, 0, "Skewness");
    table.addDoubleAt(10, 1, temp.getSkewness());
    table.addStringAt(11, 0, "Kurtosis");
    table.addDoubleAt(11, 1, temp.getKurtosis());
    table.addStringAt(12, 0, "First Quartile");
    table.addDoubleAt(12, 1, temp.getPercentile(25));
    table.addStringAt(13, 0, "Median");
    table.addDoubleAt(13, 1, temp.getPercentile(50));
    table.addStringAt(14, 0, "Third Quartile");
    table.addDoubleAt(14, 1, temp.getPercentile(75));
    table.addStringAt(15, 0, "IQR");
    table.addDoubleAt(15, 1, temp.getPercentile(75) - temp.getPercentile(25));
    table.getRowAt(16).addHorizontalRule(0, 2, "=");

    publish(table.toString() + "\n");

}

From source file:knop.psfj.FovDataSet.java

/**
 * Gets the min.
 *
 * @param column the column
 * @return the min
 */
public double getMin(String column) {
    if (column.contains("fwhm")) {
        return getTheoriticalValue(column) / 2;
    }

    if (column.contains("delta") || column.contains("norm")) {
        return -1;
    }

    DescriptiveStatistics columnStats = getColumnStatistics(column);
    double median = columnStats.getPercentile(50);
    double stdDev = columnStats.getStandardDeviation();
    if (stdDev == 0)
        return -1;
    return median - (stdDev * 2);

}

From source file:knop.psfj.FovDataSet.java

/**
 * Gets the max.
 *
 * @param column the column
 * @return the max
 */
public double getMax(String column) {
    if (column.contains("fwhm")) {
        return getTheoriticalValue(column) * 2;
    }

    if (column.contains("norm"))
        return 1;

    if (column.equals("z_profile") || column.contains("delta")) {
        return 1;
    }

    DescriptiveStatistics columnStats = getColumnStatistics(column);
    double median = columnStats.getPercentile(50);
    double stdDev = columnStats.getStandardDeviation();
    if (stdDev == 0)
        return 1;
    return median + (stdDev * 2);
}

From source file:knop.psfj.FovDataSet.java

/**
 * Gets the column median.
 *
 * @param column the column
 * @return the column median
 */
public String getColumnMedian(String column) {
    DescriptiveStatistics stats = getColumnStatistics(column);
    //System.out.println("stddev : " + stats.getStandardDeviation());
    return "" + MathUtils.formatDouble(stats.getPercentile(50), getColumnUnit(column)) + " +/- "
            + MathUtils.formatDouble(stats.getStandardDeviation(), getColumnUnit(column));
}

From source file:gdsc.smlm.ij.plugins.PSFCreator.java

/**
 * Get the limits of the array, clipping the lower and upper limits to within one
 * interquartile range (IQR) of the first and third quartiles to reduce the influence of outliers.
 * 
 * @param data the values
 * @return the [min, max] limits after clipping
 */
private double[] getLimits(double[] data) {
    double[] limits = Maths.limits(data);
    DescriptiveStatistics stats = new DescriptiveStatistics(data);
    double lower = stats.getPercentile(25);
    double upper = stats.getPercentile(75);
    double iqr = upper - lower;
    limits[0] = FastMath.max(lower - iqr, limits[0]);
    limits[1] = FastMath.min(upper + iqr, limits[1]);
    return limits;
}
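
The getLimits snippet above clips at one IQR beyond the quartiles; the conventional Tukey fence uses 1.5 × IQR instead. A minimal sketch of that variant, where the class and method names are illustrative and not part of the project above:

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

// Hypothetical helper (not from the project above): Tukey fences at 1.5 * IQR.
public class TukeyLimits {
    public static double[] limits(double[] data) {
        DescriptiveStatistics stats = new DescriptiveStatistics(data);
        double lower = stats.getPercentile(25);   // first quartile
        double upper = stats.getPercentile(75);   // third quartile
        double iqr = upper - lower;
        return new double[] { lower - 1.5 * iqr, upper + 1.5 * iqr };
    }
}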

From source file:com.alibaba.dubbo.demo.consumer.DemoAction.java

public void start() throws Exception {
    int threads = 100;

    final DescriptiveStatistics stats = new SynchronizedDescriptiveStatistics();

    DubboBenchmark.BenchmarkMessage msg = prepareArgs();
    final byte[] msgBytes = msg.toByteArray();

    int n = 1000000;
    final CountDownLatch latch = new CountDownLatch(n);

    ExecutorService es = Executors.newFixedThreadPool(threads);

    final AtomicInteger trans = new AtomicInteger(0);
    final AtomicInteger transOK = new AtomicInteger(0);

    long start = System.currentTimeMillis();
    for (int i = 0; i < n; i++) {
        es.submit(() -> {
            try {

                long t = System.currentTimeMillis();
                DubboBenchmark.BenchmarkMessage m = testSay(msgBytes);
                t = System.currentTimeMillis() - t;
                stats.addValue(t);

                trans.incrementAndGet();

                if (m != null && m.getField1().equals("OK")) {
                    transOK.incrementAndGet();
                }

            } catch (InterruptedException e) {
                e.printStackTrace();
            } finally {
                latch.countDown();
            }
        });
    }

    latch.await();

    start = System.currentTimeMillis() - start;

    System.out.printf("sent     requests    : %d\n", n);
    System.out.printf("received requests    : %d\n", trans.get());
    System.out.printf("received requests_OK : %d\n", transOK.get());
    System.out.printf("throughput  (TPS)    : %d\n", n * 1000 / start);

    System.out.printf("mean: %f\n", stats.getMean());
    System.out.printf("median: %f\n", stats.getPercentile(50));
    System.out.printf("max: %f\n", stats.getMax());
    System.out.printf("min: %f\n", stats.getMin());

    System.out.printf("99P: %f\n", stats.getPercentile(90));
}

From source file:cn.pku.sei.GHRC.MySpectralClusterer.java

/**
 * Generates a clusterer by means of the spectral clustering algorithm.
 *
 * @param data
 *            set of instances serving as training data
 */
public void buildClusterer(final GHGraphBuilder data) {
    setData(data);
    final int n = data.getNodeNum();
    final DoubleMatrix2D w = useSparseMatrix ? DoubleFactory2D.sparse.make(n, n)
            : DoubleFactory2D.dense.make(n, n);
    /*
     * final double[][] v1 = new double[n][]; for (int i = 0; i < n; i++)
     * v1[i] = data.instance(i).toDoubleArray(); final DoubleMatrix2D v =
     * DoubleFactory2D.dense.make(v1);
     */
    final double sigma_sq = sigma * sigma;
    DescriptiveStatistics stats = new DescriptiveStatistics();
    stats.addValue(1);
    // Sets up similarity matrix
    try (Transaction tx = data.getGraphDb().beginTx()) {

        Iterator<Relationship> rels = data.getAllRelationships();
        int id1, id2;
        double score;
        double previousScore;
        while (rels.hasNext()) {
            Relationship rel = rels.next();
            score = GHRepository.getScore(rel);

            //            score = Math.exp(-(score * score) / (2 * sigma_sq));

            id1 = (int) rel.getStartNode().getId();
            id2 = (int) rel.getEndNode().getId();
            previousScore = w.get(id2, id1);
            w.set(id1, id2, score + previousScore);
            w.set(id2, id1, score + previousScore);
        }
        tx.success();
    }

    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            double s = w.get(i, j);
            if (s > 0) {
                stats.addValue(s);
            }
        }
    }

    double median = stats.getPercentile(persentile);
    System.out.println("-------------Sim Matrix--------------");

    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            double s = w.get(i, j) / median;
            s = s >= 1 ? 0.99 : s;
            w.set(i, j, s);
            System.out.print(w.get(i, j) + "\t");
        }
        w.set(i, i, 1);
        System.out.println();
    }

    System.out.println(median);

    // Compute point partitions
    final int[][] p = partition(w /* , alpha_star */);

    // Deploys results
    numOfClusters = p.length;
    cluster = new int[n];
    for (int i = 0; i < p.length; i++)
        for (int j = 0; j < p[i].length; j++)
            cluster[p[i][j]] = i;
}

From source file:gdsc.smlm.ij.plugins.pcpalm.PCPALMMolecules.java

/**
 * Calculate the average precision by fitting a skewed Gaussian to the histogram of the precision distribution.
 * <p>
 * A simple mean and SD of the histogram is computed. If the mean of the Skewed Gaussian does not fit within 3 SDs
 * of the simple mean then the simple mean is returned.
 * 
 * @param molecules
 * @param title
 *            the plot title (null if no plot should be displayed)
 * @param histogramBins
 * @param logFitParameters
 *            Record the fit parameters to the ImageJ log
 * @param removeOutliers
 *            If true, the histogram range is limited to within one inter-quartile range (IQR) of the lower and upper quartiles
 * @return The average precision
 */
public double calculateAveragePrecision(ArrayList<Molecule> molecules, String title, int histogramBins,
        boolean logFitParameters, boolean removeOutliers) {
    // Plot histogram of the precision
    float[] data = new float[molecules.size()];
    DescriptiveStatistics stats = new DescriptiveStatistics();
    double yMin = Double.NEGATIVE_INFINITY, yMax = 0;
    for (int i = 0; i < data.length; i++) {
        data[i] = (float) molecules.get(i).precision;
        stats.addValue(data[i]);
    }

    // Set the min and max y-values using 1.5 x IQR 
    if (removeOutliers) {
        double lower = stats.getPercentile(25);
        double upper = stats.getPercentile(75);
        if (Double.isNaN(lower) || Double.isNaN(upper)) {
            if (logFitParameters)
                Utils.log("Error computing IQR: %f - %f", lower, upper);
        } else {
            double iqr = upper - lower;

            yMin = FastMath.max(lower - iqr, stats.getMin());
            yMax = FastMath.min(upper + iqr, stats.getMax());

            if (logFitParameters)
                Utils.log("  Data range: %f - %f. Plotting 1.5x IQR: %f - %f", stats.getMin(), stats.getMax(),
                        yMin, yMax);
        }
    }

    if (yMin == Double.NEGATIVE_INFINITY) {
        yMin = stats.getMin();
        yMax = stats.getMax();

        if (logFitParameters)
            Utils.log("  Data range: %f - %f", yMin, yMax);
    }

    float[][] hist = Utils.calcHistogram(data, yMin, yMax, histogramBins);

    Plot2 plot = null;
    if (title != null) {
        plot = new Plot2(title, "Precision", "Frequency");
        float[] xValues = hist[0];
        float[] yValues = hist[1];
        if (xValues.length > 0) {
            double xPadding = 0.05 * (xValues[xValues.length - 1] - xValues[0]);
            plot.setLimits(xValues[0] - xPadding, xValues[xValues.length - 1] + xPadding, 0,
                    Maths.max(yValues) * 1.05);
        }
        plot.addPoints(xValues, yValues, Plot2.BAR);
        Utils.display(title, plot);
    }

    // Extract non-zero data
    float[] x = Arrays.copyOf(hist[0], hist[0].length);
    float[] y = hist[1];
    int count = 0;
    float dx = (x[1] - x[0]) * 0.5f;
    for (int i = 0; i < y.length; i++)
        if (y[i] > 0) {
            x[count] = x[i] + dx;
            y[count] = y[i];
            count++;
        }
    x = Arrays.copyOf(x, count);
    y = Arrays.copyOf(y, count);

    // Sanity check the data to be fitted: get the mean and SD of the histogram
    double[] stats2 = Utils.getHistogramStatistics(x, y);
    double mean = stats2[0];
    if (logFitParameters)
        log("  Initial Statistics: %f +/- %f", stats2[0], stats2[1]);

    // Standard Gaussian fit
    double[] parameters = fitGaussian(x, y);
    if (parameters == null) {
        log("  Failed to fit initial Gaussian");
        return mean;
    }
    double newMean = parameters[1];
    double error = Math.abs(stats2[0] - newMean) / stats2[1];
    if (error > 3) {
        log("  Failed to fit Gaussian: %f standard deviations from histogram mean", error);
        return mean;
    }
    if (newMean < yMin || newMean > yMax) {
        log("  Failed to fit Gaussian: %f outside data range %f - %f", newMean, yMin, yMax);
        return mean;
    }

    mean = newMean;

    if (logFitParameters)
        log("  Initial Gaussian: %f @ %f +/- %f", parameters[0], parameters[1], parameters[2]);

    double[] initialSolution = new double[] { parameters[0], parameters[1], parameters[2], -1 };

    // Fit to a skewed Gaussian (or appropriate function)
    double[] skewParameters = fitSkewGaussian(x, y, initialSolution);
    if (skewParameters == null) {
        log("  Failed to fit Skewed Gaussian");
        return mean;
    }

    SkewNormalFunction sn = new SkewNormalFunction(skewParameters);
    if (logFitParameters)
        log("  Skewed Gaussian: %f @ %f +/- %f (a = %f) => %f +/- %f", skewParameters[0], skewParameters[1],
                skewParameters[2], skewParameters[3], sn.getMean(), Math.sqrt(sn.getVariance()));

    newMean = sn.getMean();
    error = Math.abs(stats2[0] - newMean) / stats2[1];
    if (error > 3) {
        log("  Failed to fit Skewed Gaussian: %f standard deviations from histogram mean", error);
        return mean;
    }
    if (newMean < yMin || newMean > yMax) {
        log("  Failed to fit Skewed Gaussian: %f outside data range %f - %f", newMean, yMin, yMax);
        return mean;
    }

    // Use original histogram x-axis to maintain all the bins
    if (plot != null) {
        x = hist[0];
        for (int i = 0; i < y.length; i++)
            x[i] += dx;
        plot.setColor(Color.red);
        addToPlot(plot, x, skewParameters, Plot2.LINE);

        plot.setColor(Color.black);
        Utils.display(title, plot);
    }

    // Return the average precision from the fitted curve
    return newMean;
}