Example usage for org.apache.commons.math.stat.descriptive.rank Percentile evaluate

Introduction

On this page you can find example usage for org.apache.commons.math.stat.descriptive.rank Percentile evaluate.

Prototype

public double evaluate(final double[] values, final double p) 

Document

Returns an estimate of the pth percentile of the values in the values array.
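
For quick orientation before the project examples below, here is a minimal, self-contained sketch of calling evaluate directly. The class name and sample values are illustrative only:

import org.apache.commons.math.stat.descriptive.rank.Percentile;

public class PercentileEvaluateSketch {
    public static void main(String[] args) {
        // Illustrative sample data
        double[] values = new double[] { 2.3, 5.4, 6.2, 7.3, 23.3 };

        Percentile percentile = new Percentile();
        // p must lie in the interval (0, 100]; 50.0 estimates the median
        double median = percentile.evaluate(values, 50.0);
        System.out.println("50th percentile: " + median);
    }
}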

Usage

From source file:com.discursive.jccook.math.StatExample.java

// Imports needed to compile this example (Commons Math 2.x package layout)
import org.apache.commons.math.stat.StatUtils;
import org.apache.commons.math.stat.descriptive.moment.GeometricMean;
import org.apache.commons.math.stat.descriptive.moment.Kurtosis;
import org.apache.commons.math.stat.descriptive.moment.Mean;
import org.apache.commons.math.stat.descriptive.moment.Skewness;
import org.apache.commons.math.stat.descriptive.moment.StandardDeviation;
import org.apache.commons.math.stat.descriptive.moment.Variance;
import org.apache.commons.math.stat.descriptive.rank.Max;
import org.apache.commons.math.stat.descriptive.rank.Min;
import org.apache.commons.math.stat.descriptive.rank.Percentile;
import org.apache.commons.math.stat.descriptive.summary.Product;
import org.apache.commons.math.stat.descriptive.summary.Sum;

public static void main(String[] args) {
    double[] values = new double[] { 2.3, 5.4, 6.2, 7.3, 23.3 };

    System.out.println("min: " + StatUtils.min(values));
    System.out.println("max: " + StatUtils.max(values));
    System.out.println("mean: " + StatUtils.mean(values));
    System.out.println("product: " + StatUtils.product(values));
    System.out.println("sum: " + StatUtils.sum(values));
    System.out.println("variance: " + StatUtils.variance(values));

    // Measures from previous example
    Min min = new Min();
    System.out.println("min: " + min.evaluate(values));
    Max max = new Max();
    System.out.println("max: " + max.evaluate(values));
    Mean mean = new Mean();
    System.out.println("mean: " + mean.evaluate(values));
    Product product = new Product();
    System.out.println("product: " + product.evaluate(values));
    Sum sum = new Sum();
    System.out.println("sum: " + sum.evaluate(values));
    Variance variance = new Variance();
    System.out.println("variance: " + variance.evaluate(values));

    // New measures
    Percentile percentile = new Percentile();
    System.out.println("80 percentile value: " + percentile.evaluate(values, 80.0));
    GeometricMean geoMean = new GeometricMean();
    System.out.println("geometric mean: " + geoMean.evaluate(values));
    StandardDeviation stdDev = new StandardDeviation();
    System.out.println("standard dev: " + stdDev.evaluate(values));
    Skewness skewness = new Skewness();
    System.out.println("skewness: " + skewness.evaluate(values));
    Kurtosis kurtosis = new Kurtosis();
    System.out.println("kurtosis: " + kurtosis.evaluate(values));

}
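
Note the two parallel APIs shown above: StatUtils exposes static convenience methods, while classes such as Min, Mean, and Percentile implement the UnivariateStatistic interface, so an instance can be created once and reused via evaluate.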

From source file:com.eviware.loadui.util.statistics.ValueStatistics.java

public synchronized Map<String, Number> getData(long timestamp) {
    long max = 0;
    long min = Long.MAX_VALUE;
    long sum = 0;

    for (Iterator<DataPoint> it = dataPoints.iterator(); it.hasNext();) {
        DataPoint dataPoint = it.next();
        if (dataPoint.timestamp < timestamp - period && period > 0)
            it.remove();
        else {
            sum += dataPoint.value;
            max = Math.max(max, dataPoint.value);
            min = Math.min(min, dataPoint.value);
        }
    }

    int count = dataPoints.size();
    double avg = count > 0 ? (double) sum / count : 0;

    double stdDev = 0;
    double[] dataSet = new double[count];
    if (count > 0) {
        int i = 0;
        for (DataPoint dataPoint : dataPoints) {
            dataSet[i] = dataPoint.value;
            i++;
            stdDev += Math.pow(dataPoint.value - avg, 2);
        }
        stdDev = Math.sqrt(stdDev / count);
    }

    double tps = 0;
    long vps = 0;
    long duration = 0;
    if (count >= 2) {
        int samples = 0;
        long earliest = timestamp - snapshotLength;
        DataPoint point = null;
        while (++samples < count) {
            point = dataPoints.get(count - samples);
            vps += point.value;
            if (point.timestamp < earliest)
                break;
        }

        long timeDelta = timestamp - Preconditions.checkNotNull(point).timestamp;

        timeDelta = timeDelta == 0 ? 1000 : timeDelta;

        vps = vps * 1000 / timeDelta;
        tps = (samples - 1) * 1000.0 / timeDelta;
        duration = dataPoints.get(count - 1).timestamp - dataPoints.get(0).timestamp;
    }

    Percentile perc = new Percentile(90);
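    // Note: the evaluate call below supplies p = 90 explicitly, so the quantile
    // passed to new Percentile(90) is not actually used by that call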

    double percentile = perc.evaluate(dataSet, 90);

    return new ImmutableMap.Builder<String, Number>() //
            .put("Max", max) //
            .put("Min", min == Long.MAX_VALUE ? 0L : min) //
            .put("Avg", avg) //
            .put("Sum", sum) //
            .put("Std-Dev", stdDev) //
            .put("Tps", tps) //
            .put("Avg-Tps", duration > 0L ? 1000L * count / duration : 0) //
            .put("Vps", vps) //
            .put("Avg-Vps", duration > 0L ? 1000L * sum / duration : 0) //
            .put("Percentile", percentile) //
            .put("AvgResponseSize", 1000L * sum / (dataPoints.size() == 0 ? 1 : dataPoints.size())) //
            .build();
}

From source file:fr.ens.transcriptome.corsen.util.StatTest.java

public void testFirstQuartile() {

    Percentile percentile = new Percentile();

    for (int i = 0; i < 1000; i++) {

        List<DataDouble> list = generate();
        assertEquals(percentile.evaluate(Stats.toDouble(list), 25.0), Stats.firstQuartile(list));
    }
}

From source file:fr.ens.transcriptome.corsen.util.StatTest.java

public void testThirdQuartile() {

    Percentile percentile = new Percentile();

    for (int i = 0; i < 1000; i++) {

        List<DataDouble> list = generate();
        assertEquals(percentile.evaluate(Stats.toDouble(list), 75.0), Stats.thirdQuartile(list));
    }
}

From source file:fr.ens.transcriptome.corsen.util.StatTest.java

public void testPercentile() {

    Percentile percentile = new Percentile();

    for (int j = 0; j < 10; j++) {
        for (int i = 0; i < 98; i++) {

            List<DataDouble> list = generate();
            double p = random.nextInt(99) + 1;

            assertEquals(percentile.evaluate(Stats.toDouble(list), p), Stats.percentile(list, p));

        }
    }

}

From source file:edu.cornell.med.icb.goby.modes.CompactFileStatsMode.java

/**
 * Print statistics about a reads file in the Goby compact form.
 *
 * @param file The file to display statistics about
 * @throws IOException if the file cannot be read
 */
private void describeCompactReads(final File file) throws IOException {
    stream.printf("Compact reads filename = %s%n", file);

    // keep the read lengths for computing quantiles
    final DoubleArrayList readLengths = new DoubleArrayList();

    int minLength = Integer.MAX_VALUE;
    int maxLength = Integer.MIN_VALUE;

    int numberOfIdentifiers = 0;
    int numberOfDescriptions = 0;
    int numberOfSequences = 0;
    int numberOfSequencePairs = 0;
    int numberOfQualityScores = 0;
    int numberOfQualityScorePairs = 0;

    long totalReadLength = 0;
    long totalReadLengthPair = 0;
    final DistinctIntValueCounterBitSet allQueryIndices = new DistinctIntValueCounterBitSet();

    ReadsReader reader = null;
    boolean checkedForPaired = false;

    try {
        final long size = file.length();
        reader = new ReadsReader(FileUtils.openInputStream(file));
        for (final Reads.ReadEntry entry : reader) {
            final int readLength = entry.getReadLength();

            for (int i = 0; i < entry.getMetaDataCount(); i++) {
                Reads.MetaData metaData = entry.getMetaData(i);
                stream.printf("meta-data key=%s value=%s%n", metaData.getKey(), metaData.getValue());

            }

            // across this file
            allQueryIndices.observe(entry.getReadIndex());
            totalReadLength += readLength;
            totalReadLengthPair += entry.getReadLengthPair();

            // across all files
            numberOfReads++;
            numberOfDescriptions += entry.hasDescription() ? 1 : 0;
            cumulativeReadLength += readLength;

            if (verbose && entry.hasDescription()) {
                stream.println("Description found: " + entry.getDescription());
            }
            numberOfIdentifiers += entry.hasReadIdentifier() ? 1 : 0;
            if (verbose && entry.hasReadIdentifier()) {
                stream.printf("Identifier found: %s    /  size=%,d%n", entry.getReadIdentifier(), readLength);
            }
            numberOfSequences += entry.hasSequence() && !entry.getSequence().isEmpty() ? 1 : 0;
            final boolean samplePaired = entry.hasSequencePair() && !entry.getSequencePair().isEmpty();
            if (samplePaired) {
                numberOfSequencePairs += 1;
            }
            if (!checkedForPaired) {
                // Check only the very first entry.
                checkedForPaired = true;
                pairedSamples.add(samplePaired);
            }
            if (entry.hasQualityScores() && !entry.getQualityScores().isEmpty()) {
                numberOfQualityScores += 1;
                final int qualityLength = entry.getQualityScores().size();
                minQualityLength = Math.min(minQualityLength, qualityLength);
                maxQualityLength = Math.max(maxQualityLength, qualityLength);
            }

            numberOfQualityScorePairs += entry.hasQualityScoresPair() && !entry.getQualityScoresPair().isEmpty()
                    ? 1
                    : 0;

            // we only need to keep all the read lengths if quantiles are being computed
            if (computeQuantiles) {
                readLengths.add(readLength);
            }
            minLength = Math.min(minLength, readLength);
            maxLength = Math.max(maxLength, readLength);

            // adjust the min/max length of across all files
            minReadLength = Math.min(minReadLength, readLength);
            maxReadLength = Math.max(maxReadLength, readLength);
        }

        stream.printf("Average bytes per entry: %f%n", divide(size, allQueryIndices.count()));
        stream.printf("Average bytes per base: %f%n", divide(size, cumulativeReadLength));
    } finally {
        if (reader != null) {
            reader.close();
        }
    }
    final int numReadEntries = allQueryIndices.count();
    stream.printf("Has identifiers = %s (%,d) %n", numberOfIdentifiers > 0, numberOfIdentifiers);
    stream.printf("Has descriptions = %s (%,d) %n", numberOfDescriptions > 0, numberOfDescriptions);
    stream.printf("Has sequences = %s (%,d) %n", numberOfSequences > 0, numberOfSequences);
    stream.printf("Has sequencePairs = %s (%,d) %n", numberOfSequencePairs > 0, numberOfSequencePairs);
    stream.printf("Has quality scores = %s (%,d) %n", numberOfQualityScores > 0, numberOfQualityScores);
    stream.printf("Has quality score Pairs = %s (%,d) %n", numberOfQualityScorePairs > 0,
            numberOfQualityScorePairs);

    stream.printf("Number of entries = %,d%n", numReadEntries);
    stream.printf("Min read length = %,d%n", numReadEntries > 0 ? minLength : 0);
    stream.printf("Max read length = %,d%n", numReadEntries > 0 ? maxLength : 0);
    stream.printf("Min quality length = %,d%n", numberOfQualityScores > 0 ? minQualityLength : 0);
    stream.printf("Max quality length = %,d%n", numberOfQualityScores > 0 ? maxQualityLength : 0);
    stream.printf("Avg read length = %,d%n", numReadEntries > 0 ? totalReadLength / numReadEntries : 0);
    stream.printf("Avg read pair length = %,d%n",
            numReadEntries > 0 ? totalReadLengthPair / numReadEntries : 0);

    // compute quantiles
    if (computeQuantiles) {
        final Percentile percentile = new Percentile();
        final double[] increasingReadLengths = readLengths.toDoubleArray();
        Arrays.sort(increasingReadLengths);
        stream.printf("Read length quantiles = [ ");
        for (int quantile = 1; quantile < numberOfQuantiles + 1; quantile++) {
            stream.printf("%,f ", percentile.evaluate(increasingReadLengths, quantile));
        }
        stream.printf("]%n");
    }
}
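
A side note on the quantile loop above: in this version of Commons Math, Percentile.evaluate copies the input range and sorts the copy internally, so the explicit Arrays.sort is not strictly required for correctness, although it leaves increasingReadLengths in a convenient order for any later use.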

From source file:org.drugepi.hdps.ZBiasCalculator.java

public static void scoreVariables(List<HdpsVariable> variableList) {
    // copy variables list
    List<HdpsVariable> expSortVariableList = new ArrayList<HdpsVariable>();
    List<HdpsVariable> outcomeSortVariableList = new ArrayList<HdpsVariable>();

    for (HdpsVariable var : variableList) {
        var.zBiasScore = 0;

        if ((var.expAssocRankingVariable != HdpsVariable.INVALID)
                && (var.outcomeAssocRankingVariable != HdpsVariable.INVALID)) {
            expSortVariableList.add(var);
            outcomeSortVariableList.add(var);
        }
    }

    // sort variables by exposure association (strongest first) 
    Collections.sort(expSortVariableList, new HdpsVariableReverseExposureAssociationComparator());

    // sort variables by outcome association (weakest first) 
    Collections.sort(outcomeSortVariableList, new HdpsVariableReverseOutcomeAssociationComparator());
    Collections.reverse(outcomeSortVariableList);

    // create an array of outcome strengths
    double[] outcomeStrengths = new double[outcomeSortVariableList.size()];
    for (int i = 0; i < outcomeStrengths.length; i++)
        outcomeStrengths[i] = outcomeSortVariableList.get(i).outcomeAssocRankingVariable;

    // array that will store breaks between quintiles
    // find the median of outcome strength
    Percentile pctile = new Percentile();

    // Find quintiles 1 through 5 of outcome weakness
    // AMONG the weakest half of the variables.
    // List is sorted strongest first, so the weakest variables 
    // will be at the end
    // quintile 1 = weakest 
    // don't use startsOfQuintile[0]

    double median = pctile.evaluate(outcomeStrengths, 50.0);
    int searchCeiling = Arrays.binarySearch(outcomeStrengths, median);
    if (searchCeiling < 0)
        searchCeiling = -(searchCeiling + 1);

    int[] startsOfQuintile = new int[7];

    for (int quintile = 1; quintile <= 5; quintile++) {
        // find the probability that *begins* this quintile
        double p = (quintile - 1) * 20;
        if (p > 0) {
            double quintileStartP = pctile.evaluate(outcomeStrengths, 0, searchCeiling, p);

            startsOfQuintile[quintile] = Arrays.binarySearch(outcomeStrengths, quintileStartP);
            if (startsOfQuintile[quintile] < 0)
                startsOfQuintile[quintile] = -(startsOfQuintile[quintile] + 1);
        } else
            startsOfQuintile[quintile] = 0;
    }
    startsOfQuintile[6] = searchCeiling;

    // score the variables, BUT make quintile 5 the weakest
    for (int quintile = 1; quintile <= 5; quintile++) {
        for (int i = startsOfQuintile[quintile]; i < startsOfQuintile[quintile + 1]; i++) {
            HdpsVariable v = outcomeSortVariableList.get(i);
            v.zBiasScore = 6 - quintile;
        }
    }

    //      for (HdpsVariable v: outcomeSortVariableList) {
    //         System.out.printf("%s    %1.4f    %d\n", v.varName, v.outcomeAssocRankingVariable, v.zBiasScore);
    //      }
}