Example usage for org.apache.commons.math3.distribution FDistribution inverseCumulativeProbability

List of usage examples for org.apache.commons.math3.distribution FDistribution inverseCumulativeProbability

Introduction

In this page you can find the example usage for org.apache.commons.math3.distribution FDistribution inverseCumulativeProbability.

Prototype

public double inverseCumulativeProbability(final double p) throws OutOfRangeException 

Source Link

Document

The default implementation returns:
  • getSupportLowerBound() for p = 0,
  • getSupportUpperBound() for p = 1.

Usage

From source file:com.itemanalysis.psychometrics.reliability.ReliabilityInterval.java

public double[] confidenceInterval() {
    // 95% confidence interval for the reliability estimate (Feldt-style),
    // based on the central F distribution with
    //   numerator df   = N - 1
    //   denominator df = (N - 1) * (k - 1)
    // where N is the sample size and k the number of variables/items.
    double examinees = sampleSize;
    double items = numberOfVariables;
    double numeratorDf = examinees - 1.0;
    double denominatorDf = (examinees - 1.0) * (items - 1.0);
    FDistribution fDistribution = new FDistribution(numeratorDf, denominatorDf);
    double[] bounds = new double[2];
    try {
        // Lower bound uses the 0.975 quantile, upper bound the 0.025 quantile;
        // larger F quantiles shrink (1 - rel) * F less, hence the inversion.
        bounds[0] = 1.0 - ((1.0 - reliability.value()) * fDistribution.inverseCumulativeProbability(0.975));
        bounds[1] = 1.0 - ((1.0 - reliability.value()) * fDistribution.inverseCumulativeProbability(0.025));
    } catch (Exception ex) {
        // Best-effort: an out-of-range probability or invalid df yields NaN bounds.
        bounds[0] = Double.NaN;
        bounds[1] = Double.NaN;
    }
    return bounds;
}

From source file:com.itemanalysis.psychometrics.reliability.AbstractScoreReliability.java

public double[] confidenceInterval() {
    // 95% confidence interval for the score reliability (Feldt-style).
    // Degrees of freedom follow the usual construction:
    //   df1 = N - 1, df2 = (N - 1) * (k - 1),
    // with N examinees and k items.
    double examinees = matrix.getMaxSampleSize();
    double items = (double) nItems;
    double df1 = examinees - 1.0;
    double df2 = (examinees - 1.0) * (items - 1.0);
    FDistribution fDistribution = new FDistribution(df1, df2);
    double[] interval = new double[2];
    try {
        // Lower bound from the 0.975 quantile, upper bound from the 0.025 quantile.
        interval[0] = 1.0 - ((1.0 - this.value()) * fDistribution.inverseCumulativeProbability(0.975));
        interval[1] = 1.0 - ((1.0 - this.value()) * fDistribution.inverseCumulativeProbability(0.025));
    } catch (Exception ex) {
        // Best-effort: invalid df or probability arguments yield NaN bounds.
        interval[0] = Double.NaN;
        interval[1] = Double.NaN;
    }
    return interval;
}

From source file:edu.cudenver.bios.power.glmm.GLMMTest.java

/**
 * Calculate the critical F value under the specified distribution
 *
 * @param type distribution type/* www.j a  v  a  2s  . com*/
 * @param alpha type I error level
 * @return critical F
 *
 */
public double getCriticalF(DistributionType type, double alpha) throws IllegalArgumentException {
    double ndf = getNumeratorDF(type);
    if (Double.isNaN(ndf)) {
        throw new IllegalArgumentException("numerator DF is NaN");
    }
    double ddf = getDenominatorDF(type);
    if (Double.isNaN(ddf)) {
        throw new IllegalArgumentException("denominator DF is NaN");
    }

    FDistribution centralFDist = new FDistribution(ndf, ddf);
    double fcrit = centralFDist.inverseCumulativeProbability(1 - alpha);
    return fcrit;
}

From source file:edu.stanford.cfuller.imageanalysistools.filter.VariableSizeMeanFilter.java

/**
 * Decides whether an octtree node should be subdivided, using a two-sided
 * F-test on the ratio of the image variance to the Laplacian-filtered image
 * variance within the node's box. Subdivision is requested when the two
 * variances differ significantly (ratio falls in either tail of F(n-1, n-1)
 * at cutoff 1e-4 per tail).
 *
 * @param node the octtree node whose region is tested
 * @param im the original image
 * @param laplacianFiltered the Laplacian-filtered version of the image
 * @return true if the node should be subdivided, false otherwise
 */
protected boolean shouldSubDivide(OcttreeNode node, Image im, Image laplacianFiltered) {

    im.setBoxOfInterest(node.getBoxMin(), node.getBoxMax());
    laplacianFiltered.setBoxOfInterest(node.getBoxMin(), node.getBoxMax());

    double l_sum = 0;
    double sum = 0;
    double count = 0;

    for (ImageCoordinate ic : im) {
        l_sum += laplacianFiltered.getValue(ic);
        sum += im.getValue(ic);
        count++;

    }

    if (count == 1) {
        // Fix: the original early return left the boxes of interest set,
        // unlike every other exit path. Clear them before returning.
        im.clearBoxOfInterest();
        laplacianFiltered.clearBoxOfInterest();
        return false;
    }

    // Convert sums to means.
    l_sum /= count;
    sum /= count;

    double l_var = 0;
    double var = 0;

    // Sample variances (denominator count - 1) about the means.
    for (ImageCoordinate ic : im) {

        l_var += Math.pow(laplacianFiltered.getValue(ic) - l_sum, 2);
        var += Math.pow(im.getValue(ic) - sum, 2);

    }

    l_var /= (count - 1);
    var /= (count - 1);

    im.clearBoxOfInterest();
    laplacianFiltered.clearBoxOfInterest();

    double cutoff = 0.0001;

    double smallerVar = var < l_var ? var : l_var;
    double largerVar = var > l_var ? var : l_var;
    try {

        FDistribution f = new FDistribution(count - 1, count - 1);
        double valueAtLowerCutoff = f.inverseCumulativeProbability(cutoff);
        double valueAtUpperCutoff = f.inverseCumulativeProbability(1 - cutoff);
        // Fix: the original tested smallerVar/largerVar (always <= 1) against the
        // upper quantile (always > 1 for F(d,d), whose median is 1), so that
        // disjunct could never fire. Testing largerVar/smallerVar against the
        // upper quantile is the intended upper-tail check; by the reciprocal
        // identity F^-1(c; d, d) = 1 / F^-1(1-c; d, d) it is exactly equivalent
        // to the lower-tail check, so the overall result is unchanged.
        boolean result = (largerVar / smallerVar > valueAtUpperCutoff
                || smallerVar / largerVar < valueAtLowerCutoff);
        return result;

    } catch (MathIllegalArgumentException e) {
        LoggingUtilities.getLogger()
                .severe("Exception while calculating variable size mean QO partition: " + e.getMessage());
        e.printStackTrace();
        return false;
    }
}