Example usage for org.apache.commons.math3.distribution TDistribution inverseCumulativeProbability

Introduction

This page collects example usages of org.apache.commons.math3.distribution.TDistribution#inverseCumulativeProbability from open-source projects.

Prototype

public double inverseCumulativeProbability(final double p) throws OutOfRangeException 

Document

The default implementation returns:
  • getSupportLowerBound() for p = 0,
  • getSupportUpperBound() for p = 1.
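
Before the project examples below, here is a minimal sketch of a call, assuming commons-math3 is on the classpath (the class name TDistExample is ours, for illustration only):

import org.apache.commons.math3.distribution.TDistribution;

public class TDistExample {
    public static void main(String[] args) {
        // Student's t distribution with 10 degrees of freedom
        TDistribution t = new TDistribution(10);
        // inverse CDF (quantile) at p = 0.975, the two-sided 95% critical value
        double crit = t.inverseCumulativeProbability(0.975);
        System.out.println(crit); // approximately 2.228
    }
}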

Usage

From source file:edu.jhuapl.bsp.detector.OpenMath.java

public static void main(String[] args) {
    //    double dt[] = {30, 10, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0};
    //    System.out.println (" "+percentile(dt, Double.parseDouble(args[0])));
    double dt[] = { 1, 2.33, 3, 4, 5, 6.33, 7, 8, 9, 10.01, 11, 12, 13, 14, 1, 1, 1, 1, 1, 1, 1 };
    System.out.println("median=" + median(median(reshape(dt, 3, 7))));
    System.out.println("mean=" + mean(mean(reshape(dt, 3, 7))));
    int Baseline = 56;
    double UCL_R[] = new double[Baseline], UCL_Y[] = new double[Baseline];
    for (int k = 0; k < Baseline; k++) {
        TDistribution tdist = new TDistribution(k + 1);
        UCL_R[k] = tdist.inverseCumulativeProbability(1 - 0.01);
        UCL_Y[k] = tdist.inverseCumulativeProbability(1 - 0.05);
    }
    double[] alpha = { 0.4, 0, 0.05 };
    int GUARDBAND = 0;
    double Term1 = alpha[0] / (2 - alpha[0]);
    double Term2[] = new double[Baseline];
    for (int k = 0; k < Term2.length; k++) {
        Term2[k] = 1.0 / (2.0 + k);
    }
    double Term3[] = new double[Baseline];
    for (int k = 0; k < Term3.length; k++) {
        Term3[k] = -2 * pow(1 - alpha[0], GUARDBAND + 1) * (1 - pow(1 - alpha[0], 2 + k)) / (2 + k);
    }
    double[] sigmaCoeff = new double[Baseline];
    for (int k = 0; k < Baseline; k++) {
        sigmaCoeff[k] = Math.sqrt(Term1 + Term2[k] + Term3[k]);
    }
    double[] deltaSigma = new double[Baseline];
    for (int k = 0; k < Baseline; k++) {
        deltaSigma[k] = (alpha[0] / UCL_Y[k])
                * (0.1289 - (0.2414 - 0.1826 * pow(1 - alpha[0], 4)) * log(10 * 0.05));
    }
    for (int k = 0; k < Baseline; k++) {
        System.out.println(String.format("%.3f %.3f %.3f %.3f %.3f", UCL_R[k], UCL_Y[k], Term2[k], Term3[k],
                sigmaCoeff[k]));
    }

}
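
In this demo, the first loop precomputes one-sided t critical values for 1 through Baseline degrees of freedom: UCL_R at the 0.99 quantile and UCL_Y at the 0.95 quantile. Judging by EWMASagesDetector.calculate further down this page, these serve as the red and yellow alert thresholds of an EWMA control chart, which the subsequent terms turn into chart constants.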

From source file:info.debatty.jinu.SummaryStatistics.java

private static double calcMeanCI(final SummaryStatistics stats, final double level) {

    try {
        // Create T Distribution with N-1 degrees of freedom
        TDistribution t_dist = new TDistribution(stats.getN() - 1);
        // Calculate critical value
        double crit_val = t_dist.inverseCumulativeProbability(1.0 - (1 - level) / 2);
        // Calculate confidence interval
        return crit_val * stats.getStandardDeviation() / Math.sqrt(stats.getN());
    } catch (MathIllegalArgumentException e) {
        return Double.NaN;
    }
}
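
For example, at level = 0.95 the quantile requested is 1.0 - (1 - 0.95) / 2 = 0.975, and the returned value is the half-width of the interval, so the interval itself is mean ± the returned value.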

From source file:net.recommenders.rival.evaluation.statistics.ConfidenceInterval.java

/**
 * Adapted from https://gist.github.com/gcardone/5536578.
 *
 * @param alpha probability of incorrectly rejecting the null hypothesis (1
 * - confidence_level)
 * @param df degrees of freedom
 * @param n number of observations
 * @param std standard deviation
 * @param mean mean
 * @return array with the confidence interval: [mean - margin of error, mean
 * + margin of error]
 */
public static double[] getConfidenceInterval(final double alpha, final int df, final int n, final double std,
        final double mean) {
    // Create T Distribution with df degrees of freedom
    TDistribution tDist = new TDistribution(df);
    // Calculate critical value
    double critVal = tDist.inverseCumulativeProbability(1.0 - alpha);
    // Calculate confidence interval
    double ci = critVal * std / Math.sqrt(n);
    double lower = mean - ci;
    double upper = mean + ci;
    double[] interval = new double[] { lower, upper };
    return interval;
}
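
Note that, unlike the two-sided examples above, this helper requests the 1.0 - alpha quantile directly; with the documented alpha = 1 - confidence_level the resulting interval is one-sided. A hypothetical call for a two-sided 95% interval over n observations (n, std and mean as computed by the caller):

// alpha = 0.025 per tail gives the usual two-sided 95% critical value
double[] interval = ConfidenceInterval.getConfidenceInterval(0.025, n - 1, n, std, mean);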

From source file:de.bund.bfr.math.MathUtils.java

public static double get95PercentConfidence(int degreesOfFreedom) {
    TDistribution dist = new TDistribution(degreesOfFreedom);

    return dist.inverseCumulativeProbability(1.0 - 0.05 / 2.0);
}
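
For example, with degreesOfFreedom = 5 the requested 0.975 quantile is approximately 2.571.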

From source file:es.logongas.encuestas.modelo.resultados.InferenciaEstadistica.java

public InferenciaEstadistica(EstadisticaDescriptiva estadisticaDescriptiva, BigDecimal nivelConfianza,
        int numDecimals) {
    this.numDecimals = numDecimals;
    if (nivelConfianza.compareTo(BigDecimal.ZERO) <= 0) {
        throw new IllegalArgumentException("nivelConfianza must be greater than 0");
    }
    if (nivelConfianza.compareTo(BigDecimal.ONE) >= 0) {
        throw new IllegalArgumentException("nivelConfianza must be less than 1");
    }

    TDistribution tDistribution = new TDistribution(estadisticaDescriptiva.getNumMuestras() - 1);
    double t = tDistribution.inverseCumulativeProbability(nivelConfianza.doubleValue());
    BigDecimal delta = new BigDecimal(t * (estadisticaDescriptiva.getDesviacionEstandar().doubleValue()
            / Math.sqrt(estadisticaDescriptiva.getNumMuestras())));

    BigDecimal min = estadisticaDescriptiva.getMedia().subtract(delta).setScale(this.numDecimals,
            RoundingMode.HALF_UP);
    BigDecimal max = estadisticaDescriptiva.getMedia().add(delta).setScale(this.numDecimals,
            RoundingMode.HALF_UP);
    intervaloConfianzaMedia = new IntervaloConfianza(min, max, nivelConfianza);

}
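
Note that the confidence level is passed straight to inverseCumulativeProbability, so the multiplier t is the one-sided quantile at nivelConfianza rather than the two-sided 1 - (1 - nivelConfianza) / 2.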

From source file:com.fay.statics.SummaryStat.java

public double confIntRangeSingle(double errorProb) {
    if (numObs <= 2)
        return Double.NaN;

    double deg = weightSum() - 1.0d;
    TDistribution dist = new TDistribution(deg);
    return Math.abs(dist.inverseCumulativeProbability(errorProb * 0.5d)) * Math.sqrt(variance() / weightSum());
}
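
Here the lower-tail quantile at errorProb * 0.5 is negative, and taking its absolute value gives, by the symmetry of the t distribution, the same critical value as the upper quantile at 1 - errorProb / 2.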

From source file:edu.jhuapl.bsp.detector.EWMASagesDetector.java

private void calculate(double omega) {
    UCL_R = new double[degFreedomRange];
    UCL_Y = new double[degFreedomRange];
    sigmaCoeff = new double[degFreedomRange];
    deltaSigma = new double[degFreedomRange];
    minSigma = new double[degFreedomRange];
    int[] degFreedom = new int[data.length];
    //
    double term1 = omega / (2.0 - omega), term2, term3;
    for (int i = 0; i < degFreedomRange; i++) {
        TDistribution tdist = new TDistribution(i + 1);
        UCL_R[i] = tdist.inverseCumulativeProbability(1 - threshPValueR);
        UCL_Y[i] = tdist.inverseCumulativeProbability(1 - threshPValueY);
        int numBaseline = NUM_FIT_PARAMS + i;
        term2 = 1.0 / numBaseline;
        term3 = -2.0 * Math.pow((1 - omega), (numGuardBand + 1.0)) * (1.0 - Math.pow((1 - omega), numBaseline))
                / numBaseline;
        sigmaCoeff[i] = Math.sqrt(term1 + term2 + term3);
        deltaSigma[i] = (omega / UCL_Y[i])
                * (0.1289 - (0.2414 - 0.1826 * Math.pow((1 - omega), 4)) * Math.log(10.0 * threshPValueY));
        minSigma[i] = (omega / UCL_Y[i]) * (1.0 + 0.5 * Math.pow((1 - omega), 2));
    }
    //
    levels = new double[data.length];
    Arrays.fill(levels, 0.5);
    pvalues = new double[data.length];
    Arrays.fill(pvalues, 0.5);
    expectedData = new double[data.length];
    Arrays.fill(expectedData, 0);
    colors = new double[data.length];
    r2Levels = new double[data.length];
    Arrays.fill(r2Levels, 0);
    switchFlags = new double[data.length];
    Arrays.fill(switchFlags, 0);
    test_stat = new double[data.length];
    Arrays.fill(test_stat, 0);
    //
    FilterBaselineZeros3 zf = new FilterBaselineZeros3();
    //
    double smoothedData, sigma, testBase[], baselineData[];
    ArrayList<Integer> ndxBaseline = new ArrayList<Integer>();
    // initialize the smoothed data
    smoothedData = 0;
    for (int i = 1; i < minBaseline + numGuardBand && i < data.length; i++) {
        smoothedData = omega * data[i] + (1 - omega) * smoothedData;
    }
    // initialize the indices of the baseline period
    for (int i = 0; i < minBaseline - 1; i++) {
        ndxBaseline.add(i);
    }
    // loop through the days on which to make predictions
    for (int i = minBaseline + numGuardBand; i < data.length; i++) {
        // smooth the data using an exponentially weighted moving average (EWMA)
        smoothedData = omega * data[i] + (1 - omega) * smoothedData;
        // lengthen and advance the baseline period
        if (ndxBaseline.isEmpty() || ndxBaseline.get(ndxBaseline.size() - 1) + 1 < maxBaseline) {
            ndxBaseline.add(0, -1);
        }
        // advance the indices of the baseline period
        arrayAdd(ndxBaseline, 1);
        // remove excess consecutive zeros from the baseline data
        testBase = dataInd(data, ndxBaseline);
        if (removeZeros && FilterBaselineZeros3.filterBaselineZerosTest(testBase)) {
            int[] ndxOK = zf.filterBaselineZeros(testBase);
            baselineData = dataInd(testBase, ndxOK);
        } else {
            baselineData = testBase;
        }
        // if the baseline period is all zeros, no prediction can be made
        if (!any(baselineData)) {
            continue;
        }
        // the number of degrees of freedom
        degFreedom[i] = baselineData.length - NUM_FIT_PARAMS;
        // there are not enough data points in the baseline period; no prediction can be made
        if (degFreedom[i] < MIN_DEG_FREEDOM) {
            continue;
        }
        // the predicted current value of the data
        expectedData[i] = mean(baselineData);
        // calculate the test statistic
        // the adjusted standard deviation of the baseline data
        sigma = sigmaCoeff[degFreedom[i] - 1] * std(baselineData) + deltaSigma[degFreedom[i] - 1];
        // don't allow values smaller than MinSigma
        sigma = Math.max(sigma, minSigma[degFreedom[i] - 1]);
        // the test statistic
        test_stat[i] = (smoothedData - expectedData[i]) / sigma;
        if (Math.abs(test_stat[i]) > UCL_R[degFreedom[i] - 1]) {
            // the current value of the smoothed data is too extreme; adjust this value for the next iteration
            smoothedData = expectedData[i] + Math.signum(test_stat[i]) * UCL_R[degFreedom[i] - 1] * sigma;
        }
    }
    for (int i = 0; i < data.length; i++) {
        if (Math.abs(test_stat[i]) > 0.0) {
            TDistribution tdist = new TDistribution(degFreedom[i]);
            pvalues[i] = 1 - tdist.cumulativeProbability(test_stat[i]);
            levels[i] = pvalues[i];
        }
    }
}
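
The final loop converts each nonzero test statistic into a one-sided upper-tail p-value, 1 - F(test_stat), under the t distribution with that day's recorded degrees of freedom.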

From source file:edu.jhuapl.bsp.detector.GSSages.java

private void calculate() {
    FilterBaselineZeros3 zf = new FilterBaselineZeros3();
    boolean bSparseFlag;
    int i;
    double ck, c0[], ytemp[] = new double[0];
    double UCL_R[], UCL_Y[], sigmaCoeff[], deltaSigma[], Sigma = 0, minSigma[];
    UCL_R = new double[Baseline];
    UCL_Y = new double[Baseline];
    int degFreedom;
    minSigma = new double[Baseline];
    //
    levels = ones(data.length, 0.5);
    pvalues = ones(data.length, -9999);
    expectedData = ones(data.length, 0);
    colors = new double[data.length];
    r2Levels = ones(data.length, 0);
    switchFlags = ones(data.length, 0);
    test_stat = ones(data.length, -9999);
    //
    double datak[] = copya(data);
    //    double datakr[][] = reshape (datak, Baseline/7, 7);
    //    c0 = mean(datakr); ck = mean(mean(datakr));
    double datakr[][] = reshape(datak, 7, Baseline / 7);
    c0 = mean(transpose(datakr));
    ck = mean(mean(transpose(datakr)));
    for (int n0 = 0; n0 < c0.length; n0++) {
        c0[n0] = c0[n0] / ck;
    } // starting seasonality coefficients
    //for (int k=0; k<c0.length; k++) { System.out.println (ck+"/"+c0[k]+" "); } System.out.println(); System.out.println();
    double m0 = mean(mean(transpose(datakr))); // starting level (mean)
    //
    double m[] = ones(data.length, m0);
    double b[] = ones(data.length, 0); // initialize trend at 0
    // Depending on the mean of the baseline choose smoothing coefficients
    // alpha is a vector with 3 coefficients - alpha(1) - level alpha(2)- trend
    // alpha(3) is seasonality (corresponds to alpha beta and gamma in the paper
    if (median(reshape(datakr)) == 0) {
        alpha[0] = 0.4;
        alpha[1] = 0;
        alpha[2] = 0;
        c0 = ones(7, 1);
        Adj = 0.2;
        bSparseFlag = true;
    } else {
        if (bAutoCoef == true) {
            if (m0 < 1) {
                alpha[0] = 0.05;
                alpha[1] = 0;
                alpha[2] = 0.1;
            } else if (m0 < 10) {
                alpha[0] = 0.1;
                alpha[1] = 0;
                alpha[2] = 0.05;
            } else if (m0 < 100) {
                alpha[0] = 0.15;
                alpha[1] = 0;
                alpha[2] = 0.05;
            } else /*(m0 >= 100)*/ {
                alpha[0] = 0.4;
                alpha[1] = 0;
                alpha[2] = 0.05;
            }
        }
        bSparseFlag = false;
    }
    int HOL[] = CheckHoliday.isHoliday(startDate, data.length); // Holiday function
    // Format of the parameterList:
    // HOL - vector of holidays
    final int season = 7; // Seasonality
    double y[] = copya(datak);
    //
    final int b0 = 0; // No trend
    // Initialize values
    m[season - 1] = m0;
    b[season - 1] = b0;
    b[2 * season - 1] = b0;
    double c[] = ones(data.length, 1);
    arrayAdd(y, Adj); // add adjustment to the input series
    for (i = 0; i < 7; i++) {
        c[i] = max(c0[i], 0.01);
    } // make sure seasonality coefficients are not 0s
    for (i = 7; i < 14; i++) {
        c[i] = c[i - 7];
    } // initialize seasonality coefficient vector
    //for (int k=0; k<14; k++) { System.out.println (c[k]+" "); } System.out.println(); System.out.println();
    m[2 * season - 1] = m0; // initialize vector of levels
    double denom[] = ones(y.length, 0);
    //
    double y_Pred[] = ones(y.length, 0); // initialize predictions
    int ndxBaseline[] = new int[14];
    for (i = 0; i < 14; i++) {
        ndxBaseline[i] = i + 1;
    } // starting baseline
    //
    for (i = 2 * season + GUARDBAND; i < y.length; i++) { // beginning at day 15 + Guardband
        // use the indices of the entire baseline period
        // checking that there are at least 7 non-zero values "together"
        int ndxOK[] = zf.filterBaselineZeros(dataInd_js(datak, ndxBaseline));
        int ndxBaselineOK[] = dataInd(ndxBaseline, ndxOK);
        if (numel(ndxBaselineOK) >= 7) {
            if (HOL[i] == 1 && !((y[i] < (c[i - 6] * m[i - 6] + denom[i - 1])
                    && y[i] > (c[i - 6] * m[i - 6] - denom[i - 1])) || HOLfac == 1.0)) {
                // if holiday - check if the values within reasonable limits (+/- 1 standard deviation from the mean)
                multFac = HOLfac; // potentially change to use seasonality parameter from the last weekend
            } else {
                multFac = c[i - season];
            }
            // If mean of the recent "good" data all of a sudden is greater
            // than 5 - start updating seasonal coefficients;
            //System.out.println (datak.length+" "+ndxBaselineOK.length+" "+datakr.length+" "+datakr[0].length+" "+ndxBaseline[ndxBaseline.length-1]);
            if ((mean(dataInd_js(datak, ndxBaselineOK)) >= 5) && (median(reshape(datakr)) == 0)) {
                //System.out.println ("1");
                alpha[2] = 0.05;
                bSparseFlag = false;
                if ((numel(ndxBaselineOK) >= 14) && (ndxBaselineOK[ndxBaselineOK.length - 1]
                        - ndxBaselineOK[ndxBaselineOK.length - 1 - 13] == 13)) {
                    datakr = reshape(dataInd_js(y,
                            dataVec(ndxBaselineOK, ndxBaselineOK.length - 1 - 13, ndxBaselineOK.length - 1)), 7,
                            2);
                    c0 = mean(transpose(datakr));
                    ck = mean(mean(transpose(datakr)));
                    for (int k = i - season, n0 = 0; k < i; k++, n0++) {
                        c[k] = c0[n0] / ck;
                    }
                }
            }
            // Updating of parameters
            m[i] = alpha[0] * y[i] / c[i - season] + (1 - alpha[0]) * (m[i - 1] + b[i - 1]);
            // Level for counts cannot become negative
            m[i] = max(m[i], 0.5);
            b[i] = alpha[1] * (m[i] - m[i - 1]) + (1 - alpha[1]) * b[i - 1];
            y_Pred[i] = max(multFac * (m[i] + GUARDBAND * b[i]), 0); // prediction
            if (m[i - 1] == 0) {
                c[i] = alpha[2] * y[i] + (1 - alpha[2]) * c[i - season];
            } else {
                c[i] = alpha[2] * y[i] / m[i - 1] + (1 - alpha[2]) * c[i - season];
            }
        } else { // In case baseline is not complete keep the old parameters
            m[i] = m[i - 1];
            b[i] = b[i - 1];
            c[i] = c[i - season];
        }
        //
        int memList2[] = findLT(arrayAbs(arrayAdd2(dataVec(c, i - 6, i), -c[i])), 0.1);
        int memList3[] = arrayMod(arrayAdd2(memList2, i + 2), 7);
        int memList[] = ismember(arrayMod(ndxBaselineOK, 7), memList3);
        int HOLlist[] = dataInd_js(HOL, ndxBaselineOK);
        for (int k = 0; k < ndxBaselineOK.length; k++) {
            if (HOLlist[k] == 1) {
                memList[k] = 0;
            }
        }
        memList = find(memList);
        if (numel(dataInd_js(y, dataInd(ndxBaseline, memList))) <= 4) {
            //System.out.println ("2");
            denom[i] = std(dataInd_js(y, ndxBaseline));
        } else {
            //System.out.println ("3");
            denom[i] = std(dataInd_js(y, dataInd(ndxBaselineOK, memList)));
        }
        // For EWMA switch
        // the term due to the smoothed data
        if (bSparseFlag) {
            for (int k = 0; k < Baseline; k++) {
                TDistribution tdist = new TDistribution(k + 1);
                UCL_R[k] = tdist.inverseCumulativeProbability(1 - 0.01);
                UCL_Y[k] = tdist.inverseCumulativeProbability(1 - 0.05);
            }
            degFreedom = numel(ndxBaselineOK) - 1;
            double Term1 = alpha[0] / (2.0 - alpha[0]);
            // the term due to the baseline mean
            double Term2[] = new double[Baseline];
            for (int k = 0; k < Term2.length; k++) {
                Term2[k] = 1.0 / (2.0 + k);
            }
            // the term due to twice their covariance
            double Term3[] = new double[Baseline];
            for (int k = 0; k < Term3.length; k++) {
                Term3[k] = -2 * pow(1 - alpha[0], GUARDBAND + 1) * (1 - pow(1 - alpha[0], 2.0 + k)) / (2.0 + k);
            }
            // the correction factor for sigma
            sigmaCoeff = new double[Baseline];
            for (int k = 0; k < Baseline; k++) {
                sigmaCoeff[k] = Math.sqrt(Term1 + Term2[k] + Term3[k]);
            }
            deltaSigma = new double[Baseline];
            for (int k = 0; k < Baseline; k++) {
                deltaSigma[k] = (alpha[0] / UCL_Y[k])
                        * (0.1289 - (0.2414 - 0.1826 * pow(1 - alpha[0], 4)) * log(10 * 0.05)); // hard-coded yellow threshold to 0.05
            }
            if (!any(dataInd_js(y, ndxBaselineOK))) {
                Sigma = 0;
            } else {
                Sigma = sigmaCoeff[degFreedom - 1] * std(dataInd_js(y, ndxBaselineOK))
                        + deltaSigma[degFreedom - 1];
                for (int k = 0; k < Baseline; k++) {
                    minSigma[k] = (alpha[0] / UCL_Y[k]) * (1 + 0.5 * (1 - alpha[0]) * (1 - alpha[0]));
                }
                Sigma = max(Sigma, minSigma[degFreedom - 1]);
            }
        }
        //
        denom[i] = max(denom[i], 0.5);
        // making sure that denominator is big enough
        // Don't update the mean if seasonal coefficient is small, or
        // there is a drastic jump in the mean
        if (c[i] < 0.05 || m[i] / m[i - 1] > 10) {
            m[i] = m[i - 1];
        }
        // Don't update parameters if
        // 1) prediction error is too big
        // 2) prediction is negative
        // 3) Holiday
        // 4) Day after holiday
        if (((abs(y_Pred[i] - y[i] + Adj) / denom[i] > APE_LIMIT)
                && (numel(ndxBaseline) == numel(ndxBaselineOK))
                && (y[i] > c[8 + (i % 7) - 1] * OpenMath.percentile(dataInd_js(y, ndxBaseline), 95.0)))
                || HOL[i] == 1) {
            m[i] = m[i - 1];
            b[i] = b[i - 1];
            c[i] = c[i - season];
        }
        test_stat[i] = (y[i] - y_Pred[i] - Adj) / denom[i]; // calculate the test statistic (removing the adjustment added above)
        if (bSparseFlag) {
            ytemp = dataInd_js(y, ndxBaselineOK);
            Sigma = Math.max(Sigma, 0.5);
            test_stat[i] = (m[i] - mean(ytemp) + Adj) / Sigma;
        }
        if ((y[i] - Adj) == 0) { // if value is 0 to begin with - return 0 for the statistic
            test_stat[i] = 0;
        }
        if (ndxBaselineOK.length == 0) {
            test_stat[i] = 0;
        }
        pvalues[i] = 1 - normcdf(test_stat[i], 0, 1); // Using Gaussian (normal) 0,1 distribution table value
        if (ndxBaseline[ndxBaseline.length - 1] < Baseline) {
            // increase baseline vector
            int bt[] = ndxBaseline;
            ndxBaseline = new int[bt.length + 1];
            ndxBaseline[0] = 0;
            for (int k = 1; k < ndxBaseline.length; k++) {
                ndxBaseline[k] = bt[k - 1];
            }
        }
        arrayAdd(ndxBaseline, 1); // go forward by one day
        //System.out.println (String.format ("%.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f %.4f",
        //  denom[i], pvalues[i], m[i], c[i], test_stat[i], y_Pred[i], (bSparseFlag?1.0:0.0), m0, multFac));
    }
    arrayAdd(y_Pred, -Adj); // remove adjustment from prediction
    //
    for (i = 0; i < data.length; i++) {
        levels[i] = pvalues[i];
    }
    expectedData = y_Pred;
}
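
When bSparseFlag is set, the block above recomputes the same EWMA control-chart constants (UCL_R, UCL_Y, sigmaCoeff, deltaSigma, minSigma) seen in OpenMath.main and EWMASagesDetector.calculate, and the test statistic is replaced by the EWMA form (m[i] - mean(ytemp) + Adj) / Sigma.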

From source file:experiment.SimpleRegression_bug.java

/**
 * Returns the half-width of a (100-100*alpha)% confidence interval for
 * the slope estimate.
 * <p>
 * The (100-100*alpha)% confidence interval is </p>
 * <p>
 * <code>(getSlope() - getSlopeConfidenceInterval(),
 * getSlope() + getSlopeConfidenceInterval())</code></p>
 * <p>
 * To request, for example, a 99% confidence interval, use
 * <code>alpha = .01</code></p>
 * <p>
 * <strong>Usage Note</strong>:<br>
 * The validity of this statistic depends on the assumption that the
 * observations included in the model are drawn from a
 * <a href="http://mathworld.wolfram.com/BivariateNormalDistribution.html">
 * Bivariate Normal Distribution</a>.</p>
 * <p>
 * <strong> Preconditions:</strong><ul>
 * <li>If there are fewer than <strong>three</strong> observations in the
 * model, or if there is no variation in x, this returns
 * <code>Double.NaN</code>.
 * </li>
 * <li><code>(0 < alpha < 1)</code>; otherwise an
 * <code>OutOfRangeException</code> is thrown.
 * </li></ul></p>
 *
 * @param alpha the desired significance level
 * @return half-width of the (100-100*alpha)% confidence interval for the slope estimate
 * @throws OutOfRangeException if the confidence interval can not be computed.
 */
public double getSlopeConfidenceInterval(final double alpha) throws OutOfRangeException {
    if (n < 3) {
        return Double.NaN;
    }
    if (alpha >= 1 || alpha <= 0) {
        throw new OutOfRangeException(LocalizedFormats.SIGNIFICANCE_LEVEL, alpha, 0, 1);
    }
    // No advertised NotStrictlyPositiveException here - will return NaN above
    TDistribution distribution = new TDistribution(n - 2);
    return getSlopeStdErr() * distribution.inverseCumulativeProbability(1d - alpha / 2d);
}
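
For example, alpha = 0.05 requests the 1 - 0.05/2 = 0.975 quantile of the t distribution with n - 2 degrees of freedom, so the returned half-width corresponds to a 95% confidence interval for the slope.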

From source file:de.bund.bfr.knime.pmm.common.chart.Plotable.java

public double[][] getFunctionErrors(String paramX, String paramY, String unitX, String unitY, String transformX,
        String transformY, double minX, double maxX, double minY, double maxY, Map<String, Integer> choice)
        throws ConvertException {
    if (function == null) {
        return null;
    }

    if (!function.startsWith(paramY + "=") || !functionArguments.containsKey(paramX)) {
        return null;
    }

    double[][] points = new double[2][FUNCTION_STEPS];
    DJep parser = MathUtilities.createParser();
    Node f = null;
    List<String> paramList = new ArrayList<>(covariances.keySet());

    for (String param : functionParameters.keySet()) {
        if (functionParameters.get(param) == null) {
            return null;
        }

        parser.addConstant(param, functionParameters.get(param));
    }

    if (paramList.isEmpty()) {
        return null;
    }

    for (String param : paramList) {
        if (covariances.get(param) == null) {
            return null;
        }

        for (String param2 : paramList) {
            if (covariances.get(param).get(param2) == null) {
                return null;
            }
        }
    }

    for (String param : functionArguments.keySet()) {
        if (!param.equals(paramX)) {
            parser.addConstant(param, functionArguments.get(param).get(choice.get(param)));
        }
    }

    Map<String, Node> derivatives = new LinkedHashMap<>();

    parser.addVariable(paramX, 0.0);

    try {
        f = parser.parse(function.replace(paramY + "=", ""));

        for (String param : paramList) {
            derivatives.put(param, parser.differentiate(f, param));
        }
    } catch (ParseException e) {
        e.printStackTrace();
    }

    for (int n = 0; n < FUNCTION_STEPS; n++) {
        double x = minX + (double) n / (double) (FUNCTION_STEPS - 1) * (maxX - minX);

        parser.setVarValue(paramX, convertFromUnit(paramX, inverseTransform(x, transformX), unitX));

        try {
            Double y = 0.0;
            boolean failed = false;

            for (String param : paramList) {
                Object obj = parser.evaluate(derivatives.get(param));

                if (!(obj instanceof Double)) {
                    failed = true;
                    break;
                }

                y += (Double) obj * (Double) obj * covariances.get(param).get(param);
            }

            for (int i = 0; i < paramList.size() - 1; i++) {
                for (int j = i + 1; j < paramList.size(); j++) {
                    Object obj1 = parser.evaluate(derivatives.get(paramList.get(i)));
                    Object obj2 = parser.evaluate(derivatives.get(paramList.get(j)));

                    if (!(obj1 instanceof Double) || !(obj2 instanceof Double)) {
                        failed = true;
                        break;
                    }

                    double cov = covariances.get(paramList.get(i)).get(paramList.get(j));

                    y += 2.0 * (Double) obj1 * (Double) obj2 * cov;
                }
            }

            points[0][n] = x;

            if (!failed) {
                // 95% interval
                TDistribution dist = new TDistribution(degreesOfFreedom);

                y = Math.sqrt(y) * dist.inverseCumulativeProbability(1.0 - 0.05 / 2.0);
                y = convertToUnit(paramY, y, unitY);
                y = transform(y, transformY);

                if (y != null) {
                    points[1][n] = y;
                } else {
                    points[1][n] = Double.NaN;
                }
            } else {
                points[1][n] = Double.NaN;
            }
        } catch (ParseException e) {
            e.printStackTrace();
        } catch (ClassCastException e) {
            e.printStackTrace();
        }
    }

    return points;
}
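
The two inner loops implement first-order (delta-method) error propagation: the prediction variance is approximated as the double sum over parameters of (df/dp_i)(df/dp_j)·cov(p_i, p_j), accumulated here as the diagonal variance terms plus twice each off-diagonal covariance term. Its square root is then scaled by the 0.975 t quantile to obtain a 95% error band around the fitted curve.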