Example usage for org.apache.commons.math3.distribution NormalDistribution inverseCumulativeProbability

Introduction

On this page you can find example usages of org.apache.commons.math3.distribution NormalDistribution inverseCumulativeProbability.

Prototype

public double inverseCumulativeProbability(final double p) throws OutOfRangeException 

Document

The default implementation returns:
  • #getSupportLowerBound() for p = 0,
  • #getSupportUpperBound() for p = 1.
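
For orientation, here is a minimal self-contained sketch of the call itself, before the project examples below (the class name InverseCdfDemo is ours, for illustration; the printed values are standard-normal facts):

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.exception.OutOfRangeException;

public class InverseCdfDemo {
    public static void main(String[] args) {
        NormalDistribution standard = new NormalDistribution(0, 1);

        // Quantile (inverse CDF): the 97.5th percentile of N(0, 1) is about 1.96
        System.out.println(standard.inverseCumulativeProbability(0.975));

        // Boundary cases per the javadoc: the support bounds are returned,
        // which for a normal distribution are -Infinity and +Infinity
        System.out.println(standard.inverseCumulativeProbability(0.0));
        System.out.println(standard.inverseCumulativeProbability(1.0));

        // Any p outside [0, 1] throws OutOfRangeException
        try {
            standard.inverseCumulativeProbability(1.5);
        } catch (OutOfRangeException e) {
            System.out.println("p must be in [0, 1]");
        }
    }
}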

Usage

From source file:eu.betaas.taas.securitymanager.taastrustmanager.taastrustcalculator.StatisticsCalculator.java

public boolean calculateRunsTest(double[] values) {
    double alpha = 0.05;
    double n1 = 0.0;
    double n2 = 0.0;
    double runs = 1.0;
    double median = StatUtils.percentile(values, 50);
    boolean positive = true;

    //Starting variable for calculating runs (positive or negative)
    if (values[0] < median) {
        positive = false;
        n2++;
    } else {
        positive = true;
        n1++;
    }

    // Look for runs and count positive/negative values
    for (int i = 1; i < values.length; i++) {
        if (values[i] < median) {
            n2++;
            if (positive) {
                runs++;
                positive = false;
            }
        } else {
            n1++;
            if (!positive) {
                runs++;
                positive = true;
            }
        }
    }

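    // Wald-Wolfowitz runs test: under H0 (random ordering) the number of runs
    // has mean 2*n1*n2/(n1+n2) + 1 and variance
    // 2*n1*n2*(2*n1*n2 - n1 - n2) / ((n1+n2)^2 * (n1+n2 - 1))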
    // Calculate Z value
    double expectedRuns = (2.0 * n1 * n2 / (n1 + n2)) + 1.0;
    double sR = Math
            .sqrt((2.0 * n1 * n2 * (2.0 * n1 * n2 - n1 - n2)) / (Math.pow((n1 + n2), 2) * (n1 + n2 - 1.0)));
    double Z = (runs - expectedRuns) / sR;

    logger.debug("Runs = " + runs);
    logger.debug("Positive values = " + n1);
    logger.debug("Negative values = " + n2);
    logger.debug("Expected Runs = " + expectedRuns);
    logger.debug("sR = " + sR);
    logger.debug("Z score = " + Z);

    if ((runs - expectedRuns) == 0.0) {
        //H1 -> Data was not produced in a random manner (observed runs exactly equal the expected runs)
        logger.debug("Runs = Expected Runs --> Not random data");
        return false;
    }

    // Calculate region of acceptance
    NormalDistribution myNormal = new NormalDistribution(0, 1);
    double myZRight = Math.abs(myNormal.inverseCumulativeProbability(1 - alpha / 2));

    logger.debug("Reject H0 if |Z|> " + myZRight);

    if (Math.abs(Z) > myZRight) {
        //H1 -> Data was not produced in a random manner
        return false;
    }

    //H0 -> Data was produced in a random manner
    return true;
}

From source file:com.addthis.hydra.data.tree.prop.DataReservoir.java

@VisibleForTesting
List<DataTreeNode> modelFitAnomalyDetection(long targetEpoch, int numObservations, boolean doubleToLongBits,
        boolean raw, double percentile, int minMeasurement) {
    int measurement;
    int count = 0;
    int min = Integer.MAX_VALUE;

    if (targetEpoch < 0) {
        return makeDefaultNodes(raw, targetEpoch, numObservations);
    } else if (numObservations <= 0) {
        return makeDefaultNodes(raw, targetEpoch, numObservations);
    } else if (reservoir == null) {
        return makeDefaultNodes(raw, targetEpoch, numObservations);
    } else if (targetEpoch < minEpoch) {
        return makeDefaultNodes(raw, targetEpoch, numObservations);
    } else if (targetEpoch >= minEpoch + reservoir.length) {
        return makeDefaultNodes(raw, targetEpoch, numObservations);
    } else if (numObservations > (reservoir.length - 1)) {
        return makeDefaultNodes(raw, targetEpoch, numObservations);
    }

    /**
     * Fitting to a geometric distribution uses the mean value of the sample.
     *
     * Fitting to a normal distribution uses the Apache Commons Math implementation.
     */
    double mean = 0.0;
    double m2 = 0.0;
    double stddev;
    double gaussianNegative = -1.0;
    Map<Integer, Integer> frequencies = new HashMap<>();
    double threshold;
    double measurePercentile = -100.0;

    int index = reservoir.length - 1;
    long currentEpoch = minEpoch + index;

    while (currentEpoch != targetEpoch) {
        index--;
        currentEpoch--;
    }

    measurement = reservoir[index--];
    currentEpoch--;

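    // Welford's online algorithm: a single pass accumulates the running mean
    // and the sum of squared deviations (m2)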
    while (count < numObservations && index >= 0) {
        int value = reservoir[index--];
        if (value < min) {
            min = value;
        }
        updateFrequencies(frequencies, value);
        count++;
        double delta = value - mean;
        mean += delta / count;
        m2 += delta * (value - mean);
    }

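    // Pad with zeros when fewer than numObservations real values are available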
    while (count < numObservations) {
        int value = 0;
        if (value < min) {
            min = value;
        }
        updateFrequencies(frequencies, value);
        count++;
        double delta = value - mean;
        mean += delta / count;
        m2 += delta * (value - mean);
    }

    if (count < 2) {
        stddev = 0.0;
    } else {
        stddev = Math.sqrt(m2 / count);
    }

    int mode = -1;
    int modeCount = -1;

    for (Map.Entry<Integer, Integer> entry : frequencies.entrySet()) {
        int key = entry.getKey();
        int value = entry.getValue();
        if (value > modeCount || (value == modeCount && key > mode)) {
            mode = key;
            modeCount = value;
        }
    }

    if (mean > 0.0 && stddev > 0.0) {
        gaussianNegative = gaussianNegativeProbability(mean, stddev);
    }

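    // Model choice for the threshold: normal fit for larger means (> 1.0),
    // geometric fit otherwise; degenerate samples short-circuit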
    if (mean == 0.0) {
        threshold = 0.0;
    } else if (stddev == 0.0) {
        threshold = mean;
    } else if (mean > 1.0) {
        NormalDistribution distribution = new NormalDistribution(mean, stddev);
        double badProbability = distribution.cumulativeProbability(0.0);
        double goodProbability = badProbability + (1.0 - badProbability) * (percentile / 100.0);
        threshold = distribution.inverseCumulativeProbability(goodProbability);
        measurePercentile = distribution.probability(0.0, measurement) / (1.0 - badProbability) * 100.0;
    } else {
        double p = 1.0 / (1.0 + mean);
        GeometricDistribution distribution = new GeometricDistribution(p);
        threshold = distribution.inverseCumulativeProbability(percentile / 100.0);
        measurePercentile = distribution.cumulativeProbability(measurement) * 100.0;
    }

    List<DataTreeNode> result = new ArrayList<>();
    VirtualTreeNode vchild, vparent;

    if (measurement >= minMeasurement && (measurement > threshold || percentile == 0.0)) {
        vchild = new VirtualTreeNode("gaussianNegative", doubleToLong(gaussianNegative, doubleToLongBits));
        vparent = new VirtualTreeNode("percentile", doubleToLong(measurePercentile, doubleToLongBits),
                generateSingletonArray(vchild));
        vchild = vparent;
        vparent = new VirtualTreeNode("mode", mode, generateSingletonArray(vchild));
        vchild = vparent;
        vparent = new VirtualTreeNode("stddev", doubleToLong(stddev, doubleToLongBits),
                generateSingletonArray(vchild));
        vchild = vparent;
        vparent = new VirtualTreeNode("mean", doubleToLong(mean, doubleToLongBits),
                generateSingletonArray(vchild));
        vchild = vparent;
        vparent = new VirtualTreeNode("measurement", measurement, generateSingletonArray(vchild));
        vchild = vparent;
        vparent = new VirtualTreeNode("delta", doubleToLong(measurement - threshold, doubleToLongBits),
                generateSingletonArray(vchild));
        result.add(vparent);
        if (raw) {
            addRawObservations(result, targetEpoch, numObservations);
        }
    } else {
        return makeDefaultNodes(raw, targetEpoch, numObservations);
    }
    return result;
}

From source file:edu.cmu.tetrad.data.DataUtils.java

public static DataSet getNonparanormalTransformed(DataSet dataSet) {
    final TetradMatrix data = dataSet.getDoubleData();
    final TetradMatrix X = data.like();
    final double n = dataSet.getNumRows();
    final double delta = 1.0 / (4.0 * Math.pow(n, 0.25) * Math.sqrt(Math.PI * Math.log(n)));

    final NormalDistribution normalDistribution = new NormalDistribution();

    double std = Double.NaN;

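    // Nonparanormal (Gaussian copula) transform: replace each column by its
    // ranks scaled into (0, 1), truncate to [delta, 1 - delta] to keep the
    // quantiles finite, then map through the standard normal inverse CDF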
    for (int j = 0; j < data.columns(); j++) {
        final double[] x1 = data.getColumn(j).toArray();
        double std1 = StatUtils.sd(x1);
        double mu1 = StatUtils.mean(x1);
        double[] x = ranks(data, x1);

        for (int i = 0; i < x.length; i++) {
            x[i] /= n;
            if (x[i] < delta)
                x[i] = delta;
            if (x[i] > (1. - delta))
                x[i] = 1. - delta;
            x[i] = normalDistribution.inverseCumulativeProbability(x[i]);
        }

        if (Double.isNaN(std)) {
            std = StatUtils.sd(x);
        }

        for (int i = 0; i < x.length; i++) {
            x[i] /= std;
            x[i] *= std1;
            x[i] += mu1;
        }

        X.assignColumn(j, new TetradVector(x));
    }
    return ColtDataSet.makeContinuousData(dataSet.getVariables(), X);
}

From source file:jeplus.JEPlusProject.java

private String[] defaultLHSdistributionSample(int n, String funcstr, int type, Random randomsrc) {
    // Trim off brackets
    int start = funcstr.indexOf("(") + 1;
    int end = funcstr.indexOf(")");
    funcstr = funcstr.substring(start, end).trim();

    ArrayList<String> list = new ArrayList<>();
    String[] params = funcstr.split("\\s*,\\s*");
    // For integer/double types, returns N randomized samples conforming
    // to a specified distribution, currently 'gaussian'/'normal'/'n',
    // 'uniform'/'u', 'triangular'/'tr', or 'discrete'/'d'
    // for example: @sample(gaussian, 0, 1.5, 20), with mean, sd and N
    //          or  @sample(uniform, -10, 10, 20), with lb, ub and N
    //          or  @sample(triangular, -1.0, 0.3, 1.0, 20), with lb, mode, ub and N
    //          or  @sample(discrete, option_A, 0.3, option_B, 0.5, option_C, 0.2, 20), with option/probability pairs and N
    String distribution = params[0].toLowerCase();
    switch (distribution) {
    case "uniform":
    case "u":
        // requires lb, ub, n
        double lb = Double.parseDouble(params[1]);
        double ub = Double.parseDouble(params[2]);
        for (int i = 0; i < n; i++) {
            if (type == ParameterItem.DOUBLE) {
                double bin = (ub - lb) / n;
                double v = randomsrc.nextDouble() * bin + lb + i * bin;
                list.add(Double.toString(v));
            } else if (type == ParameterItem.INTEGER) {
                double bin = (ub + 1. - lb) / n;
                double v = randomsrc.nextDouble() * bin + lb + i * bin;
                list.add(Integer.toString((int) Math.floor(v)));
            }
        }
        break;
    case "gaussian":
    case "normal":
    case "n": {
        // requires mean, sd, n
        double mean = Double.parseDouble(params[1]);
        double sd = Double.parseDouble(params[2]);
        NormalDistribution Dist = new NormalDistribution(mean, sd);
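        // Latin Hypercube sampling: split [0, 1] into n equal-probability bins
        // and draw one point uniformly within each bin's quantile range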
        double bin = 1.0 / n;
        for (int i = 0; i < n; i++) {
            double a = Dist.inverseCumulativeProbability((i == 0) ? bin / 10 : i * bin); // lb of each bin
            double b = Dist.inverseCumulativeProbability((i == n - 1) ? 1. - bin / n : (i + 1) * bin); // ub of each bin
            double v = randomsrc.nextDouble() * (b - a) + a;
            if (type == ParameterItem.DOUBLE) {
                list.add(Double.toString(v));
            } else if (type == ParameterItem.INTEGER) {
                // Warning: for integer, binomial distribution should be used.
                // the following function is provided just for convenience
                list.add(Long.toString(Math.round(v)));
            }
        }
        break;
    }
    case "lognormal":
    case "ln": {
        // requires mean, sd, n
        double mean = Double.parseDouble(params[1]);
        double sd = Double.parseDouble(params[2]);
        LogNormalDistribution Dist = new LogNormalDistribution(mean, sd);
        double bin = 1.0 / n;
        for (int i = 0; i < n; i++) {
            double a = Dist.inverseCumulativeProbability((i == 0) ? bin / 10 : i * bin); // lb of each bin
            double b = Dist.inverseCumulativeProbability((i == n - 1) ? 1. - bin / n : (i + 1) * bin); // ub of each bin
            double v = randomsrc.nextDouble() * (b - a) + a;
            if (type == ParameterItem.DOUBLE) {
                list.add(Double.toString(v));
            } else if (type == ParameterItem.INTEGER) {
                // Warning: for integer, binomial distribution should be used.
                // the following function is provided just for convenience
                list.add(Long.toString(Math.round(v)));
            }
        }
        break;
    }
    case "exponential":
    case "e": {
        // requires mean, sd, n
        double mean = Double.parseDouble(params[1]);
        ExponentialDistribution Dist = new ExponentialDistribution(mean);
        double bin = 1.0 / n;
        for (int i = 0; i < n; i++) {
            double a = Dist.inverseCumulativeProbability((i == 0) ? bin / 10 : i * bin); // lb of each bin
            double b = Dist.inverseCumulativeProbability((i == n - 1) ? 1. - bin / n : (i + 1) * bin); // ub of each bin
            double v = randomsrc.nextDouble() * (b - a) + a;
            if (type == ParameterItem.DOUBLE) {
                list.add(Double.toString(v));
            } else if (type == ParameterItem.INTEGER) {
                // Warning: for integer, binomial distribution should be used.
                // the following function is provided just for convenience
                list.add(Long.toString(Math.round(v)));
            }
        }
        break;
    }
    case "triangular":
    case "tr": {
        // requires a(lb), c(mode), b(ub), n
        double a = Double.parseDouble(params[1]);
        double c = Double.parseDouble(params[2]);
        double b = Double.parseDouble(params[3]);
        TriangularDistribution Dist = new TriangularDistribution(a, c, b);
        double bin = 1.0 / n;
        for (int i = 0; i < n; i++) {
            a = Dist.inverseCumulativeProbability(i * bin); // lb of each bin
            b = Dist.inverseCumulativeProbability((i + 1) * bin); // ub of each bin
            double v = randomsrc.nextDouble() * (b - a) + a;
            if (type == ParameterItem.DOUBLE) {
                list.add(Double.toString(v));
            } else if (type == ParameterItem.INTEGER) {
                // Warning: for integer, user defined discrete distribution should be used.
                // the following function is provided just for convenience
                list.add(Long.toString(Math.round(v)));
            }
        }
        break;
    }
    case "discrete":
    case "d": {
        // requires op1, prob1, op2, prob2, ..., n
        int nOptions = params.length / 2 - 1;
        String[] options = new String[nOptions];
        double[] probabilities = new double[nOptions];
        double sum = 0;
        for (int i = 0; i < nOptions; i++) {
            options[i] = params[2 * i + 1];
            try {
                probabilities[i] = Double.parseDouble(params[2 * i + 2]);
            } catch (NumberFormatException nfe) {
                probabilities[i] = 0.1;
            }
            sum += probabilities[i];
        }
        RouletteWheel Wheel = new RouletteWheel(probabilities, randomsrc);
        double bin = sum / n;
        for (int i = 0; i < n; i++) {
            double a = i * bin; // lb of each bin
            double b = (i + 1) * bin; // ub of each bin
            int sel = Wheel.spin(a, b);
            list.add(options[sel]);
        }
        break;
    }
    case "custom":
        break;
    }
    return list.toArray(new String[0]);
}

From source file:org.apache.mahout.math.random.NormalTest.java

@Test
public void testSample() throws Exception {
    double[] data = new double[10001];
    Sampler<Double> sampler = new Normal();
    for (int i = 0; i < data.length; i++) {
        data[i] = sampler.sample();
    }
    Arrays.sort(data);

    NormalDistribution reference = new NormalDistribution(RandomUtils.getRandom().getRandomGenerator(), 0, 1,
            NormalDistribution.DEFAULT_INVERSE_ABSOLUTE_ACCURACY);
    assertEquals("Median", reference.inverseCumulativeProbability(0.5), data[5000], 0.04);
}

From source file:org.deeplearning4j.arbiter.optimize.parameter.TestParameterSpaces.java

@Test
public void testContinuousParameterSpace() {

    ContinuousParameterSpace cps = new ContinuousParameterSpace(0, 1);
    cps.setIndices(0);

    for (int i = 0; i < 10; i++) {
        double d = i / 10.0;
        assertEquals(d, cps.getValue(new double[] { d }), 0.0);
    }

    cps = new ContinuousParameterSpace(10, 20);
    cps.setIndices(0);

    for (int i = 0; i < 10; i++) {
        double d = i / 10.0;
        double exp = d * 10 + 10;
        assertEquals(exp, cps.getValue(new double[] { d }), 0.0);
    }

    cps = new ContinuousParameterSpace(new NormalDistribution(0, 1));
    NormalDistribution nd = new NormalDistribution(0, 1);
    cps.setIndices(0);
    for (int i = 0; i < 11; i++) {
        double d = i / 10.0;
        assertEquals(nd.inverseCumulativeProbability(d), cps.getValue(new double[] { d }), 1e-4);
    }
}

From source file:org.drugis.addis.presentation.ContinuousMeasurementPresentation.java

public String normConfIntervalString() {
    DecimalFormat df = new DecimalFormat("###0.00");
    NormalDistribution distribution = new NormalDistribution(getBean().getMean(), getBean().getStdDev());
    Interval<Double> confInterval;
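    // 95% interval: quantiles at probabilities 0.025 and 0.975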
    confInterval = new Interval<Double>(distribution.inverseCumulativeProbability(0.025),
            distribution.inverseCumulativeProbability(0.975));

    return df.format(getBean().getMean()) + " (" + df.format(confInterval.getLowerBound()) + ", "
            + df.format(confInterval.getUpperBound()) + ")";
}

From source file:org.easotope.shared.math.QQPlot.java

public ArrayList<Point> getPoints() {
    if (needsRecalculation) {
        Statistics statistics = new Statistics();

        for (Point point : points) {
            statistics.addNumber(point.getSampleQuantile());
        }

        NormalDistribution distribution = new NormalDistribution(statistics.getMean(),
                statistics.getStandardDeviationSample());

        Collections.sort(points);

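        // Theoretical quantiles at plotting positions i / (n + 1), which stay
        // strictly inside (0, 1)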
        int i = 1;
        for (Point point : points) {
            double percentile = (double) i / ((double) points.size() + 1.0d);
            point.setTheoreticalQuantile(distribution.inverseCumulativeProbability(percentile));
            i++;
        }
    }

    return points;
}

From source file:org.hawkular.datamining.forecast.PredictionIntervalMultipliers.java

public static double multiplier(int percentage) {
    if (percentage < 0 || percentage > 100) {
        throw new IllegalArgumentException();
    }

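    // e.g. multiplier(95) returns the two-sided z critical value, about 1.96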
    Double multiplier = cashedMultipliers.get(percentage);
    if (multiplier == null) {
        NormalDistribution normalDistribution = new NormalDistribution(0, 1);
        multiplier = normalDistribution.inverseCumulativeProbability(0.5 + (percentage * 0.01) / 2);
        cashedMultipliers.put(percentage, multiplier);
    }

    return multiplier;
}

From source file:org.wso2.extension.siddhi.execution.reorder.AlphaKSlackExtension.java

@Override
protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor,
        StreamEventCloner streamEventCloner, ComplexEventPopulater complexEventPopulater) {
    ComplexEventChunk<StreamEvent> complexEventChunk = new ComplexEventChunk<StreamEvent>(false);
    lock.lock();
    NormalDistribution actualDistribution = new NormalDistribution();

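    // Two-sided z critical value for the configured confidence level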
    double criticalValue = Math.abs(actualDistribution.inverseCumulativeProbability((1 - confidenceLevel) / 2));
    WindowCoverage obj = new WindowCoverage(errorThreshold);
    try {
        while (streamEventChunk.hasNext()) {
            StreamEvent event = streamEventChunk.next();

            if (event.getType() != ComplexEvent.Type.TIMER) {
                streamEventChunk.remove();
                long timestamp = (Long) timestampExecutor.execute(event);
                timestampList.add(timestamp);
                double correlationField = (Double) correlationFieldExecutor.execute(event);
                dataItemList.add(correlationField);
                if (discardFlag) {
                    if (timestamp < lastSentTimestamp) {
                        continue;
                    }
                }

                if (timerFlag) {
                    timerFlag = false;
                    lastScheduledTimestamp = lastScheduledTimestamp + timerDuration;
                    scheduler.notifyAt(lastScheduledTimestamp);
                }

                List<StreamEvent> eventList = primaryTreeMap.get(timestamp);
                if (eventList == null) {
                    eventList = new ArrayList<StreamEvent>();
                    primaryTreeMap.put(timestamp, eventList);
                }
                eventList.add(event);
                counter += 1;
                if (counter > batchSize) {
                    long adjustedBatchsize = Math.round(batchSize * 0.75);
                    alpha = calculateAlpha(obj.calculateWindowCoverageThreshold(criticalValue, dataItemList),
                            obj.calculateRuntimeWindowCoverage(timestampList, adjustedBatchsize));
                    counter = 0;
                    timestampList = new ArrayList<Long>();
                    dataItemList = new ArrayList<Double>();
                }
                if (timestamp > largestTimestamp) {
                    largestTimestamp = timestamp;
                    long minTimestamp = primaryTreeMap.firstKey();
                    long timeDifference = largestTimestamp - minTimestamp;
                    if (timeDifference > k) {
                        if (timeDifference < maxK) {
                            k = Math.round(timeDifference * alpha);
                        } else {
                            k = maxK;
                        }
                    }

                    Iterator<Map.Entry<Long, List<StreamEvent>>> entryIterator = primaryTreeMap.entrySet()
                            .iterator();
                    while (entryIterator.hasNext()) {
                        Map.Entry<Long, List<StreamEvent>> entry = entryIterator.next();
                        List<StreamEvent> list = secondaryTreeMap.get(entry.getKey());
                        if (list != null) {
                            list.addAll(entry.getValue());
                        } else {
                            secondaryTreeMap.put(entry.getKey(), new ArrayList<StreamEvent>(entry.getValue()));
                        }
                    }
                    primaryTreeMap = new TreeMap<Long, List<StreamEvent>>();
                    entryIterator = secondaryTreeMap.entrySet().iterator();
                    while (entryIterator.hasNext()) {
                        Map.Entry<Long, List<StreamEvent>> entry = entryIterator.next();
                        if (entry.getKey() + k <= largestTimestamp) {
                            entryIterator.remove();
                            List<StreamEvent> timeEventList = entry.getValue();
                            lastSentTimestamp = entry.getKey();

                            for (StreamEvent aTimeEventList : timeEventList) {
                                complexEventChunk.add(aTimeEventList);
                            }
                        }
                    }
                }
            } else {
                if (secondaryTreeMap.size() > 0) {
                    for (Map.Entry<Long, List<StreamEvent>> longListEntry : secondaryTreeMap.entrySet()) {
                        List<StreamEvent> timeEventList = longListEntry.getValue();

                        for (StreamEvent aTimeEventList : timeEventList) {
                            complexEventChunk.add(aTimeEventList);
                        }
                    }

                    secondaryTreeMap = new TreeMap<Long, List<StreamEvent>>();

                }

                if (primaryTreeMap.size() > 0) {
                    for (Map.Entry<Long, List<StreamEvent>> longListEntry : primaryTreeMap.entrySet()) {
                        List<StreamEvent> timeEventList = longListEntry.getValue();

                        for (StreamEvent aTimeEventList : timeEventList) {
                            complexEventChunk.add(aTimeEventList);
                        }
                    }

                    primaryTreeMap = new TreeMap<Long, List<StreamEvent>>();
                }

                timerFlag = true;
            }
        }
    } catch (ArrayIndexOutOfBoundsException ec) {
        //This happens due to user specifying an invalid field index.
        throw new ExecutionPlanCreationException("The very first parameter must be an "
                + "Integer with a valid field index (0 to (fieldsLength-1)).");
    }
    lock.unlock();
    nextProcessor.process(complexEventChunk);
}