List of usage examples for org.apache.commons.math3.distribution NormalDistribution cumulativeProbability
public double cumulativeProbability(double x)
From source file:com.example.PJS.java
/**
 * Standard normal cumulative distribution function, backed by the Apache
 * Commons Math library
 * (http://commons.apache.org/proper/commons-math/apidocs/index.html).
 *
 * @param z the z-score at which to evaluate the CDF
 * @return P(Z <= z) for a standard normal random variable Z
 */
public static double CNDF(double z) {
    NormalDistribution standardNormal = new NormalDistribution();
    return standardNormal.cumulativeProbability(z);
}
From source file:eu.crisis_economics.abm.ratings.NaiveMertonDistanceToDefaultAlgorithm.java
/**
 * Computes the (Naive) Merton Distance-to-Default measure for an agent
 * over a specified forecast timeframe T.
 *
 * @param debtFaceValue (F)
 *        The face value of the agent debt.
 * @param equityVolatility
 *        The volatility of agent equity.
 * @param equity (E)
 *        The current agent equity.
 * @param expectedAssetReturn
 *        The asset return of the agent during the last forecast window.
 * @param forecastHorizon (T)
 *        The period over which to forecast agent default.
 * @return
 *        A Pair<Double, Double> in the format:
 *        Pair<Naive Merton Distance-to-Default, Naive Merton
 *        Probability-of-Default>, in the period of the forecast timeframe.
 *
 *        It is not permissible for: both the debt face value (F) and equity
 *        (E) arguments to be simultaneously zero; for the debt face value
 *        (F) to be negative; or for the forecast horizon (T) to be zero or
 *        negative. If the debt face value is zero and equity is nonzero,
 *        then the distance to default is taken to be +Infinity.
 */
static Pair<Double, Double> compute(final double debtFaceValue, final double equityVolatility,
        final double equity, final double expectedAssetReturn, final double forecastHorizon) {
    Preconditions.checkArgument(equity != 0. || debtFaceValue > 0.);
    Preconditions.checkArgument(forecastHorizon > 0.);
    Preconditions.checkArgument(debtFaceValue >= 0.);
    // Naive debt volatility proxy: 5% plus a quarter of the equity volatility.
    final double sigmaDebt = .05 + .25 * equityVolatility;
    // Value-weighted blend of equity and debt volatilities.
    final double sigmaTotal = equityVolatility * equity / (equity + debtFaceValue)
            + sigmaDebt * debtFaceValue / (equity + debtFaceValue);
    double distanceToDefault = Math.log((equity + debtFaceValue) / debtFaceValue)
            + (expectedAssetReturn - .5 * sigmaTotal * sigmaTotal) * forecastHorizon;
    distanceToDefault /= Math.sqrt(forecastHorizon) * sigmaTotal;
    // The probability of default is the lower normal tail beyond the DD.
    final NormalDistribution standardNormal = new NormalDistribution();
    final double defaultProbability = standardNormal.cumulativeProbability(-distanceToDefault);
    return Pair.create(distanceToDefault, defaultProbability);
}
From source file:me.datamining.cluster.STING.java
/**
 * Evaluates the normal cumulative distribution function with the given mean
 * and standard deviation at {@code value}.
 *
 * <p>NOTE(review): despite the name, this returns the CDF (cumulative
 * probability), not the probability density function. The name is kept for
 * backward compatibility with existing callers.
 *
 * @param value the point at which to evaluate the CDF
 * @param mean  the mean of the normal distribution
 * @param std   the standard deviation of the normal distribution
 * @return the cumulative probability at {@code value}, or 0 when
 *         {@code std <= 0} or the evaluation fails
 */
public static double standardPDF(double value, double mean, double std) {
    // A non-positive standard deviation cannot parameterize a normal
    // distribution (the constructor would throw); treat it as "no spread",
    // consistent with the original std == 0 handling.
    if (std <= 0) {
        return 0;
    }
    NormalDistribution dist = new NormalDistribution(mean, std);
    try {
        return dist.cumulativeProbability(value);
    } catch (NumberIsTooLargeException e) {
        // Preserve the original best-effort behavior: fall back to 0.
        return 0;
    }
}
From source file:edu.jhuapl.bsp.detector.OpenMath.java
/**
 * Normal cumulative distribution function with mean {@code m} and standard
 * deviation {@code s}, evaluated at {@code stat}.
 *
 * @param stat the point at which to evaluate the CDF
 * @param m    the distribution mean
 * @param s    the distribution standard deviation
 * @return P(X <= stat) for X ~ N(m, s^2)
 */
public static double normcdf(double stat, double m, double s) {
    NormalDistribution dist = new NormalDistribution(m, s);
    return dist.cumulativeProbability(stat);
}
From source file:io.druid.query.aggregation.teststats.PvaluefromZscorePostAggregator.java
/**
 * Standard normal CDF at {@code x}, returning {@code NaN} instead of
 * propagating an argument error from the underlying distribution.
 *
 * @param x the point at which to evaluate the standard normal CDF
 * @return the cumulative probability, or {@code Double.NaN} on invalid input
 */
private double cumulativeProbability(double x) {
    try {
        return new NormalDistribution().cumulativeProbability(x);
    } catch (IllegalArgumentException ex) {
        // Invalid input yields NaN rather than an exception.
        return Double.NaN;
    }
}
From source file:com.itemanalysis.psychometrics.statistics.RobustZ.java
private void compute(double x, double median, double iqr) { NormalDistribution normal = new NormalDistribution(); if (iqr > 0.0) { z = (x - median) / (0.74 * iqr); pvalue = normal.cumulativeProbability(z); if (z > 0.0) { pvalue = 1.0 - pvalue;//from w ww.ja v a 2 s . c o m } } }
From source file:com.ibm.iot.iotspark.IoTZScore.java
/**
 * Converts a z-score to its percentile under the standard normal
 * distribution (e.g. a z-score of 0 maps to the 50th percentile).
 *
 * @param zScore the standard score to convert
 * @return the percentile, in the open range (0, 100)
 */
public double zScoreToPercentile(double zScore) {
    NormalDistribution standardNormal = new NormalDistribution();
    return standardNormal.cumulativeProbability(zScore) * 100;
}
From source file:blackscholes.EuropeanCall.java
/**
 * Prices this European call via a Black-Scholes-style formula using the
 * instance fields S (spot), X (strike), r (risk-free rate), b (cost of
 * carry), sigma (volatility) and T (time to expiry).
 *
 * <p>NOTE(review): the strike leg is discounted by exp((b - r) * T) rather
 * than the textbook exp(-r * T) of the generalized Black-Scholes model;
 * confirm this matches the intended cost-of-carry convention.
 *
 * @return the call value S * N(d1) - X * N(d2) * exp((b - r) * T)
 */
public double ValuationMethod() {
    // Removed unused local `_b = r - b` from the original implementation.
    NormalDistribution standardNormal = new NormalDistribution();
    double d1 = (Math.log(S / X) + (b + 0.5 * Math.pow(sigma, 2)) * T) / (sigma * Math.sqrt(T));
    double d2 = d1 - sigma * Math.sqrt(T);
    double Nd1 = standardNormal.cumulativeProbability(d1);
    double Nd2 = standardNormal.cumulativeProbability(d2);
    return S * Nd1 - X * Nd2 * Math.exp((b - r) * T);
}
From source file:MannWhitneyUTest.java
/** * @param Umin//from ww w . j av a 2s . co m * smallest Mann-Whitney U value * @param n1 * number of subjects in first sample * @param n2 * number of subjects in second sample * @return two-sided asymptotic p-value * @throws ConvergenceException * if the p-value can not be computed due to a convergence error * @throws MaxCountExceededException * if the maximum number of iterations is exceeded */ private double calculateAsymptoticPValue(final double Umin, final int n1, final int n2) throws ConvergenceException, MaxCountExceededException { System.out.println("6"); /* * long multiplication to avoid overflow (double not used due to * efficiency and to avoid precision loss) */ final long n1n2prod = (long) n1 * n2; // http://en.wikipedia.org/wiki/Mann%E2%80%93Whitney_U#Normal_approximation final double EU = n1n2prod / 2.0; final double VarU = n1n2prod * (n1 + n2 + 1) / 12.0; final double z = (Umin - EU) / FastMath.sqrt(VarU); // No try-catch or advertised exception because args are valid // pass a null rng to avoid unneeded overhead as we will not sample from // this distribution final NormalDistribution standardNormal = new NormalDistribution(null, 0, 1); return 2 * standardNormal.cumulativeProbability(z); }
From source file:de.bund.bfr.math.LodFunction.java
/**
 * Censored-normal log-likelihood of the target values given the parameter
 * vector {@code point}.
 *
 * <p>The parameter matching {@code sdParam} supplies the (absolute)
 * standard deviation; every other parameter is bound as a variable in the
 * expression parser. For each observation the parsed function gives the
 * mean of a normal distribution: observations above the level of detection
 * contribute their log-density, while censored observations contribute the
 * log of the CDF at the detection limit.
 *
 * @param point the current parameter values, ordered like {@code parameters}
 * @return the summed log-likelihood, or {@code Double.NaN} when the standard
 *         deviation is zero, the function evaluates to a non-finite value,
 *         or the expression fails to parse
 */
@Override
public double value(double[] point) {
    double sd = Double.NaN;

    // Bind every parameter except the sd parameter into the parser.
    for (int i = 0; i < nParams; i++) {
        String name = parameters.get(i);
        if (name.equals(sdParam)) {
            sd = Math.abs(point[i]);
        } else {
            parser.setVarValue(name, point[i]);
        }
    }

    if (sd == 0.0) {
        return Double.NaN;
    }

    double logLikelihood = 0.0;

    for (int i = 0; i < nValues; i++) {
        // Bind the i-th value of every independent variable.
        for (Map.Entry<String, List<Double>> entry : variableValues.entrySet()) {
            parser.setVarValue(entry.getKey(), entry.getValue().get(i));
        }

        try {
            double mean = parser.evaluate(function);

            if (!Double.isFinite(mean)) {
                return Double.NaN;
            }

            NormalDistribution dist = new NormalDistribution(mean, sd);
            double observed = targetValues.get(i);

            if (observed > levelOfDetection) {
                logLikelihood += Math.log(dist.density(observed));
            } else {
                // Left-censored observation: use the probability mass below
                // the detection limit instead of a point density.
                logLikelihood += Math.log(dist.cumulativeProbability(levelOfDetection));
            }
        } catch (ParseException e) {
            e.printStackTrace();
            return Double.NaN;
        }
    }

    return logLikelihood;
}