List of usage examples for org.apache.commons.math3.distribution NormalDistribution inverseCumulativeProbability
public double inverseCumulativeProbability(final double p) throws OutOfRangeException
From source file:org.wso2.extension.siddhi.execution.var.models.parametric.ParametricVaRCalculator.java
/** * @return the var of the portfolio Calculate var for the given portfolio using updated covariances and means *///from w w w .j a va 2s .c o m @Override public Double processData(Portfolio portfolio, Event event) { Asset asset = getAssetPool().get(event.getSymbol()); if (asset.getNumberOfReturnValues() > 1) { RealMatrix varCovarMatrix = new Array2DRowRealMatrix(getVarCovarMatrix(portfolio)); RealMatrix weightageMatrix = new Array2DRowRealMatrix(getWeightageMatrix(portfolio)); RealMatrix meanMatrix = new Array2DRowRealMatrix(getMeanMatrix(portfolio)); RealMatrix portfolioVarianceMatrix = weightageMatrix.multiply(varCovarMatrix) .multiply(weightageMatrix.transpose()); RealMatrix portfolioMeanMatrix = weightageMatrix.multiply(meanMatrix.transpose()); double portfolioVariance = portfolioVarianceMatrix.getData()[0][0]; double portfolioMean = portfolioMeanMatrix.getData()[0][0]; if (portfolioVariance == 0) { // a normal distribution cannot be defined when sd = 0 return null; } double portfolioStandardDeviation = Math.sqrt(portfolioVariance); NormalDistribution normalDistribution = new NormalDistribution(portfolioMean, portfolioStandardDeviation); double zValue = normalDistribution.inverseCumulativeProbability(1 - getConfidenceInterval()); double var = zValue * portfolio.getTotalPortfolioValue(); return var; } return null; }
From source file:ro.hasna.ts.math.distribution.NormalDistributionDivider.java
@Override public double[] getBreakpoints(int areas) { if (areas < 2) { throw new NumberIsTooSmallException(areas, 2, true); }//from ww w . java 2 s. c om NormalDistribution normalDistribution = new NormalDistribution(); int len = areas - 1; double[] result = new double[len]; double searchArea = 1.0 / areas; for (int i = 0; i < len; i++) { result[i] = normalDistribution.inverseCumulativeProbability(searchArea * (i + 1)); } return result; }
From source file:simulacion.Operaciones.java
public int pReordenOpt(float k, float h, float cp) { NormalDistribution d = new NormalDistribution(); double mL = obtenerMedia(entrega); double mD = obtenerMedia(demanda); double mX = mL * mD; //System.out.println(obtenerVarianza(entrega)); double desvX = Math.sqrt((mL * obtenerVarianza(demanda)) + ((mD * mD) * obtenerVarianza(entrega))); double p = (h * Qopt(k, h)) / (cp * mD * 365); double valor = d.inverseCumulativeProbability(p); double z = 1 - valor; return (int) Math.round((z * desvX) + mX); }
From source file:SimulationStructure.Statistics.java
public double[] getSimTimeIS(double confidenceLevel) { double realLevel = 1 - ((1 - confidenceLevel) / 2); double[] confidenceInterval = new double[2]; double avg = simTimeTotal / replicationCount; double S = Math.sqrt((simTimeTotalPower / replicationCount) - Math.pow(avg, 2)); double distrib = 0; if (replicationCount > 1) { if (replicationCount < 30) { TDistribution student = new TDistribution(replicationCount); distrib = student.inverseCumulativeProbability(realLevel); } else {//ww w.java2 s . co m NormalDistribution normal = new NormalDistribution(0, 1); distrib = normal.inverseCumulativeProbability(realLevel); } confidenceInterval[0] = avg - (distrib * S / Math.sqrt((replicationCount - 1))); confidenceInterval[1] = avg + (distrib * S / Math.sqrt((replicationCount - 1))); } else { return new double[] { 0, 0 }; } return confidenceInterval; }
From source file:topt.FXMLDocumentController.java
/**
 * Computes how many standard deviations lie between the mean and the
 * lower-tail quantile at {@code expectedError / 2} of a normal
 * distribution with the given parameters, rounded via {@code roundResult},
 * and displays the result in the UI.
 *
 * @param mean the mean of the distribution
 * @param stDev the standard deviation of the distribution
 * @param expectedError the two-sided error probability; half goes to the lower tail
 * @return the rounded number of standard deviations
 */
public double computeNrStDev(double mean, double stDev, double expectedError) {
    System.out.println(mean + " " + stDev);
    NormalDistribution normalDistribution = new NormalDistribution(mean, stDev);
    // distance from the mean to the lower-tail quantile, in standard deviations
    double nrOfStDev = (mean - normalDistribution.inverseCumulativeProbability(expectedError / 2)) / stDev;
    System.out.println(normalDistribution.inverseCumulativeProbability(expectedError / 2));
    System.out.println(nrOfStDev);
    nrOfStDev = roundResult(nrOfStDev);
    // NOTE(review): the label text (Polish, mojibake) reads "probability of
    // error is ... %", but the value shown is the rounded number of standard
    // deviations — verify which was intended. String kept byte-identical.
    this.pstwo.setText("Prawdopodobientwo bdu wynosi " + Double.toString(nrOfStDev) + "%");
    System.out.println("Liczba odchylen" + Double.toString(nrOfStDev));
    // this.nrOfStDevLabel.setText("Liczba ochyle standardowych przy danym bdzie : " + Double.toString(nrOfStDev));
    return nrOfStDev;
}
From source file:tv.dyndns.kishibe.qmaclone.server.Game.java
/**
 * Recomputes each human player's rating and volatility after a match,
 * following the TopCoder algorithm-competition rating system.
 *
 * References (kept from the original author):
 * http://topcoder.g.hatena.ne.jp/n4_t/20081222/
 * http://apps.topcoder.com/wiki/display/tc/Algorithm+Competition+Rating+System
 *
 * @param players all participants of the match; must contain at least two entries
 */
@VisibleForTesting
void calculateRating(List<PlayerStatus> players) {
    NormalDistribution normalDistribution = new NormalDistribution();
    Preconditions.checkState(2 <= players.size());
    int numCoders = players.size();

    // Average rating of the field.
    double sumRating = 0.0;
    for (PlayerStatus player : players) {
        sumRating += player.getRating();
    }
    double aveRating = sumRating / numCoders;

    // The competition factor is calculated:
    double sumVolatility2 = 0.0;
    double sumDiffRatingAveRating = 0.0;
    for (PlayerStatus player : players) {
        sumVolatility2 += player.getVolatility() * player.getVolatility();
        double diffRatingAveRating = player.getRating() - aveRating;
        sumDiffRatingAveRating += diffRatingAveRating * diffRatingAveRating;
    }
    double cf = Math.sqrt(sumVolatility2 / numCoders + sumDiffRatingAveRating / (numCoders - 1));

    // Sort by score, but push restricted users rated above 1700 ("black" flag)
    // below everyone else. (Original comment was mojibake — intent
    // reconstructed from the code; verify.)
    Collections.sort(players, new Comparator<PlayerStatus>() {
        @Override
        public int compare(PlayerStatus o1, PlayerStatus o2) {
            int black1;
            int black2;
            try {
                int userCode1 = o1.getUserCode();
                int rating1 = o1.getRating();
                int userCode2 = o2.getUserCode();
                int rating2 = o2.getRating();
                black1 = (restrictedUserUtils.checkAndUpdateRestrictedUser(userCode1, "127.0.0.1",
                        RestrictionType.MATCH) && rating1 > 1700) ? 1 : 0;
                black2 = (restrictedUserUtils.checkAndUpdateRestrictedUser(userCode2, "127.0.0.1",
                        RestrictionType.MATCH) && rating2 > 1700) ? 1 : 0;
            } catch (DatabaseException e) {
                throw Throwables.propagate(e);
            }
            return black1 != black2 ? black1 - black2 : o2.getScore() - o1.getScore();
        }
    });

    // Assign ranks; tied scores share the rank of the first tied player.
    for (int i = 0; i < players.size(); ++i) {
        if (0 < i && players.get(i - 1).getScore() == players.get(i).getScore()) {
            players.get(i).setHumanRank(players.get(i - 1).getHumanRank());
        } else {
            players.get(i).setHumanRank(i + 1);
        }
    }

    // Disabled: demote limited users to the last rank.
    // http://kishibe.dyndns.tv/qmaclone/wiki/wiki.cgi?page=BugTrack-QMAClone%2F490
    // for (PlayerStatus playerStatus : players) {
    //     if (badUserManager.isLimitedUser(playerStatus.getUserCode(), null)) {
    //         playerStatus.setHumanRank(players.size());
    //     }
    // }

    for (PlayerStatus my : players) {
        if (!my.isHuman()) {
            continue;
        }
        double myRating = my.getRating();
        double myVolatility = my.getVolatility();

        // Win Probability Estimation Algorithm:
        double eRank = 0.5;
        for (PlayerStatus player : players) {
            double hisVolatility = player.getVolatility();
            double wp = 0.5;
            wp = 0.5 * (Erf.erf((player.getRating() - myRating)
                    / Math.sqrt(2 * (hisVolatility * hisVolatility + myVolatility * myVolatility))) + 1.0);
            // Duplicate accounts of the same user never "beat" each other.
            // BugTrack-QMAClone/603 - QMAClone wiki
            // http://kishibe.dyndns.tv/qmaclone/wiki/wiki.cgi?page=BugTrack%2DQMAClone%2F603
            if (my != player && my.getUserCode() == player.getUserCode()) {
                wp = 0.0;
            }
            eRank += wp;
        }

        // The expected performance of the coder is calculated:
        double ePerf = -normalDistribution.inverseCumulativeProbability((eRank - 0.5) / numCoders);
        // The actual performance of each coder is calculated:
        double aPerf = -normalDistribution.inverseCumulativeProbability((my.getHumanRank() - 0.5) / numCoders);
        // The performed as rating of the coder is calculated:
        double perfAs = myRating + cf * (aPerf - ePerf);
        // The weight of the competition for the coder is calculated:
        double weight = 1.0 / (1 - (0.42 / (my.getPlayCount() + 1) + 0.18)) - 1.0;
        // A cap is calculated:
        double cap = 150 + 1500 / (my.getPlayCount() + 2);
        // The new rating of the coder is calculated:
        double newRating = (myRating + weight * perfAs) / (1.0 + weight);
        newRating = Math.min(newRating, myRating + cap);
        newRating = Math.max(newRating, myRating - cap);
        // The new volatility of the coder is calculated:
        double diffRating = newRating - myRating;
        double newVolatility = Math
                .sqrt(diffRating * diffRating / weight + myVolatility * myVolatility / (weight + 1));
        my.setNewRating((int) Math.rint(newRating));
        my.setNewVolatility((int) Math.rint(newVolatility));
    }

    // Re-sort purely by score and re-assign ranks (ties share a rank) —
    // presumably to undo the restricted-user demotion for display; verify.
    Collections.sort(players, new Comparator<PlayerStatus>() {
        @Override
        public int compare(PlayerStatus o1, PlayerStatus o2) {
            return o2.getScore() - o1.getScore();
        }
    });
    for (int i = 0; i < players.size(); ++i) {
        if (0 < i && players.get(i - 1).getScore() == players.get(i).getScore()) {
            players.get(i).setHumanRank(players.get(i - 1).getHumanRank());
        } else {
            players.get(i).setHumanRank(i + 1);
        }
    }
}
From source file:weka.attributeSelection.BiNormalSeperationEval.java
/** * Initializes an BNS attribute evaluator. * Discretizes all attributes that are numeric. * * @param data set of instances serving as training data * @throws Exception if the evaluator has not been * generated successfully/*from w w w . j a v a 2 s . c om*/ */ public void buildEvaluator(Instances data) throws Exception { // can evaluator handle data? getCapabilities().testWithFail(data); int classIndex = data.classIndex(); int numInstances = data.numInstances(); int numClasses = data.attribute(classIndex).numValues(); double[] tp = new double[data.numAttributes()]; double[] fp = new double[data.numAttributes()]; double[] totalPos = new double[data.numAttributes()]; double[] totalNeg = new double[data.numAttributes()]; // Initialize values for (int i = 0; i < data.numAttributes(); i++) { tp[i] = 0; fp[i] = 0; totalPos[i] = 0; totalNeg[i] = 0; } Instance curInst; String classValue; double attValue; for (int i = 0; i < numInstances; i++) { curInst = data.get(i); classValue = curInst.stringValue(classIndex); for (int j = 0; j < data.numAttributes(); j++) { if (j != classIndex) { attValue = curInst.value(j); if (classValue.equals("1")) totalPos[j]++; if (classValue.equals("0")) totalNeg[j]++; if (classValue.equals("1") && attValue > 0) tp[j]++; if (classValue.equals("0") && attValue == 0) fp[j]++; } } } double[] tpr = new double[data.numAttributes()]; double[] fpr = new double[data.numAttributes()]; NormalDistribution nd = new NormalDistribution(); m_zScores = new double[data.numAttributes()]; for (int i = 0; i < data.numAttributes(); i++) { tpr[i] = tp[i] / totalPos[i]; fpr[i] = fp[i] / totalNeg[i]; if (tp[i] == 0) tpr[i] = 0.00005; if (fp[i] == 0) fpr[i] = 0.00005; m_zScores[i] = nd.inverseCumulativeProbability(tpr[i]) - nd.inverseCumulativeProbability(fpr[i]); } }