List of usage examples for the org.apache.commons.math3.distribution.NormalDistribution constructor
public NormalDistribution(double mean, double sd) throws NotStrictlyPositiveException
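Before the project-specific examples below, here is a minimal, self-contained sketch of the constructor in use. The class name NormalDistributionQuickStart and the numeric values (mean 5.0, sd 2.0) are arbitrary illustration choices, not taken from any of the projects listed here; only the library calls themselves (cumulativeProbability, inverseCumulativeProbability, and the NotStrictlyPositiveException thrown for a non-positive sd, as the signature above states) come from Commons Math.

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.exception.NotStrictlyPositiveException;

public class NormalDistributionQuickStart {
    public static void main(String[] args) {
        // Construct N(mean = 5.0, sd = 2.0); the sd argument must be strictly positive.
        NormalDistribution dist = new NormalDistribution(5.0, 2.0);

        // P(X <= 7.0)
        double cdf = dist.cumulativeProbability(7.0);

        // 97.5% quantile (inverse CDF)
        double quantile = dist.inverseCumulativeProbability(0.975);

        System.out.println("P(X <= 7) = " + cdf + ", 97.5% quantile = " + quantile);

        try {
            new NormalDistribution(0.0, 0.0); // sd = 0 is rejected by the constructor
        } catch (NotStrictlyPositiveException e) {
            System.out.println("sd must be > 0: " + e.getMessage());
        }
    }
}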
From source file:org.wso2.extension.siddhi.execution.var.models.parametric.ParametricVaRCalculator.java
/**
 * Calculate VaR for the given portfolio using updated covariances and means.
 *
 * @return the VaR of the portfolio
 */
@Override
public Double processData(Portfolio portfolio, Event event) {
    Asset asset = getAssetPool().get(event.getSymbol());
    if (asset.getNumberOfReturnValues() > 1) {
        RealMatrix varCovarMatrix = new Array2DRowRealMatrix(getVarCovarMatrix(portfolio));
        RealMatrix weightageMatrix = new Array2DRowRealMatrix(getWeightageMatrix(portfolio));
        RealMatrix meanMatrix = new Array2DRowRealMatrix(getMeanMatrix(portfolio));

        RealMatrix portfolioVarianceMatrix = weightageMatrix.multiply(varCovarMatrix)
                .multiply(weightageMatrix.transpose());
        RealMatrix portfolioMeanMatrix = weightageMatrix.multiply(meanMatrix.transpose());

        double portfolioVariance = portfolioVarianceMatrix.getData()[0][0];
        double portfolioMean = portfolioMeanMatrix.getData()[0][0];

        if (portfolioVariance == 0) { // a normal distribution cannot be defined when sd = 0
            return null;
        }

        double portfolioStandardDeviation = Math.sqrt(portfolioVariance);

        NormalDistribution normalDistribution = new NormalDistribution(portfolioMean,
                portfolioStandardDeviation);
        double zValue = normalDistribution.inverseCumulativeProbability(1 - getConfidenceInterval());
        double var = zValue * portfolio.getTotalPortfolioValue();

        return var;
    }
    return null;
}
From source file:playground.sergioo.passivePlanning2012.core.population.AgendaBasePersonImpl.java
private static Agenda createAgenda(Person person) {
    Agenda agenda = new Agenda();
    agenda.addElement("home", new NormalDistribution(0, 0.1), new NormalDistribution(10 * 3600, 2 * 3600));
    double time = 0;
    for (Plan plan : person.getPlans())
        for (PlanElement planElement : plan.getPlanElements()) {
            if (planElement instanceof Activity && !((Activity) planElement).getType().equals("home")
                    && !((Activity) planElement).getType().equals(PtConstants.TRANSIT_ACTIVITY_TYPE)) {
                double duration = ((Activity) planElement).getEndTime() - time;
                if (duration < 900)
                    duration = 900;
                if (agenda.containsType(((Activity) planElement).getType()))
                    agenda.addObservation(((Activity) planElement).getType(), duration);
                else
                    agenda.addElement(((Activity) planElement).getType(), duration);
            }
            if (planElement instanceof Activity)
                if (((Activity) planElement).getEndTime() == Time.UNDEFINED_TIME)
                    time += ((Activity) planElement).getMaximumDuration();
                else
                    time = ((Activity) planElement).getEndTime();
            else
                time += ((Leg) planElement).getTravelTime();
        }
    return agenda;
}
From source file:plugins.ImageAutocorrelation.java
/**
 * Used to execute this plugin tool.
 */
@Override
public void run() {
    amIActive = true;

    WhiteboxRaster image;
    int col, row, numImages, x, y;
    int cols, rows;
    int a = 0;
    double noData;
    double z, zn;
    int progress = 0;
    String progressMessage = "";
    String inputFilesString = null;
    String[] imageFiles;
    long[] n;
    double[] mean;
    String[] shortNames;
    String[] units;
    double[] I;
    double[] stdDev;
    double totalDeviation;
    int[] dX;
    int[] dY;
    double numerator, W;
    //double recipRoot2 = 1 / Math.sqrt(2);
    //double[] wNeighbour = {recipRoot2, 1, recipRoot2, 1, recipRoot2, 1, recipRoot2, 1};
    //double[] wNeighbour = {1, 1, 1, 1};

    if (args.length <= 0) {
        showFeedback("Plugin parameters have not been set.");
        return;
    }

    inputFilesString = args[0];
    imageFiles = inputFilesString.split(";");
    numImages = imageFiles.length;

    if (args[1].toLowerCase().contains("bishop")) {
        dX = new int[] { 1, 1, -1, -1 };
        dY = new int[] { -1, 1, 1, -1 };
    } else if (args[1].toLowerCase().contains("queen") || args[1].toLowerCase().contains("king")) {
        dX = new int[] { 1, 1, 1, 0, -1, -1, -1, 0 };
        dY = new int[] { -1, 0, 1, 1, 1, 0, -1, -1 };
    } else {
        // go with the rook default
        dX = new int[] { 1, 0, -1, 0 };
        dY = new int[] { 0, 1, 0, -1 };
    }

    try {
        // initialize the image data arrays
        double sigmaZ;
        n = new long[numImages];
        mean = new double[numImages];
        I = new double[numImages];
        shortNames = new String[numImages];
        units = new String[numImages];
        stdDev = new double[numImages];
        double[] E_I = new double[numImages];
        double[] varNormality = new double[numImages];
        double[] varRandomization = new double[numImages];
        double[] zN = new double[numImages];
        double[] zR = new double[numImages];
        double[] pValueN = new double[numImages];
        double[] pValueR = new double[numImages];
        double[] data;

        NormalDistribution distribution = new NormalDistribution(0, 1);

        for (a = 0; a < numImages; a++) {
            progressMessage = "Image " + (a + 1) + " of " + numImages;
            image = new WhiteboxRaster(imageFiles[a], "r");
            noData = image.getNoDataValue();
            rows = image.getNumberRows();
            cols = image.getNumberColumns();
            shortNames[a] = image.getShortHeaderFile();
            if (!image.getZUnits().toLowerCase().equals("not specified")) {
                units[a] = image.getZUnits();
            } else {
                units[a] = "";
            }

            sigmaZ = 0;
            for (row = 0; row < rows; row++) {
                data = image.getRowValues(row);
                for (col = 0; col < cols; col++) {
                    if (data[col] != noData) {
                        sigmaZ += data[col];
                        n[a]++;
                    }
                }
                if (cancelOp) {
                    cancelOperation();
                    return;
                }
                progress = (int) (row * 100.0 / rows);
                updateProgress(progressMessage, progress);
            }

            mean[a] = sigmaZ / n[a];
            E_I[a] = -1.0 / (n[a] - 1);

            totalDeviation = 0;
            W = 0;
            numerator = 0;
            double S2 = 0;
            double wij;
            int numNeighbours = dX.length;
            double k = 0;
            for (row = 0; row < rows; row++) {
                for (col = 0; col < cols; col++) {
                    z = image.getValue(row, col);
                    if (z != noData) {
                        totalDeviation += (z - mean[a]) * (z - mean[a]);
                        k += (z - mean[a]) * (z - mean[a]) * (z - mean[a]) * (z - mean[a]);
                        wij = 0;
                        for (int i = 0; i < numNeighbours; i++) {
                            x = col + dX[i];
                            y = row + dY[i];
                            zn = image.getValue(y, x);
                            if (zn != noData) { // two valid neighbour pairs
                                W += 1.0;
                                numerator += (z - mean[a]) * (zn - mean[a]); // * weight of 1.0
                                wij += 1;
                            }
                        }
                        S2 += wij * wij;
                    }
                }
                if (cancelOp) {
                    cancelOperation();
                    return;
                }
                progress = (int) (row * 100.0 / rows);
                updateProgress(progressMessage, progress);
            }

            double S1 = 4 * W;
            S2 = S2 * 4;
            stdDev[a] = Math.sqrt(totalDeviation / (n[a] - 1));
            I[a] = n[a] * numerator / (totalDeviation * W);
            varNormality[a] = (n[a] * n[a] * S1 - n[a] * S2 + 3 * W * W)
                    / ((W * W) * (n[a] * n[a] - 1));
            zN[a] = (I[a] - E_I[a]) / (Math.sqrt(varNormality[a]));
            pValueN[a] = 2d * (1.0 - distribution.cumulativeProbability(Math.abs(zN[a])));
            k = k / (n[a] * stdDev[a] * stdDev[a] * stdDev[a] * stdDev[a]);
            varRandomization[a] = (n[a] * ((n[a] * n[a] - 3 * n[a] + 3) * S1 - n[a] * S2 + 3 * W * W)
                    - k * (n[a] * n[a] - n[a]) * S1 - 2 * n[a] * S1 + 6 * W * W)
                    / ((n[a] - 1) * (n[a] - 2) * (n[a] - 3) * W * W);
            zR[a] = (I[a] - E_I[a]) / (Math.sqrt(varRandomization[a]));
            pValueR[a] = 2d * (1.0 - distribution.cumulativeProbability(Math.abs(zR[a])));

            image.close();
            progress = (int) (100f * (a + 1) / numImages);
            updateProgress(progressMessage, progress);
        }

        StringBuilder retstr = new StringBuilder();
        DecimalFormat df1 = new DecimalFormat("###,###,###,###");
        DecimalFormat df2 = new DecimalFormat("0.0000");
        retstr.append("SPATIAL AUTOCORRELATION\n");

        for (a = 0; a < numImages; a++) {
            retstr.append("\n");
            retstr.append("Input image:\t\t\t").append(shortNames[a]).append("\n");
            retstr.append("Number of cells included:\t\t").append(df1.format(n[a])).append("\n");
            if (units[a].equals("")) {
                retstr.append("Mean of cells included:\t\t").append(df2.format(mean[a])).append("\n");
            } else {
                retstr.append("Mean of cells included:\t\t").append(df2.format(mean[a])).append(" ")
                        .append(units[a]).append("\n");
            }
            retstr.append("Spatial autocorrelation (Moran's I):\t").append(df2.format(I[a])).append("\n");
            retstr.append("Expected value:\t\t").append(df2.format(E_I[a])).append("\n");
            retstr.append("Variance of I (normality assumption):\t").append(df2.format(varNormality[a]))
                    .append("\n");
            retstr.append("z test stat (normality assumption):\t").append(df2.format(zN[a])).append("\n");
            retstr.append("p-value (normality assumption):\t").append(df2.format(pValueN[a])).append("\n");
            retstr.append("Variance of I (randomization assumption):\t").append(df2.format(varRandomization[a]))
                    .append("\n");
            retstr.append("z test stat (randomization assumption):\t").append(df2.format(zR[a])).append("\n");
            retstr.append("p-value (randomization assumption):\t").append(df2.format(pValueR[a])).append("\n");
        }

        // System.out.println(retstr.toString());
        returnData(retstr.toString());

    } catch (OutOfMemoryError oe) {
        myHost.showFeedback("An out-of-memory error has occurred during operation.");
    } catch (Exception e) {
        myHost.showFeedback("An error has occurred during operation. See log file for details.");
        myHost.logException("Error in " + getDescriptiveName(), e);
    } finally {
        updateProgress("Progress: ", 0);
        // tells the main application that this process is completed.
        amIActive = false;
        myHost.pluginComplete();
    }
}
From source file:producer.consumer.Consumer.java
public Consumer(BlockingQueue<String> queue, double mean, double deviation, Buffer buf) {
    this.queue = queue;
    this.distribution = new NormalDistribution(mean, deviation);
    this.buf = buf;
}
From source file:producer.consumer.Producer.java
public Producer(BlockingQueue<String> queue, int steps, double mean, double deviation, Buffer buf) {
    this.queue = queue;
    this.steps = steps;
    this.distribution = new NormalDistribution(mean, deviation);
    this.buf = buf;
}
From source file:pyromaniac.Algorithm.BalzerOUCallFrequencyTable.java
/**
 * Calculate probability.
 *
 * @param segmentNumber the segment number
 * @param mode the mode
 * @return the BigDecimal[] of probabilities, all with scale of 10.
 */
private BigDecimal[] _calculateProbabilitiesHelper(int segmentNumber, int mode) {
    BigDecimal sd, mean, modeBD;

    // this multiplicative factor was taken from elsewhere...
    BigDecimal flowEffect = new BigDecimal("0.003").multiply(new BigDecimal(segmentNumber)).setScale(SCALE,
            BigDecimal.ROUND_HALF_UP);
    modeBD = new BigDecimal(mode);

    if (mode >= 6) {
        mean = new BigDecimal(mode).setScale(SCALE, BigDecimal.ROUND_HALF_UP);
        // standard deviation is 0.03 + effect of RefLen + effect of flow position
        sd = new BigDecimal("0.03494").add(mean.multiply(new BigDecimal("0.06856"))).add(flowEffect);
    } else {
        mean = new BigDecimal(this.normalDistParams.get(mode).getFirst()).setScale(SCALE,
                BigDecimal.ROUND_HALF_UP);
        sd = new BigDecimal(this.normalDistParams.get(mode).getSecond()).add(flowEffect).setScale(SCALE,
                BigDecimal.ROUND_HALF_UP);
    }

    NormalDistribution norm = new NormalDistribution(mean.doubleValue(), sd.doubleValue());

    try {
        // due to rounding...
        // cumulative probability [X <= x]
        // so prob under is [X <= MODE - 0.51], and prob over is 1 - prob [X <= MODE + 0.49]
        // (i.e. prob X > MODE + 0.49)
        BigDecimal lowerBound = modeBD.subtract(new BigDecimal(SUBTRACT_FOR_LB)).setScale(SCALE,
                BigDecimal.ROUND_HALF_UP);
        BigDecimal upperBound = modeBD.add(new BigDecimal(ADD_FOR_UB)).setScale(SCALE,
                BigDecimal.ROUND_HALF_UP);

        BigDecimal probLessThan = new BigDecimal(norm.cumulativeProbability(lowerBound.doubleValue()))
                .setScale(SCALE, BigDecimal.ROUND_HALF_UP);
        BigDecimal probMoreThan = new BigDecimal("1")
                .subtract(new BigDecimal(norm.cumulativeProbability(upperBound.doubleValue())).setScale(SCALE,
                        BigDecimal.ROUND_HALF_UP));
        BigDecimal probEqualTo = new BigDecimal("1").subtract(probLessThan).subtract(probMoreThan)
                .setScale(SCALE, BigDecimal.ROUND_HALF_UP);

        BigDecimal summed = probLessThan.add(probEqualTo).add(probMoreThan).setScale(SCALE,
                BigDecimal.ROUND_HALF_UP);

        if (!summed.equals(new BigDecimal("1").setScale(SCALE, BigDecimal.ROUND_HALF_UP))) {
            probLessThan = probLessThan.divide(summed, SCALE, BigDecimal.ROUND_HALF_UP);
            probMoreThan = probMoreThan.divide(summed, SCALE, BigDecimal.ROUND_HALF_UP);
            probEqualTo = probEqualTo.divide(summed, SCALE, BigDecimal.ROUND_HALF_UP);
        }

        BigDecimal[] probs = { probLessThan, probEqualTo, probMoreThan };
        return probs;
    } catch (MathIllegalStateException me) {
        me.getStackTrace();
    }
    return null;
}
From source file:pyromaniac.Algorithm.QuinceOUFrequencyTable.java
private BigDecimal[] _calculateProbabilitiesHelper(int mode) {
    BigDecimal sd = new BigDecimal("0.04").add(new BigDecimal(mode).multiply(new BigDecimal("0.03")));
    BigDecimal modeBD = new BigDecimal(mode);
    BigDecimal lowerBound = modeBD.subtract(new BigDecimal(SUBTRACT_FOR_LB)).setScale(SCALE,
            BigDecimal.ROUND_HALF_UP);
    BigDecimal upperBound = modeBD.add(new BigDecimal(ADD_FOR_UB)).setScale(SCALE,
            BigDecimal.ROUND_HALF_UP);

    NormalDistribution norm = new NormalDistribution(mode, sd.doubleValue());

    try {
        BigDecimal probLessThan = new BigDecimal(norm.cumulativeProbability(lowerBound.doubleValue()))
                .setScale(SCALE, BigDecimal.ROUND_HALF_UP);
        BigDecimal probMoreThan = new BigDecimal("1")
                .subtract(new BigDecimal(norm.cumulativeProbability(upperBound.doubleValue())))
                .setScale(SCALE, BigDecimal.ROUND_HALF_UP);
        BigDecimal probEqualTo = new BigDecimal("1").subtract(probLessThan).subtract(probMoreThan)
                .setScale(SCALE, BigDecimal.ROUND_HALF_UP);

        BigDecimal totalProb = probLessThan.add(probEqualTo).add(probMoreThan).setScale(SCALE,
                BigDecimal.ROUND_HALF_UP);

        if (!totalProb.equals(new BigDecimal("1").setScale(SCALE, BigDecimal.ROUND_HALF_UP))) {
            probLessThan = probLessThan.divide(totalProb, SCALE, BigDecimal.ROUND_HALF_UP);
            probMoreThan = probMoreThan.divide(totalProb, SCALE, BigDecimal.ROUND_HALF_UP);
            probEqualTo = probEqualTo.divide(totalProb, SCALE, BigDecimal.ROUND_HALF_UP);
        }

        BigDecimal[] probs = { probLessThan, probEqualTo, probMoreThan };
        return probs;
    } catch (MathIllegalStateException me) {
        me.getStackTrace();
    }
    return null;
}
From source file:rndvecgen.RandomVecGen.java
private void init() throws Exception {
    DocVector.initVectorRange(prop);

    numPoints = Integer.parseInt(prop.getProperty("syntheticdata.numsamples"));
    numGaussians = Integer.parseInt(prop.getProperty("syntheticdata.numgaussians"));
    maxSpan = Integer.parseInt(prop.getProperty("syntheticdata.maxSpan"));
    diagonalCovMatrix = Boolean.parseBoolean(prop.getProperty("syntheticdata.diagonalcov"));
    min = Float.parseFloat(prop.getProperty("syntheticdata.min"));
    max = Float.parseFloat(prop.getProperty("syntheticdata.max"));
    numDimensions = Integer.parseInt(prop.getProperty("vec.numdimensions"));

    final int NUM_MU_GEN = 5;
    float delta = (max - min) / NUM_MU_GEN;
    muGen = new NormalDistribution[NUM_MU_GEN];
    for (int i = 0; i < NUM_MU_GEN; i++) {
        muGen[i] = new NormalDistribution(min + i * delta, Math.random() * maxSpan);
    }

    List<Pair<Double, MultivariateNormalDistribution>> components = new ArrayList<>();
    for (int i = 0; i < numGaussians; i++) {
        components.add(new Pair(new Double(1 / (double) numGaussians), genRandom(i)));
    }

    // Ensure that we can reproduce the results...
    RandomGenerator rg = new JDKRandomGenerator();
    rg.setSeed(SEED);

    this.mixtureDist = new MixtureMultivariateNormalDistribution(rg, components);
}
From source file:SimulationStructure.Statistics.java
public double[] getSimTimeIS(double confidenceLevel) {
    double realLevel = 1 - ((1 - confidenceLevel) / 2);
    double[] confidenceInterval = new double[2];
    double avg = simTimeTotal / replicationCount;
    double S = Math.sqrt((simTimeTotalPower / replicationCount) - Math.pow(avg, 2));
    double distrib = 0;

    if (replicationCount > 1) {
        if (replicationCount < 30) {
            TDistribution student = new TDistribution(replicationCount);
            distrib = student.inverseCumulativeProbability(realLevel);
        } else {
            NormalDistribution normal = new NormalDistribution(0, 1);
            distrib = normal.inverseCumulativeProbability(realLevel);
        }
        confidenceInterval[0] = avg - (distrib * S / Math.sqrt((replicationCount - 1)));
        confidenceInterval[1] = avg + (distrib * S / Math.sqrt((replicationCount - 1)));
    } else {
        return new double[] { 0, 0 };
    }
    return confidenceInterval;
}
From source file:topt.FXMLDocumentController.java
public double computeNrStDev(double mean, double stDev, double expectedError) {
    System.out.println(mean + " " + stDev);
    NormalDistribution normalDistribution = new NormalDistribution(mean, stDev);
    double nrOfStDev = (mean - normalDistribution.inverseCumulativeProbability(expectedError / 2)) / stDev;
    System.out.println(normalDistribution.inverseCumulativeProbability(expectedError / 2));
    System.out.println(nrOfStDev);
    nrOfStDev = roundResult(nrOfStDev);
    // Polish label: "The probability of error is ... %"
    this.pstwo.setText("Prawdopodobieństwo błędu wynosi " + Double.toString(nrOfStDev) + "%");
    // Polish: "Number of standard deviations"
    System.out.println("Liczba odchyleń " + Double.toString(nrOfStDev));
    // this.nrOfStDevLabel.setText("Liczba odchyleń standardowych przy danym błędzie: " + Double.toString(nrOfStDev));
    return nrOfStDev;
}