List of usage examples for the no-argument constructor StandardDeviation() of org.apache.commons.math3.stat.descriptive.moment.StandardDeviation
public StandardDeviation()
From source file:com.graphhopper.jsprit.core.algorithm.acceptor.SchrimpfInitialThresholdGenerator.java
@Override public void informAlgorithmStarts(VehicleRoutingProblem problem, VehicleRoutingAlgorithm algorithm, Collection<VehicleRoutingProblemSolution> solutions) { logger.info("prepare schrimpfAcceptanceFunction, i.e. determine initial threshold"); double now = System.currentTimeMillis(); /*//from w ww.j a va 2 s.c o m * randomWalk to determine standardDev */ final double[] results = new double[nOfRandomWalks]; Jsprit.Builder builder = new GreedySchrimpfFactory().createGreedyAlgorithmBuilder(problem); builder.setCustomAcceptor(new AcceptNewRemoveFirst(1)); VehicleRoutingAlgorithm vra = builder.buildAlgorithm(); vra.setMaxIterations(nOfRandomWalks); vra.getAlgorithmListeners().addListener(new IterationEndsListener() { @Override public void informIterationEnds(int iteration, VehicleRoutingProblem problem, Collection<VehicleRoutingProblemSolution> solutions) { double result = Solutions.bestOf(solutions).getCost(); // logger.info("result={}", result); results[iteration - 1] = result; } }); vra.searchSolutions(); StandardDeviation dev = new StandardDeviation(); double standardDeviation = dev.evaluate(results); double initialThreshold = standardDeviation / 2; schrimpfAcceptance.setInitialThreshold(initialThreshold); logger.info("took {} seconds", ((System.currentTimeMillis() - now) / 1000.0)); logger.debug("initial threshold: {}", initialThreshold); logger.info("---------------------------------------------------------------------"); }
From source file:com.itemanalysis.psychometrics.rasch.RatingScaleThresholds.java
/**
 * Creates the threshold structure for one rating scale item group.
 *
 * NOTE(review): the original Javadoc claimed "the number of categories is
 * maxCategory+1", yet the code assigns numberOfCategories = maxCategory and
 * sizes all arrays with that value — confirm which is intended.
 *
 * @param groupId identifier for this rating scale category group
 * @param maxCategory largest category rho
 */
public RatingScaleThresholds(String groupId, int maxCategory) {
    this.groupId = groupId;
    numberOfCategories = maxCategory;
    // One slot per category for current estimates, proposal estimates, and
    // their standard errors.
    thresholds = new double[numberOfCategories];
    proposalThresholds = new double[numberOfCategories];
    standardErrors = new double[numberOfCategories];
    mean = new Mean();
    sd = new StandardDeviation();
    items = new ArrayList<RatingScaleItem>();
    rsm = new RatingScaleModel();
    fit = new LinkedHashMap<Integer, CategoryFitStatistics>();
    initializeAllFit(); // pre-populate per-category fit statistics
}
From source file:bigtweet.model.ComparingToRealData.java
/**
 * Updates the minimum distance, mean and standard deviation across all
 * experiments, and the same metrics for a single experiment run with
 * several random seeds on this dataset.
 *
 * @param parametersValues      parameter values of the experiment (recorded by caller)
 * @param parametersValuesIndex identifier/index of the experiment
 */
@Override
public void updateMetricsForParametersValues(JSONArray parametersValues, int parametersValuesIndex) {
    // Locale.US guarantees '.' as the decimal separator in the JSON output.
    DecimalFormatSymbols otherSymbols = new DecimalFormatSymbols(Locale.US);
    DecimalFormat df = new DecimalFormat("0.000", otherSymbols);
    // Collect the per-seed distances into a primitive array for the
    // commons-math evaluators.
    Object obDist[] = seedAndDistances.values().toArray();
    double distances[] = new double[obDist.length];
    for (int i = 0; i < obDist.length; i++) {
        distances[i] = (double) obDist[i]; // unboxes the stored Double
    }
    // Metrics for this experiment on this dataset.
    double minDistance = (new Min()).evaluate(distances);
    double mean = (new Mean()).evaluate(distances);
    double sd = (new StandardDeviation()).evaluate(distances);
    // Publish the per-experiment metrics as a fresh JSON object.
    metricsForLastExperiment = new JSONObject(); // create new object for the last experiment
    metricsForLastExperiment.put("minDistance", df.format(minDistance));
    metricsForLastExperiment.put("mean", df.format(mean));
    metricsForLastExperiment.put("sd", df.format(sd));
    // Also record the raw seed -> distance mapping.
    JSONObject distancesJSON = new JSONObject();
    for (Map.Entry<Long, Double> entry : seedAndDistances.entrySet()) {
        distancesJSON.put(entry.getKey().toString(), df.format(entry.getValue()));
    }
    metricsForLastExperiment.put("randomSeedsAndDistances", distancesJSON);
    // Update the running bests across all experiments (lower is better).
    if (mean < this.bestMean) {
        bestMean = mean;
        experimentWithBestMean = parametersValuesIndex;
        metricsForAllExperiments.put("bestMean", df.format(bestMean));
        metricsForAllExperiments.put("experimentWithBestMean", parametersValuesIndex);
    }
    if (minDistance < this.bestDistance) {
        bestDistance = minDistance;
        experimentWithBestDistance = parametersValuesIndex;
        metricsForAllExperiments.put("bestDistance", df.format(minDistance));
        metricsForAllExperiments.put("experimentWithBestDistance", parametersValuesIndex);
    }
}
From source file:gamlss.distributions.NO.java
/** Calculates initial value of sigma. * @param y - vector of values of response variable * @return vector of initial values of sigma *///from w ww . ja v a2 s . co m private ArrayRealVector setSigmaInitial(final ArrayRealVector y) { tempV = new ArrayRealVector(y.getDimension()); final double ySD = new StandardDeviation().evaluate(y.getDataRef()); tempV.set(ySD); return tempV; }
From source file:bigtweet.model.StudyingBeacons.java
@Override public void updateMetricsForParametersValues(JSONArray parametersValues, int parametersValuesIndex) { DecimalFormatSymbols otherSymbols = new DecimalFormatSymbols(Locale.US); DecimalFormat df = new DecimalFormat("0.000", otherSymbols); //get infected array for each seed double infected[] = new double[seedAndStates.values().size()]; for (int i = 0; i < infected.length; i++) { //number of infected for each seed //System.out.println( ((Map<String,Integer>) (seedAndStates.values().toArray()[i])).get("INFECTED")); infected[i] = (new Double( ((Map<String, Integer>) (seedAndStates.values().toArray()[i])).get("ENDORSER"))); }//from w ww.j a v a 2s . c om //get metrics for this experiment and this dataset double minInfected = (new Min()).evaluate(infected); double mean = (new Mean()).evaluate(infected); double sd = (new StandardDeviation()).evaluate(infected); //put in metricsForLastExperiment, JSON object metricsForLastExperiment = new JSONObject();//create new object for the last experiment metricsForLastExperiment.put("minEndorsers", df.format(minInfected)); metricsForLastExperiment.put("mean", df.format(mean)); metricsForLastExperiment.put("sd", df.format(sd)); JSONObject statesJSON = new JSONObject(); for (Map.Entry<Long, Map<String, Integer>> entry : seedAndStates.entrySet()) { statesJSON.put(entry.getKey().toString(), entry.getValue()); } metricsForLastExperiment.put("randomSeedsAndStates", statesJSON); //update metrics for all experiments if (mean < this.bestMean) { //System.out.println(name + " bestmean " + df.format(bestMean).toString() + " mean " + df.format(mean).toString()); bestMean = mean; experimentWithBestMean = parametersValuesIndex; metricsForAllExperiments.put("bestMean", df.format(bestMean)); metricsForAllExperiments.put("experimentWithBestMean", parametersValuesIndex); } if (minInfected < this.bestEndorser) { bestEndorser = minInfected; experimentWithBestEndorser = parametersValuesIndex; metricsForAllExperiments.put("bestEndorsers", 
df.format(minInfected)); metricsForAllExperiments.put("experimentWithBestEndorsers", parametersValuesIndex); } generateBatchOuputForChart(parametersValues, parametersValuesIndex, mean); }
From source file:eu.crisis_economics.abm.algorithms.series.TestAbstractSeries.java
/** * Assert that the long term mean of values drawn from a {@link RandomSeries} object * is as expected./*from w w w . j av a 2 s . c o m*/ * * @param series (<code>S</code>) <br> * The {@link RandomSeries} object to test. * @param numberOfSamples <br> * The number of samples to draw from <code>S</code>. * @param expectedLongTermStd <br> * The expected long term standard deviation of the series. */ protected void assertLongTermStd(RandomSeries series, final int numberOfSamples, final double expectedLongTermStd) { final List<Double> observations = new ArrayList<Double>(); for (int i = 0; i < numberOfSamples; ++i) observations.add(series.next()); final double observedMean = (new StandardDeviation()) .evaluate(ArrayUtils.toPrimitive(observations.toArray(new Double[observations.size()]))); Assert.assertEquals(observedMean, expectedLongTermStd, 1.e-1); }
From source file:eagle.security.userprofile.model.eigen.UserProfileEigenModeler.java
/**
 * Computes per-command statistics (mean, standard deviation, low-variance
 * flag) over the columns of the given matrix and stores them in the
 * statistics array, one entry per configured command type.
 *
 * @param m matrix whose i-th column holds the observations for cmdTypes[i]
 */
private void computeStats(RealMatrix m) {
    if (m.getColumnDimension() != this.cmdTypes.length) {
        // Column count must match the configured command list; bail out.
        LOG.error("Please fix the commands list in config file");
        return;
    }
    statistics = new UserCommandStatistics[m.getColumnDimension()];
    for (int i = 0; i < m.getColumnDimension(); i++) {
        UserCommandStatistics stats = new UserCommandStatistics();
        stats.setCommandName(this.cmdTypes[i]);
        RealVector colData = m.getColumnVector(i);
        StandardDeviation deviation = new StandardDeviation();
        double stddev = deviation.evaluate(colData.toArray());
        // NOTE(review): if a NaN check is ever needed here, use
        // Double.isNaN(stddev) — (stddev == Double.NaN) is always false.
        if (stddev <= lowVarianceVal)
            stats.setLowVariant(true);
        else
            stats.setLowVariant(false);
        stats.setStddev(stddev);
        Mean mean = new Mean();
        double mu = mean.evaluate(colData.toArray());
        stats.setMean(mu);
        statistics[i] = stats;
    }
}
From source file:com.github.rinde.rinsim.scenario.generator.NHPoissonProcessTest.java
@Ignore @Test/*from ww w . j ava2 s. c om*/ public void test() throws IOException { final int numSamples = 100; final long lengthOfScenario = 4 * 60 * 60 * 1000; final double period = 30 * 60 * 1000; final int[] orders = new int[] { 10, 20, 30, 40, 50, 75, 100, 150, 200, 500 }; final List<Point> dataPoints = newArrayList(); final RandomGenerator rng = new MersenneTwister(123); final List<Double> relHeights = newArrayList(); for (int i = 0; i < 10; i++) { relHeights.add(-.999 + i * .001); } for (int i = 0; i < 100; i++) { relHeights.add(-.99 + i * .05); } // for (int i = 0; i < 50; i++) { // relHeights.add(3.99 + (i * .5)); // } Files.createParentDirs(new File("files/test/times/relheight-dynamism.txt")); final BufferedWriter writer = Files.newWriter(new File("files/test/times/relheight-dynamism.txt"), Charsets.UTF_8); for (int k = 0; k < orders.length; k++) { for (int i = 0; i < relHeights.size(); i++) { final double d = relHeights.get(i); final double relHeight = d;// -.99 + (j * .05); // final double period = 3600d; final double ordersPerPeriod = orders[k] / (lengthOfScenario / period); final IntensityFunction intensity = IntensityFunctions.sineIntensity().height(d).period(period) .area(ordersPerPeriod).build(); System.out.printf("%1d relative height: %1.3f%n", i, relHeight); // final List<Double> sineTimes = FluentIterable // .from( // ContiguousSet.create(Range.closedOpen(0L, lengthOfScenario), // DiscreteDomain.longs())) // .transform(Conversion.LONG_TO_DOUBLE) // .transform(intensity) // .toList(); // Analysis // .writeLoads( // sineTimes, // new File( // "files/test/times/sine/sine-" // + Strings.padStart(Integer.toString(i), 2, '0') // + ".intens")); final TimeSeriesGenerator generator = TimeSeries.nonHomogenousPoisson(lengthOfScenario, intensity); double max = 0; double sum = 0; final StandardDeviation sd = new StandardDeviation(); final List<Double> dynamismValues = newArrayList(); for (int j = 0; j < numSamples; j++) { List<Double> times = 
generator.generate(rng.nextLong()); while (times.size() < 2) { times = generator.generate(rng.nextLong()); } final double dyn = Metrics.measureDynamism(times, lengthOfScenario); dynamismValues.add(dyn); sd.increment(dyn); sum += dyn; max = Math.max(max, dyn); // if (j < 3) { // // System.out.printf("%1.3f%% %d%n", dyn * 100, times.size()); // Analysis.writeTimes( // lengthOfScenario, // times, // new File( // "files/test/times/orders" // + Strings.padStart(Integer.toString(i), 2, '0') + "_" // + j // + "-" + (dyn * 100) // + ".times")); // } } try { writer.append(Double.toString(relHeight)); writer.append(" "); writer.append(Integer.toString(orders[k])); writer.append(" "); writer.append(Joiner.on(" ").join(dynamismValues).toString()); writer.append("\n"); } catch (final IOException e) { checkState(false); } System.out.printf(" > dyn %1.3f+-%1.3f%n", +(sum / numSamples), sd.getResult()); dataPoints.add(new Point(relHeight, sum / numSamples)); } } writer.close(); // Analysis.writeLocationList(dataPoints, new File( // "files/test/times/intensity-analysis.txt")); }
From source file:com.cloudera.oryx.rdf.common.information.NumericInformationTest.java
/**
 * Verifies candidate decision thresholds and the best information gain for
 * a numeric feature: feature values 0,1,2,4 paired with targets
 * 1.0,1.5,2.0,5.0.
 */
@Test
public void testInformationNumericFeature() {
    ExampleSet exampleSet = examplesForFeaturesValues(new float[] { 0.0f, 1.0f, 2.0f, 4.0f },
            new float[] { 1.0f, 1.5f, 2.0f, 5.0f });
    List<Decision> decisions = Decision.decisionsFromExamples(exampleSet, 0, 100);
    assertEquals(3, decisions.size());
    // Candidate thresholds lie between consecutive distinct feature values.
    assertEquals(3.0f, ((NumericDecision) decisions.get(0)).getThreshold());
    assertEquals(1.5f, ((NumericDecision) decisions.get(1)).getThreshold());
    assertEquals(0.5f, ((NumericDecision) decisions.get(2)).getThreshold());
    Pair<Decision, Double> best = NumericalInformation.bestGain(decisions, exampleSet);
    assertEquals(1.5f, ((NumericDecision) best.getFirst()).getThreshold());
    // Expected gain = H(all) minus the example-weighted entropies of the two
    // halves of the best split ({1.0,1.5} vs {2.0,5.0}), each 2 of 4 examples.
    StandardDeviation all = new StandardDeviation();
    all.incrementAll(new double[] { 1.0, 1.5, 2.0, 5.0 });
    StandardDeviation positive = new StandardDeviation();
    positive.incrementAll(new double[] { 1.0, 1.5 });
    StandardDeviation negative = new StandardDeviation();
    negative.incrementAll(new double[] { 2.0, 5.0 });
    assertEquals(differentialEntropy(all) - (2.0 / 4.0) * differentialEntropy(positive)
            - (2.0 / 4.0) * differentialEntropy(negative), best.getValue().doubleValue());
}
From source file:br.unicamp.ic.recod.gpsi.applications.gpsiJGAPEvolver.java
/**
 * Runs the full genetic-programming experiment over 5 cross-validation
 * folds. Per fold: assigns train/validation/test folds, evolves the GP
 * population for numGenerations, tracks the best program on training alone
 * and (when validation > 0) the best on a combined train/validation score,
 * then fits a 1-NN-to-mean classifier with each best program and registers
 * fitness curves, program dumps and confusion matrices on the IO stream.
 *
 * @throws InvalidConfigurationException if JGAP rejects the GP configuration
 * @throws InterruptedException if evolution is interrupted
 * @throws Exception propagated from dataset loading / IO registration
 */
@Override
public void run() throws InvalidConfigurationException, InterruptedException, Exception {
    int i, j, k; // NOTE(review): j and k appear unused in this method
    byte nFolds = 5;
    gpsiDescriptor descriptor;
    gpsiMLDataset mlDataset;
    gpsiVoxelRawDataset dataset;
    GPGenotype gp;
    double[][] fitnessCurves;
    String[] curveLabels = new String[] { "train", "train_val", "val" };
    double bestScore, currentScore;
    IGPProgram current, bestVal;
    Mean mean = new Mean();
    StandardDeviation sd = new StandardDeviation();
    double validationScore, trainScore, bestValidationScore, bestTrainScore;
    double[][][] samples;
    for (byte f = 0; f < nFolds; f++) {
        System.out.println("\nRun " + (f + 1) + "\n");
        // Rotate the 5 folds: 3 for training, 1 for validation, 1 for test.
        rawDataset.assignFolds(new byte[] { f, (byte) ((f + 1) % nFolds), (byte) ((f + 2) % nFolds) },
                new byte[] { (byte) ((f + 3) % nFolds) }, new byte[] { (byte) ((f + 4) % nFolds) });
        dataset = (gpsiVoxelRawDataset) rawDataset;
        gp = create(config, dataset.getnBands(), fitness);
        // Curve columns: 0: train, 1: train_val, 2: val
        fitnessCurves = new double[super.numGenerations][];
        current = null;
        bestVal = null;
        bestScore = -Double.MAX_VALUE;
        bestValidationScore = -1.0;
        bestTrainScore = -1.0;
        for (int generation = 0; generation < super.numGenerations; generation++) {
            gp.evolve(1);
            gp.getGPPopulation().sortByFitness();
            if (this.dumpGens) {
                // Optionally dump per-class sample distributions of the
                // current best program for this generation.
                double[][][] dists;
                descriptor = new gpsiScalarSpectralIndexDescriptor(
                        new gpsiJGAPVoxelCombiner(fitness.getB(), gp.getGPPopulation().getGPPrograms()[0]));
                mlDataset = new gpsiMLDataset(descriptor);
                mlDataset.loadWholeDataset(rawDataset, true);
                dists = (new gpsiWholeSampler()).sample(mlDataset.getTrainingEntities(), this.classLabels);
                for (i = 0; i < this.classLabels.length; i++) {
                    stream.register(new gpsiDoubleCsvIOElement(dists[i], null,
                            "gens/f" + (f + 1) + "/" + classLabels[i] + "/" + (generation + 1) + ".csv"));
                }
            }
            // Re-score the top `validation` programs on the validation fold;
            // combined score = mean(train, val) - sd(train, val), which
            // rewards programs that do well on both without diverging.
            for (i = 0; i < super.validation; i++) {
                current = gp.getGPPopulation().getGPPrograms()[i];
                descriptor = new gpsiScalarSpectralIndexDescriptor(
                        new gpsiJGAPVoxelCombiner(fitness.getB(), current));
                mlDataset = new gpsiMLDataset(descriptor);
                mlDataset.loadWholeDataset(rawDataset, true);
                samples = this.fitness.getSampler().sample(mlDataset.getValidationEntities(), classLabels);
                validationScore = fitness.getScore().score(samples);
                // JGAP fitness is offset by +1; undo it to get the raw score.
                trainScore = current.getFitnessValue() - 1.0;
                currentScore = mean.evaluate(new double[] { trainScore, validationScore })
                        - sd.evaluate(new double[] { trainScore, validationScore });
                if (currentScore > bestScore) {
                    bestVal = current;
                    bestScore = currentScore;
                    bestTrainScore = trainScore;
                    bestValidationScore = validationScore;
                }
            }
            if (validation > 0) {
                // Track both the all-time training best and the combined best.
                best = new IGPProgram[2];
                best[0] = gp.getAllTimeBest();
                best[1] = bestVal;
                fitnessCurves[generation] = new double[] { best[0].getFitnessValue() - 1.0, bestTrainScore,
                        bestValidationScore };
                System.out.printf("%3dg: %.4f %.4f %.4f\n", generation + 1, fitnessCurves[generation][0],
                        fitnessCurves[generation][1], fitnessCurves[generation][2]);
            } else {
                best = new IGPProgram[1];
                best[0] = gp.getAllTimeBest();
                fitnessCurves[generation] = new double[] { gp.getAllTimeBest().getFitnessValue() - 1.0 };
                System.out.printf("%3dg: %.4f\n", generation + 1, fitnessCurves[generation][0]);
            }
        }
        stream.register(new gpsiDoubleCsvIOElement(fitnessCurves, curveLabels, "curves/f" + (f + 1) + ".csv"));
        System.out.println("Best solution for trainning: " + gp.getAllTimeBest().toStringNorm(0));
        stream.register(new gpsiStringIOElement(gp.getAllTimeBest().toStringNorm(0),
                "programs/f" + (f + 1) + "train.program"));
        if (validation > 0) {
            System.out.println("Best solution for trainning and validation: " + bestVal.toStringNorm(0));
            stream.register(new gpsiStringIOElement(bestVal.toStringNorm(0),
                    "programs/f" + (f + 1) + "train_val.program"));
        }
        // Evaluate the training-best program on the test fold with a
        // 1-NN-to-class-mean classifier and record its confusion matrix.
        descriptor = new gpsiScalarSpectralIndexDescriptor(new gpsiJGAPVoxelCombiner(fitness.getB(), best[0]));
        gpsi1NNToMomentScalarClassificationAlgorithm classificationAlgorithm = new gpsi1NNToMomentScalarClassificationAlgorithm(
                new Mean());
        gpsiClassifier classifier = new gpsiClassifier(descriptor, classificationAlgorithm);
        classifier.fit(this.rawDataset.getTrainingEntities());
        classifier.predict(this.rawDataset.getTestEntities());
        int[][] confusionMatrix = classifier.getConfusionMatrix();
        stream.register(new gpsiIntegerCsvIOElement(confusionMatrix, null,
                "confusion_matrices/f" + (f + 1) + "_train.csv"));
        if (validation > 0) {
            // Repeat the test-fold evaluation for the combined-score best.
            descriptor = new gpsiScalarSpectralIndexDescriptor(
                    new gpsiJGAPVoxelCombiner(fitness.getB(), best[1]));
            classificationAlgorithm = new gpsi1NNToMomentScalarClassificationAlgorithm(new Mean());
            classifier = new gpsiClassifier(descriptor, classificationAlgorithm);
            classifier.fit(this.rawDataset.getTrainingEntities());
            classifier.predict(this.rawDataset.getTestEntities());
            confusionMatrix = classifier.getConfusionMatrix();
            stream.register(new gpsiIntegerCsvIOElement(confusionMatrix, null,
                    "confusion_matrices/f" + (f + 1) + "_train_val.csv"));
        }
    }
}