Usage examples for org.apache.commons.math3.stat.descriptive.DescriptiveStatistics#clear()
public void clear()
From source file:async.nio2.Main.java
/**
 * Merges all samples held by {@code stats2} into {@code stats1}, emptying
 * {@code stats2} as a side effect.
 *
 * @param stats1 accumulator that receives every sample (also the return value)
 * @param stats2 source statistics; cleared after its values are drained
 * @return {@code stats1}, now containing the samples of both inputs
 */
private static DescriptiveStatistics combine(DescriptiveStatistics stats1, DescriptiveStatistics stats2) {
    for (double sample : stats2.getValues()) {
        stats1.addValue(sample);
    }
    stats2.clear();
    return stats1;
}
From source file:async.nio2.Main.java
/**
 * Renders a one-line summary (median, 90th/99th percentile, min, max) of the
 * collected samples, then resets the statistics so the next measurement
 * window starts fresh.
 *
 * @param stats sample accumulator; cleared before returning
 * @return the formatted summary line
 */
private static String toEvaluationString(DescriptiveStatistics stats) {
    // FIX: the original passed stats.getMean() for the "0.50 Percentile" slot.
    // The label (and the sibling 0.90/0.99 entries) calls for the median, i.e.
    // getPercentile(50).
    String data = String.format(
            "0.50 Percentile = %8.2f, "
                    + "0.90 Percentile = %8.2f, "
                    + "0.99 Percentile = %8.2f, "
                    + "min = %8.2f, "
                    + "max = %8.2f",
            stats.getPercentile(50), stats.getPercentile(90), stats.getPercentile(99),
            stats.getMin(), stats.getMax());
    stats.clear();
    return data;
}
From source file:mase.spec.SpecialisationStats.java
/**
 * Emits one log line of per-generation specialisation statistics: metapop
 * count, size and dispersion summaries, pending merges/splits, and
 * per-subpopulation distance summaries. The column order here defines the
 * log format — keep it in sync with whatever consumes this log.
 */
@Override
public void postPreBreedingExchangeStatistics(EvolutionState state) {
    super.postPreBreedingExchangeStatistics(state);
    SpecialisationExchanger exc = (SpecialisationExchanger) state.exchanger;
    // generation and number of metapopulations
    state.output.print(state.generation + " " + exc.metaPops.size(), log);
    // metapop size (min, mean, max)
    DescriptiveStatistics ds = new DescriptiveStatistics();
    for (MetaPopulation mp : exc.metaPops) {
        ds.addValue(mp.populations.size());
    }
    state.output.print(" " + ds.getMin() + " " + ds.getMean() + " " + ds.getMax(), log);
    // metapop dispersion (min, mean, max): mean pairwise distance inside each metapop
    ds.clear();
    for (MetaPopulation mp : exc.metaPops) {
        double dispersion = 0;
        for (Integer i : mp.populations) {
            for (Integer j : mp.populations) {
                dispersion += exc.distanceMatrix[i][j];
            }
        }
        // average over all ordered pairs, including self-distances
        ds.addValue(dispersion / (mp.populations.size() * mp.populations.size()));
    }
    state.output.print(" " + ds.getMin() + " " + ds.getMean() + " " + ds.getMax(), log);
    // total number of merges and splits
    int count = 0;
    for (MetaPopulation mp : exc.metaPops) {
        count += mp.waitingIndividuals.size();
    }
    state.output.print(" " + count + " " + exc.splits, log);
    for (int i = 0; i < exc.prototypeSubs.length; i++) {
        // MetaPop to which they belong
        MetaPopulation pop = null;
        for (int m = 0; m < exc.metaPops.size(); m++) {
            if (exc.metaPops.get(m).populations.contains(i)) {
                pop = exc.metaPops.get(m);
                state.output.print(" " + m, log);
            }
        }
        // Population dispersion
        state.output.print(" " + exc.originalMatrix[i][i], log);
        // Normalised distance to internal pops -- include itself -- 1
        // NOTE(review): pop is still null here if subpop i belongs to no metapop,
        // which would NPE below — presumably every subpop is always assigned; confirm.
        ds.clear();
        for (Integer p : pop.populations) {
            ds.addValue(exc.distanceMatrix[i][p]);
        }
        state.output.print(" " + ds.getMin() + " " + ds.getMean() + " " + ds.getMax(), log);
        // Normalised distance to external pops
        ds.clear();
        for (MetaPopulation mp : exc.metaPops) {
            if (mp != pop) {
                for (Integer p : mp.populations) {
                    ds.addValue(exc.distanceMatrix[i][p]);
                }
            }
        }
        // no external pops at all: report a neutral placeholder distance of 1
        if (ds.getN() == 0) {
            ds.addValue(1);
        }
        state.output.print(" " + ds.getMin() + " " + ds.getMean() + " " + ds.getMax(), log);
    }
    // human-readable dump of the metapop composition
    String str = "";
    for (MetaPopulation mp : exc.metaPops) {
        str += mp + " ; ";
    }
    state.output.message(str);
    /*for(double[] m : exc.distanceMatrix) { state.output.message(Arrays.toString(m)); }*/
    // representatives
    /*MetaEvaluator me = (MetaEvaluator) state.evaluator;
    MultiPopCoevolutionaryEvaluator2 baseEval = (MultiPopCoevolutionaryEvaluator2) me.getBaseEvaluator();
    Individual[][] elites = baseEval.getEliteIndividuals();
    ds.clear();
    for(MetaPopulation mp : exc.metaPops) {
        HashSet<Individual> inds = new HashSet<Individual>();
        for(Integer p : mp.populations) { inds.add(elites[p][0]); }
        ds.addValue(inds.size() / (double) mp.populations.size());
    }
    state.output.print(" " + ds.getMin() + " " + ds.getMean() + " " + ds.getMax(), log);*/
    state.output.println("", log);
}
From source file:mase.spec.HybridStat.java
/**
 * Emits one log line of per-generation hybridisation statistics: generation,
 * evaluation count, metapop count/sizes/ages, merge/split counters, and (for
 * stochastic exchangers) a summary of finite inter-metapop distances.
 * The column order here defines the log format.
 */
@Override
public void postPreBreedingExchangeStatistics(EvolutionState state) {
    super.postPreBreedingExchangeStatistics(state);
    AbstractHybridExchanger exc = (AbstractHybridExchanger) state.exchanger;
    // generation, evaluations, and number of metapops
    state.output.print(state.generation + " "
            + ((MaseProblem) state.evaluator.p_problem).getTotalEvaluations() + " " + exc.metaPops.size(), log);
    DescriptiveStatistics ds = new DescriptiveStatistics();
    for (MetaPopulation mp : exc.metaPops) {
        ds.addValue(mp.agents.size());
    }
    // metapop size (min, mean, max)
    state.output.print(" " + ds.getMin() + " " + ds.getMean() + " " + ds.getMax(), log);
    // metapop mean and max age
    ds.clear();
    for (MetaPopulation mp : exc.metaPops) {
        ds.addValue(mp.age);
    }
    state.output.print(" " + ds.getMean() + " " + ds.getMax(), log);
    // number of splits and merges in this generation + total number of splits and merges
    totalMerges += exc.merges;
    totalSplits += exc.splits;
    state.output.print(" " + exc.merges + " " + exc.splits + " " + totalMerges + " " + totalSplits, log);
    if (exc instanceof StochasticHybridExchanger) {
        StochasticHybridExchanger she = (StochasticHybridExchanger) exc;
        // metapop difference to others: upper triangle only, skipping Inf/NaN entries
        ds.clear();
        for (int i = 0; i < she.distanceMatrix.length; i++) {
            for (int j = i + 1; j < she.distanceMatrix.length; j++) {
                if (!Double.isInfinite(she.distanceMatrix[i][j]) && !Double.isNaN(she.distanceMatrix[i][j])) {
                    ds.addValue(she.distanceMatrix[i][j]);
                }
            }
        }
        if (ds.getN() > 0) {
            state.output.print(" " + ds.getN() + " " + ds.getMin() + " " + ds.getMean() + " " + ds.getMax(),
                    log);
        } else {
            // keep the column count stable when no finite distance exists
            state.output.print(" 0 0 0 0", log);
        }
        //printMatrix(she.distanceMatrix, state);
    }
    state.output.println("", log);
    /*for(MetaPopulation mp : exc.metaPops) {
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("%3d", mp.age)).append(" - ").append(mp.toString());
        if(!mp.foreigns.isEmpty()) { sb.append(" - Foreigns:"); }
        for(Foreign f : mp.foreigns) { sb.append(" ").append(f.origin).append("(").append(f.age).append(")"); }
        state.output.message(sb.toString());
    }*/
    /*for(MetaPopulation mp : exc.metaPops) { state.output.message(mp.age + "/" + mp.lockDown); }*/
}
From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.diagnostics.LinePlot.java
/**
 * Generates the quantile series for the specified key: collects every
 * (NFE, metric) observation across the key's accumulators, buckets them into
 * fixed-width NFE windows of size {@code RESOLUTION}, and emits one
 * median-with-interquartile-range point per non-empty window.
 *
 * @param key the key identifying which result to plot
 * @param dataset the dataset to store the generated series
 */
protected void generateQuantileSeries(ResultKey key, YIntervalSeriesCollection dataset) {
    List<DataPoint> dataPoints = new ArrayList<DataPoint>();
    for (Accumulator accumulator : controller.get(key)) {
        if (!accumulator.keySet().contains(metric)) {
            // this accumulator never recorded the metric being plotted
            continue;
        }
        for (int i = 0; i < accumulator.size(metric); i++) {
            dataPoints.add(new DataPoint((Integer) accumulator.get("NFE", i),
                    ((Number) accumulator.get(metric, i)).doubleValue()));
        }
    }
    // sort by NFE so points can be consumed window by window
    Collections.sort(dataPoints);
    YIntervalSeries series = new YIntervalSeries(key);
    DescriptiveStatistics statistics = new DescriptiveStatistics();
    int index = 0;
    int currentNFE = RESOLUTION;
    while (index < dataPoints.size()) {
        DataPoint point = dataPoints.get(index);
        if (point.getNFE() <= currentNFE) {
            // point belongs to the current window: accumulate and advance
            statistics.addValue(point.getValue());
            index++;
        } else {
            // window boundary crossed: emit median + 25th/75th percentiles, reset, move on
            // (empty windows are skipped but still advance currentNFE)
            if (statistics.getN() > 0) {
                series.add(currentNFE, statistics.getPercentile(50), statistics.getPercentile(25),
                        statistics.getPercentile(75));
            }
            statistics.clear();
            currentNFE += RESOLUTION;
        }
    }
    // flush the final, possibly partial, window
    if (statistics.getN() > 0) {
        //if only entry, add extra point to display non-zero width
        if (series.isEmpty()) {
            series.add(currentNFE - RESOLUTION, statistics.getPercentile(50), statistics.getPercentile(25),
                    statistics.getPercentile(75));
        }
        series.add(currentNFE, statistics.getPercentile(50), statistics.getPercentile(25),
                statistics.getPercentile(75));
    }
    dataset.addSeries(series);
}
From source file:info.financialecology.finance.utilities.datastruct.VersatileDataTable.java
/** * Computes the column-wise standard deviation of values whose column * key matches any of the labels passed as arguments. The result is * appended to the bottom of the table and prefixed with a '#' symbol * to prevent methods such as {@link #columnMultiply(double, String...)} * to operate on them./*from w w w .j ava 2 s . c om*/ * @param rowLabel the name of the result as it should appear in the * leftmost row * @param labels a set of filters to match column keys against */ public void insertColumnStdev(String rowLabel, String... labels) { DescriptiveStatistics stats = new DescriptiveStatistics(); List<String> columnKeys = getColumnKeys(); List<String> rowKeys = getRowKeys(); rowLabel = "# " + rowLabel; for (String label : labels) { for (String columnKey : columnKeys) { if (isSubsetOf(columnKey, label)) { for (String rowKey : rowKeys) { if (!StringUtils.startsWith(rowKey, "#")) { stats.addValue(getValue(rowKey, columnKey).doubleValue()); } } } setValue(stats.getStandardDeviation(), rowLabel, columnKey); stats.clear(); } } }
From source file:fr.inria.eventcloud.benchmarks.pubsub.PublishSubscribeBenchmark.java
/**
 * Generates {@code nbEventGenerationRounds} candidate event sets and returns
 * the one whose quadruples are spread most evenly across the zones, i.e. the
 * set whose per-zone counts have the smallest mean deviation.
 *
 * @throws RuntimeException if a generated quadruple falls in no zone
 */
private Event[] computeGenerationsAndSelectBest(EventCloudDeployer deployer, SemanticZone[] zones,
        Node[] fixedPredicates, int nbEvents) {
    // candidate sets and, per set, how many quadruples landed in each zone
    List<Event[]> eventSets = new ArrayList<Event[]>(this.nbEventGenerationRounds);
    List<Integer[]> eventSetsDistribution = new ArrayList<Integer[]>(this.nbEventGenerationRounds);
    for (int i = 0; i < this.nbEventGenerationRounds; i++) {
        Event[] generatedEvents = this.generateEvents(deployer, zones, nbEvents, fixedPredicates);
        Integer[] distribution = new Integer[zones.length];
        for (int j = 0; j < distribution.length; j++) {
            distribution[j] = 0;
        }
        for (Event e : generatedEvents) {
            CompoundEvent ce = (CompoundEvent) e;
            for (int j = 0; j < ce.size(); j++) {
                Quadruple q = ce.get(j);
                boolean belongs = false;
                // a quadruple may be counted in several zones if they overlap its coordinate
                for (int k = 0; k < zones.length; k++) {
                    if (zones[k].contains(SemanticPointFactory.newSemanticCoordinate(q))) {
                        distribution[k] = distribution[k] + 1;
                        belongs = true;
                    }
                }
                if (!belongs) {
                    throw new RuntimeException("Generated quadruple is not managed by the network: " + q);
                }
            }
        }
        eventSets.add(generatedEvents);
        eventSetsDistribution.add(distribution);
    }
    DescriptiveStatistics stats = new DescriptiveStatistics();
    int selectedIndex = 0;
    double bestMeanDeviation = Integer.MAX_VALUE;
    for (int i = 0; i < eventSetsDistribution.size(); i++) {
        for (int j = 0; j < eventSetsDistribution.get(i).length; j++) {
            stats.addValue(eventSetsDistribution.get(i)[j]);
        }
        double mean = stats.getMean();
        // mean deviation = average absolute deviation from the mean
        // http://mathworld.wolfram.com/MeanDeviation.html
        double meanDeviation = 0;
        for (int j = 0; j < eventSetsDistribution.get(i).length; j++) {
            meanDeviation += Math.abs(eventSetsDistribution.get(i)[j] - mean);
        }
        meanDeviation /= eventSetsDistribution.get(i).length;
        if (meanDeviation < bestMeanDeviation) {
            bestMeanDeviation = meanDeviation;
            selectedIndex = i;
        }
        // reset so each round's mean is computed over its own distribution only
        stats.clear();
    }
    return eventSets.get(selectedIndex);
}
From source file:org.lightjason.agentspeak.action.buildin.math.statistic.CClearStatistic.java
/**
 * Empties the given descriptive statistic, discarding every stored sample value.
 *
 * @param p_statistic statistic object to be emptied
 * @return always {@code true}, i.e. the clear operation succeeded
 */
private static boolean apply( final DescriptiveStatistics p_statistic )
{
    p_statistic.clear();
    return true;
}
From source file:org.lightjason.agentspeak.action.builtin.math.statistic.CClearStatistic.java
/**
 * Empties the given descriptive statistic, discarding every stored sample value.
 *
 * @param p_statistic statistic object to be emptied (never {@code null})
 * @return always {@code true}, i.e. the clear operation succeeded
 */
private static boolean apply( @Nonnull final DescriptiveStatistics p_statistic )
{
    p_statistic.clear();
    return true;
}
From source file:org.wso2.carbon.ml.core.spark.models.ext.AnomalyDetectionModel.java
/**
 * This method is to get the percentile distances map.
 * key : cluster index (NOTE(review): the original comment said "percentile
 * value", but the map is keyed by clusterIndex below)
 * value : distance at the given percentile of that cluster's distances
 * This will return cluster boundary distance values with respect to each cluster.
 *
 * @param percentileValue the percentile (0-100) to evaluate per cluster
 */
private Map<Integer, Double> getPercentileDistancesMap(double percentileValue) {
    // Get a DescriptiveStatistics instance, reused (and cleared) per cluster
    DescriptiveStatistics stats = new DescriptiveStatistics();
    /*
     * key : cluster index
     * value : distance value
     */
    Map<Integer, Double> percentilesMap = new HashMap<Integer, Double>();
    // calculating percentile distance of each cluster
    // assumes clusterIndexToDistancesListMap keys are contiguous 0..size-1 — TODO confirm
    for (int clusterIndex = 0; clusterIndex < clusterIndexToDistancesListMap.size(); clusterIndex++) {
        for (double distance : clusterIndexToDistancesListMap.get(clusterIndex)) {
            stats.addValue(distance);
        }
        double percentileDistance = stats.getPercentile(percentileValue);
        percentilesMap.put(clusterIndex, percentileDistance);
        // reset so the next cluster's percentile is computed over its own distances only
        stats.clear();
    }
    return percentilesMap;
}