List of usage examples for the org.apache.commons.math.stat.descriptive.DescriptiveStatistics constructor
public DescriptiveStatistics()
From source file:org.matsim.contrib.socnetgen.sna.snowball.analysis.ResponseRateTask.java
@Override public void analyze(Graph g, Map<String, DescriptiveStatistics> stats) { SampledGraph graph = (SampledGraph) g; DescriptiveStatistics ds = new DescriptiveStatistics(); ds.addValue(SnowballStatistics.getInstance().responseRateTotal(graph.getVertices(), SnowballStatistics.getInstance().lastIteration(graph.getVertices()))); stats.put("responseRate", ds); if (getOutputDirectory() != null) { try {//from w w w. ja va2s. com BufferedWriter writer = new BufferedWriter( new FileWriter(getOutputDirectory() + "/responseRates.txt")); writer.write("iteration\tresponseRateTotal\tresponseRatePerIteration"); writer.newLine(); double[] rateTotal = SnowballStatistics.getInstance().responseRateTotal(graph.getVertices()); double[] rate = SnowballStatistics.getInstance().responseRatePerIteration(graph.getVertices()); for (int i = 0; i < rateTotal.length; i++) { writer.write(String.valueOf(i)); writer.write("\t"); writer.write(String.valueOf(rateTotal[i])); writer.write("\t"); writer.write(String.valueOf(rate[i])); writer.newLine(); } writer.close(); } catch (IOException e) { e.printStackTrace(); } } }
From source file:org.matsim.contrib.socnetgen.sna.snowball.analysis.SeedAPLTask.java
/**
 * Computes shortest-path lengths between all pairs of seed vertices and stores two
 * statistics in {@code results}: all seed-to-seed paths (under KEY) and only the
 * "direct" paths (under KEY + "_direct") that never pass through a vertex belonging
 * to a third seed. Also prints the statistics and writes histograms.
 *
 * NOTE(review): path length is recorded as path.size(), i.e. the number of vertices
 * on the path, not the number of edges -- confirm this matches what Dijkstra.getPath
 * returns and what downstream consumers expect.
 *
 * @param g       the graph to analyze; must actually be a {@link SampledGraph}
 * @param results map the two path-length statistics are added to
 */
@Override
public void analyze(Graph g, Map<String, DescriptiveStatistics> results) {
    SampledGraph graph = (SampledGraph) g;
    // Lazily collect the seed vertices: a vertex detected in iteration -1 is a seed.
    if (seeds == null) {
        seeds = new ArrayList<SampledVertex>(graph.getVertices().size());
        for (SampledVertex vertex : graph.getVertices()) {
            Integer it = vertex.getIterationDetected();
            if (it != null && it == -1) {
                seeds.add(vertex);
            }
        }
    }
    AdjacencyMatrix<SampledVertex> y = new AdjacencyMatrix<SampledVertex>(graph);
    // Map each seed vertex to its index in the adjacency matrix.
    int[] seedIndices = new int[seeds.size()];
    for (int i = 0; i < seeds.size(); i++) {
        seedIndices[i] = y.getIndex(seeds.get(i));
    }
    Dijkstra dijkstra = new Dijkstra(y);
    DescriptiveStatistics stats = new DescriptiveStatistics();
    DescriptiveStatistics statsDirect = new DescriptiveStatistics();
    // All unordered seed pairs: run Dijkstra once per source, then read off each target.
    for (int i = 0; i < seedIndices.length; i++) {
        int idx_i = seedIndices[i];
        dijkstra.run(idx_i, -1);
        for (int j = i + 1; j < seedIndices.length; j++) {
            int idx_j = seedIndices[j];
            TIntArrayList path = dijkstra.getPath(idx_i, idx_j);
            if (path != null) {
                stats.addValue(path.size());
                /*
                 * Filter indirect paths: a path is "indirect" if any interior vertex
                 * was recruited by a seed other than the two endpoints' seeds.
                 */
                boolean indirect = false;
                for (int k = 1; k < path.size() - 1; k++) {
                    SampledVertex v = y.getVertex(path.get(k));
                    // Reference comparison (!=) on seed vertices -- presumably each
                    // vertex stores the identical seed object; verify in SampledVertex.
                    if (v.getSeed() != seeds.get(i) && v.getSeed() != seeds.get(j)) {
                        indirect = true;
                        break;
                    }
                }
                if (!indirect)
                    statsDirect.addValue(path.size());
            }
        }
    }
    results.put(KEY, stats);
    printStats(stats, KEY);
    String key2 = KEY + "_direct";
    results.put(key2, statsDirect);
    printStats(statsDirect, key2);
    try {
        writeHistograms(stats, new LinearDiscretizer(1.0), KEY, false);
        writeHistograms(statsDirect, new LinearDiscretizer(1.0), key2, false);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:org.neo4j.nlp.impl.util.VectorUtil.java
public static double getFeatureMatchDistribution(GraphDatabaseService db, Long patternId) { Transaction tx = db.beginTx();/* w w w .j a va 2s.c o m*/ Node startNode = db.getNodeById(patternId); // Feature match distribution List<Double> matches = IteratorUtil .asCollection(db.traversalDescription().depthFirst() .relationships(withName("HAS_CLASS"), Direction.OUTGOING).evaluator(Evaluators.fromDepth(1)) .evaluator(Evaluators.toDepth(1)).traverse(startNode).relationships()) .stream().map(p -> ((Integer) p.getProperty("matches")).doubleValue()).collect(Collectors.toList()); tx.success(); tx.close(); double variance = 1.0; if (matches.size() > 1) { Double[] matchArr = matches.toArray(new Double[matches.size()]); // Get the standard deviation DescriptiveStatistics ds = new DescriptiveStatistics(); matches.forEach(m -> ds.addValue(m.doubleValue() / StatUtils.sum(ArrayUtils.toPrimitive(matchArr)))); variance = ds.getStandardDeviation(); } return variance; }
From source file:org.openehealth.ipf.commons.test.performance.processingtime.ProcessingTimeDescriptiveStatistics.java
@Override protected void initializeStatisticsIfNecessary(MeasurementHistory history) { // initialize summary statistics only for measurements with name for (Measurement measurement : history.getMeasurements()) { if (!measurement.isNameEmpty()) { String name = measurement.getName(); if (!statisticsByMeasurementName.containsKey(name)) { statisticsByMeasurementName.put(name, new DescriptiveStatistics()); }//from w w w . jav a2 s. com } } }
From source file:org.ow2.clif.jenkins.parser.clif.ActionStatInfo.java
/** * Compute statistics and build associated graphs *///from ww w.j a va 2 s. c om public void compute() { // get double arrays valuesArray = values.getElements(); datesArray = dates.getElements(); if (context.dataCleanup && (context.getBlade() == null || context.getBlade().isInjector())) { dataCleanup(); } // Build stat objet stat = new DescriptiveStatistics(); for (double aValuesArray : valuesArray) { stat.addValue(aValuesArray); } // Build detailled graph callChart = createCallChart(); if (context.getBlade() == null || context.getBlade().isInjector()) { movingStatChart = createMovingStatChart(); // Build distribution graph fixedSliceNumberDistributionChart = createFixedSliceNumberDistributionChart(); fixedSliceSizeDistributionChart = createFixedSliceSizeDistributionChart(); quantileDistributionChart = createQuantileDistributionChart(); } statsAvailable = true; }
From source file:org.rascalmpl.library.analysis.statistics.Descriptive.java
DescriptiveStatistics make(IList dataValues) { DescriptiveStatistics stats = new DescriptiveStatistics(); for (IValue v : dataValues) { stats.addValue(((INumber) v).toReal().floatValue()); }//from w w w . jav a2 s.c om return stats; }
From source file:org.xenmaster.monitoring.data.Record.java
protected final void applyStatistics(Collection<Double> values) { // Let's get statistical DescriptiveStatistics ds = new DescriptiveStatistics(); for (double util : values) { ds.addValue(util);/*from w w w. ja v a 2s .c om*/ } double a = ds.getMean(); double stdDev = ds.getStandardDeviation(); // TODO: actually test this and generate warning // Check if all vCPUs have a fair load, e.g. [45, 60, 50] would be fair, [90, 4, 2] indicates you should learn threading if (stdDev > 0.8) { Logger.getLogger(getClass()) .info((vm ? "VM" : "Host") + " " + reference + " has an unfair load distribution"); } if (stdDev > 0) { try { NormalDistributionImpl ndi = new NormalDistributionImpl(ds.getMean(), stdDev); double cp = ndi.cumulativeProbability(90); if (cp > 0.8) { // 80% of the CPUs have a >90% load // TODO warning Logger.getLogger(getClass()).info((vm ? "VM" : "Host") + " " + reference + " has a load >=90% on 80% of the available CPUs"); } } catch (MathException ex) { Logger.getLogger(getClass()).error("Flawed maths", ex); } } }
From source file:playground.artemc.pricing.SocialCostCalculator.java
/**
 * Computes per-leg social costs and distances from the performed legs, records
 * summary statistics (raw and distance-normalized), logs them, and appends the
 * key quantiles to the iteration-level result lists.
 */
private void calcStatistics() {
    // Get a DescriptiveStatistics instance
    DescriptiveStatistics tripStats = new DescriptiveStatistics();
    DescriptiveStatistics tripStatsNormalized = new DescriptiveStatistics();

    // Add the data from the array
    for (LegTrip legTrip : performedLegs) {
        double distance = 0.0;
        double cost = 0.0;
        for (LinkTrip linkTrip : legTrip.linkTrips) {
            double socialCosts = calcSocCosts(linkTrip.link_id, linkTrip.enterTime);
            if (socialCosts > 0.0)
                cost = cost + socialCosts;
            // BUGFIX: accumulate every traversed link's length. The original wrote
            // "distance = legTrip.distance + length", re-reading the unchanged field
            // each iteration so only the LAST link's length survived the loop.
            distance = distance + network.getLinks().get(linkTrip.link_id).getLength();
        }
        legTrip.distance = distance;
        legTrip.cost = cost;
        tripStats.addValue(cost);

        /*
         * Normalize a legs social cost by dividing them by the leg travel time or leg distance.
         */
        //double legTravelTime = legTrip.arrivalTime - legTrip.departureTime;
        if (cost > 0.0 && legTrip.distance > 0.0)
            tripStatsNormalized.addValue(cost / legTrip.distance);
    }

    // Compute some statistics
    double sum = tripStats.getSum();
    double mean = tripStats.getMean();
    double std = tripStats.getStandardDeviation();
    double median = tripStats.getPercentile(50);
    double quantile25 = tripStats.getPercentile(25);
    double quantile75 = tripStats.getPercentile(75);

    double sumNormalized = tripStatsNormalized.getSum();
    double meanNormalized = tripStatsNormalized.getMean();
    double stdNormalized = tripStatsNormalized.getStandardDeviation();
    double medianNormalized = tripStatsNormalized.getPercentile(50);
    double quantile25Normalized = tripStatsNormalized.getPercentile(25);
    double quantile75Normalized = tripStatsNormalized.getPercentile(75);

    log.info("Sum of all leg costs: " + sum);
    log.info("Mean leg costs: " + mean);
    log.info("Standard deviation: " + std);
    log.info("Median leg costs: " + median);
    log.info("25% quantile leg costs: " + quantile25);
    log.info("75% quantile leg costs: " + quantile75);

    log.info("Normalized sum of all leg costs: " + sumNormalized);
    log.info("Normalized mean leg costs: " + meanNormalized);
    log.info("Normalized standard deviation: " + stdNormalized);
    log.info("Normalized median leg costs: " + medianNormalized);
    log.info("Normalized 25% quantile leg costs: " + quantile25Normalized);
    log.info("Normalized 75% quantile leg costs: " + quantile75Normalized);

    meanSocialCosts.add(mean);
    medianSocialCosts.add(median);
    quantil25PctSocialCosts.add(quantile25);
    quantil75PctSocialCosts.add(quantile75);

    meanNormalizedSocialCosts.add(meanNormalized);
    medianNormalizedSocialCosts.add(medianNormalized);
    quantil25PctNormalizedSocialCosts.add(quantile25Normalized);
    quantil75PctNormalizedSocialCosts.add(quantile75Normalized);
}
From source file:playground.christoph.socialcosts.SocialCostCalculator.java
private void calcStatistics() { // Get a DescriptiveStatistics instance DescriptiveStatistics stats = new DescriptiveStatistics(); DescriptiveStatistics statsNormalized = new DescriptiveStatistics(); // Add the data from the array for (LegTrip legTrip : performedLegs) { double costs = 0.0; for (LinkTrip linkTrip : legTrip.linkTrips) { double socialCosts = calcSocCosts(linkTrip.link_id, linkTrip.enterTime); if (socialCosts > 0.0) costs = costs + socialCosts; }//ww w . ja va 2s .com stats.addValue(costs); /* * Normalize a legs social cost by dividing them by the leg travel time. * As a result we get something like social costs per traveled second. * Another option would be doing this on link level instead of leg level. */ double legTravelTime = legTrip.arrivalTime - legTrip.departureTime; if (costs > 0.0 && legTravelTime > 0.0) statsNormalized.addValue(costs / legTravelTime); } // Compute some statistics double sum = stats.getSum(); double mean = stats.getMean(); double std = stats.getStandardDeviation(); double median = stats.getPercentile(50); double quantile25 = stats.getPercentile(25); double quantile75 = stats.getPercentile(75); double sumNormalized = statsNormalized.getSum(); double meanNormalized = statsNormalized.getMean(); double stdNormalized = statsNormalized.getStandardDeviation(); double medianNormalized = statsNormalized.getPercentile(50); double quantile25Normalized = statsNormalized.getPercentile(25); double quantile75Normalized = statsNormalized.getPercentile(75); log.info("Sum of all leg costs: " + sum); log.info("Mean leg costs: " + mean); log.info("Standard deviation: " + std); log.info("Median leg costs: " + median); log.info("25% quantile leg costs: " + quantile25); log.info("75% quantile leg costs: " + quantile75); log.info("Normalized sum of all leg costs: " + sumNormalized); log.info("Normalized mean leg costs: " + meanNormalized); log.info("Normalized standard deviation: " + stdNormalized); log.info("Normalized median leg costs: " + 
medianNormalized); log.info("Normalized 25% quantile leg costs: " + quantile25Normalized); log.info("Normalized 75% quantile leg costs: " + quantile75Normalized); meanSocialCosts.add(mean); medianSocialCosts.add(median); quantil25PctSocialCosts.add(quantile25); quantil75PctSocialCosts.add(quantile75); meanNormalizedSocialCosts.add(meanNormalized); medianNormalizedSocialCosts.add(medianNormalized); quantil25PctNormalizedSocialCosts.add(quantile25Normalized); quantil75PctNormalizedSocialCosts.add(quantile75Normalized); }
From source file:playground.johannes.coopsim.analysis.AbstractPersonProperty.java
/**
 * Collects the per-person property values for the given persons into a
 * {@link DescriptiveStatistics} instance.
 *
 * @param persons the persons whose property values are aggregated
 * @return statistics over all values returned by {@code values(persons)}
 */
@Override
public DescriptiveStatistics statistics(Set<? extends Person> persons) {
    TObjectDoubleHashMap<Person> values = values(persons);
    DescriptiveStatistics stats = new DescriptiveStatistics();
    // Trove iterators must be advanced explicitly before reading each entry.
    TObjectDoubleIterator<Person> it = values.iterator();
    while (it.hasNext()) {
        it.advance();
        stats.addValue(it.value());
    }
    return stats;
}