List of usage examples for org.apache.commons.math3.stat.descriptive SummaryStatistics addValue
public void addValue(double value)
From source file:ijfx.core.stats.DefaultImageStatisticsService.java
@Override public <T extends RealType<T>> SummaryStatistics getSummaryStatistics(Cursor<T> cursor) { final SummaryStatistics stats = new SummaryStatistics(); cursor.reset();//ww w .j a v a2 s.c o m while (cursor.hasNext()) { cursor.fwd(); stats.addValue(cursor.get().getRealDouble()); } return stats; }
From source file:net.adamjak.thomas.graph.application.gui.ResultsWidnow.java
/**
 * Builds the table shown in the results window.
 *
 * If the results map contains the raw per-run data under the key
 * "resultsData", a per-graph statistics table (mean, standard deviation,
 * min, max of the measured run times) is produced; otherwise a simple
 * key/value table of every entry in the map is shown.
 *
 * @return a {@link JTable} ready for display
 */
private JTable createJtResults() {
    if (this.results.containsKey("resultsData")) {
        // First index = run, second index = graph.
        GraphTestResult[][] results = (GraphTestResult[][]) this.results.get("resultsData");
        // FIX: "Avarage" -> "Average" (typo in the user-visible column header).
        String[] columnNames = { "Graph ID", "Average time", "Standard deviation", "Minimum", "Maximum" };
        Object[][] data = new Object[results[0].length][5];
        for (int graph = 0; graph < results[0].length; graph++) {
            // Aggregate this graph's run times across all runs.
            SummaryStatistics summaryStatistics = new SummaryStatistics();
            for (int run = 0; run < results.length; run++) {
                summaryStatistics.addValue((double) results[run][graph].getValue("timeInSeconds"));
            }
            data[graph][0] = graph;
            data[graph][1] = summaryStatistics.getMean();
            data[graph][2] = summaryStatistics.getStandardDeviation();
            data[graph][3] = summaryStatistics.getMin();
            data[graph][4] = summaryStatistics.getMax();
        }
        return new JTable(data, columnNames);
    } else {
        String[] columnNames = { "Description", "Result" };
        // size() instead of keySet().size() — same value, no intermediate view.
        Object[][] data = new Object[this.results.size()][2];
        int i = 0;
        for (String key : this.results.keySet()) {
            data[i][0] = key;
            data[i][1] = this.results.get(key);
            i++;
        }
        return new JTable(data, columnNames);
    }
}
From source file:com.thoughtworks.studios.journey.models.ActionCorrelationCalculation.java
/**
 * Computes, for every action in the raw data, the Spearman rank correlation
 * between the action's first series (x) and its second series (y), with a
 * two-sided p-value from a t-distribution with n - 2 degrees of freedom.
 * Per-action summary statistics of x are split by outcome (y == 1 vs other).
 *
 * @return correlation results sorted by descending absolute correlation
 */
public List<CorrelationResult> calculate() {
    Map<String, List<List<Integer>>> data = rawData();
    List<CorrelationResult> results = new ArrayList<>(data.size());
    // Iterate entries instead of keySet()+get() to avoid a second lookup per action.
    for (Map.Entry<String, List<List<Integer>>> entry : data.entrySet()) {
        String action = entry.getKey();
        List<List<Integer>> variables = entry.getValue();
        SpearmansCorrelation correlation = new SpearmansCorrelation();
        double[] x = toDoubleArray(variables.get(0));
        double[] y = toDoubleArray(variables.get(1));
        double r = correlation.correlation(x, y);
        // NOTE(review): x.length - 2 must be >= 1 for the t-distribution, and
        // r == +/-1 makes the t statistic divide by zero — confirm rawData()
        // guarantees enough samples per action.
        TDistribution tDistribution = new TDistribution(x.length - 2);
        double t = FastMath.abs(r * FastMath.sqrt((x.length - 2) / (1 - r * r)));
        // Two-sided p-value.
        double pValue = 2 * tDistribution.cumulativeProbability(-t);
        SummaryStatistics successSt = new SummaryStatistics();
        SummaryStatistics failSt = new SummaryStatistics();
        for (int i = 0; i < x.length; i++) {
            if (y[i] == 1) {
                successSt.addValue(x[i]);
            } else {
                failSt.addValue(x[i]);
            }
        }
        results.add(new CorrelationResult(action, r, pValue, successSt, failSt));
    }
    Collections.sort(results, new Comparator<CorrelationResult>() {
        @Override
        public int compare(CorrelationResult r1, CorrelationResult r2) {
            // Descending by |correlation|; Double.compare avoids the
            // autoboxing the original boxed Double.compareTo incurred.
            return Double.compare(Math.abs(r2.getCorrelation()), Math.abs(r1.getCorrelation()));
        }
    });
    return results;
}
From source file:co.turnus.analysis.profiler.dynamic.FifoDataBox.java
/**
 * Records the token traffic of one execution step on this FIFO.
 *
 * Reads and writes are accumulated both globally (readTokens/writeTokens)
 * and per triggering action (actionReadTokens/actionWriteTokens, created
 * lazily on first use). Whenever the step touched the FIFO at all, the
 * current number of token producers is sampled into the occupancy series.
 *
 * @param stepData data box of the step being profiled
 */
public void collectData(StepDataBox stepData) {
    Action action = stepData.getStep().getAction(Action.class);
    // Tokens this step moved through the profiled FIFO.
    int read = stepData.getReadTokens().count(fifo);
    int write = stepData.getWriteTokens().count(fifo);
    if (read > 0) {
        readTokens.addValue(read);
        // lazy creation of the readers map
        SummaryStatistics stat = actionReadTokens.get(action);
        if (stat == null) {
            stat = new SummaryStatistics();
            actionReadTokens.put(action, stat);
        }
        stat.addValue(read);
    }
    if (write > 0) {
        writeTokens.addValue(write);
        // lazy creation of the writers map
        SummaryStatistics stat = actionWriteTokens.get(action);
        if (stat == null) {
            stat = new SummaryStatistics();
            actionWriteTokens.put(action, stat);
        }
        stat.addValue(write);
    }
    // NOTE(review): occupancy samples tokenProducers.size(), not a token
    // count — presumably the number of pending producers; confirm semantics.
    if (read != 0 || write != 0) {
        occupancy.addValue(tokenProducers.size());
    }
}
From source file:eu.crydee.alignment.aligner.ae.MetricsOneVsOneC.java
/**
 * Renders the collected per-document, per-metric results into an HTML
 * report once the whole collection has been processed.
 *
 * A bundled template is loaded from the classpath and its placeholder
 * tokens (@@TITLE@@, @@LEFTALGO@@, @@RIGHTALGO@@, @@TABLE@@) are replaced.
 * For each metric a "Total" row shows mean and standard deviation of both
 * algorithms, colour-coded by a paired t-test at alpha = 0.05, followed by
 * one row per document with the raw left/right values.
 *
 * @throws AnalysisEngineProcessException wrapping any I/O failure
 */
@Override
public void collectionProcessComplete() throws AnalysisEngineProcessException {
    try {
        // Load the HTML skeleton bundled on the classpath.
        String template = IOUtils.toString(getClass()
                .getResourceAsStream("/eu/crydee/alignment/aligner/ae/" + "metrics-one-vs-one-template.html"));
        template = template.replace("@@TITLE@@",
                "Metrics comparator" + LocalDateTime.now().format(DateTimeFormatter.ISO_DATE_TIME));
        template = template.replace("@@LEFTALGO@@", leftAlgoName);
        template = template.replace("@@RIGHTALGO@@", rightAlgoName);
        StringBuilder sb = new StringBuilder();
        // Table header: one two-column group per metric key.
        sb.append("<table class=\"table table-condensed\">\n").append(" <thead>\n")
                .append(" <tr>\n").append(" <th>Document\\Metric</th>\n");
        for (String key : keys) {
            sb.append(" <th colspan=\"2\">").append(methodsMetadata.get(key).getRight())
                    .append("</th>\n");
        }
        sb.append(" <tr>\n").append(" </thead>\n").append(" <tbody>\n")
                .append(" <tr>\n").append(" <td>\n")
                .append(" <strong>Total</strong>\n")
                .append(" </td>\n");
        for (String key : keys) {
            // ss1/ss2 accumulate the left/right metric values for this key;
            // peek() fills them while the column values are materialised.
            SummaryStatistics ss1 = new SummaryStatistics(), ss2 = new SummaryStatistics();
            List<Pair<Double, Double>> column = results.column(key).values().stream().peek(p -> {
                ss1.addValue(p.getLeft());
                ss2.addValue(p.getRight());
            }).collect(Collectors.toList());
            // Paired t-test between the two algorithms at the 5% level.
            boolean significant = TestUtils.pairedTTest(column.stream().mapToDouble(p -> p.getLeft()).toArray(),
                    column.stream().mapToDouble(p -> p.getRight()).toArray(), 0.05);
            double mean1 = ss1.getMean(), mean2 = ss2.getMean();
            boolean above = mean1 > mean2;
            // NOTE(review): the `+ ""` below looks like a character (perhaps
            // a "±" separator) lost when this snippet was extracted — confirm
            // against the original source before relying on the output.
            String summary1 = String.format("%.3f", mean1) + "<small class=\"text-muted\">" + ""
                    + String.format("%.3f", ss1.getStandardDeviation()) + "</small>",
                    summary2 = String.format("%.3f", mean2) + "<small class=\"text-muted\">" + ""
                            + String.format("%.3f", ss2.getStandardDeviation()) + "</small>";
            // Colour: green for the significantly better mean, red for the
            // other, amber when the difference is not significant.
            sb.append(" <td class=\"")
                    .append(significant ? (above ? "success" : "danger") : "warning").append("\">")
                    .append(summary1).append("</td>\n");
            sb.append(" <td class=\"")
                    .append(significant ? (!above ? "success" : "danger") : "warning").append("\">")
                    .append(summary2).append("</td>\n");
        }
        sb.append(" </tr>\n");
        // One row per document, sorted by document id.
        SortedSet<String> rows = new TreeSet<>(results.rowKeySet());
        for (String row : rows) {
            sb.append(" <tr>\n").append(" <td>").append(row)
                    .append("</td>\n");
            for (String key : keys) {
                Pair<Double, Double> r = results.get(row, key);
                sb.append(" <td>").append(String.format("%.3f", r.getLeft()))
                        .append("</td>\n").append(" <td>")
                        .append(String.format("%.3f", r.getRight())).append("</td>\n");
            }
            sb.append(" </tr>\n");
        }
        sb.append(" </tbody>\n").append(" </table>");
        FileUtils.write(new File(htmlFilepath), template.replace("@@TABLE@@", sb.toString()),
                StandardCharsets.UTF_8);
    } catch (IOException ex) {
        logger.error("IO problem with the HTML output.");
        throw new AnalysisEngineProcessException(ex);
    }
}
From source file:net.recommenders.rival.evaluation.statistics.ConfidenceInterval.java
/**
 * Method that takes two metrics as parameters. It will compute the
 * differences between both (only considering the keys in the overlap).
 *
 * @param <V> type of keys for metrics
 * @param alpha probability of incorrectly rejecting the null hypothesis (1
 * - confidence_level)
 * @param baselineMetricPerDimension baseline metric, one value for each
 * dimension
 * @param testMetricPerDimension test metric, one value for each dimension
 * @param pairedSamples flag to indicate if the comparison should be made
 * for the distribution of difference scores (when true) or for the
 * distribution of differences between means
 * @return array with the confidence interval: [mean - margin of error, mean
 * + margin of error]
 */
public <V> double[] getConfidenceInterval(final double alpha, final Map<V, Double> baselineMetricPerDimension,
        final Map<V, Double> testMetricPerDimension, final boolean pairedSamples) {
    if (pairedSamples) {
        // Only dimensions present in BOTH maps can form matched pairs.
        Set<V> overlap = new HashSet<V>(baselineMetricPerDimension.keySet());
        overlap.retainAll(testMetricPerDimension.keySet());
        // paired or matched samples --> analyse distribution of difference scores
        SummaryStatistics differences = new SummaryStatistics();
        for (V key : overlap) {
            // NOTE(review): Math.abs discards the sign of each paired
            // difference, which changes the distribution the interval is
            // computed over — confirm this is the intended statistic.
            double diff = Math.abs(testMetricPerDimension.get(key) - baselineMetricPerDimension.get(key));
            differences.addValue(diff);
        }
        // NOTE(review): this branch passes alpha / 2 while the unpaired
        // branch below passes alpha — verify the overloaded callee's
        // expectation; one of the two is likely halving twice or not at all.
        return getConfidenceInterval(alpha / 2, (int) differences.getN() - 1, (int) differences.getN(),
                differences.getStandardDeviation(), differences.getMean());
    } else {
        // independent samples --> analyse distribution of differences between means
        SummaryStatistics statsBaseline = new SummaryStatistics();
        for (double d : baselineMetricPerDimension.values()) {
            statsBaseline.addValue(d);
        }
        SummaryStatistics statsTest = new SummaryStatistics();
        for (double d : testMetricPerDimension.values()) {
            statsTest.addValue(d);
        }
        // Degrees of freedom for two independent samples: n1 + n2 - 2.
        long dfT = statsBaseline.getN() + statsTest.getN() - 2;
        // Pooled standard error of the difference between the two means.
        double sDif = Math.sqrt((1.0 / statsBaseline.getN() + 1.0 / statsTest.getN())
                * (statsBaseline.getVariance() * (statsBaseline.getN() - 1)
                        + statsTest.getVariance() * (statsTest.getN() - 1)));
        double mDif = Math.abs(statsTest.getMean() - statsBaseline.getMean());
        return getConfidenceInterval(alpha, (int) dfT, (int) dfT, sDif, mDif);
    }
}
From source file:model.scenario.OneLinkSupplyChainResult.java
/**
 * Runs one beef-monopolist supply-chain simulation and returns the averaged
 * outcome of its final 1000 simulated days.
 *
 * The scenario has a single beef (input good) producer and 5 food (output
 * good) producers. The beefLearned/foodLearned flags switch the relevant
 * departments between fixed ("learned") predictors and their default
 * learning predictors. Averages of food price, beef price, beef volume and
 * the monopolist's predicted demand/supply slopes are collected after a
 * 14000-step warm-up.
 *
 * @param random seed for the simulation's RNG
 * @param divideMonopolistGainsByThis divisor applied to the monopolist's
 *        proportional and integrative controller gains
 * @param monopolistSpeed beef pricing speed (0 = no delay)
 * @param beefLearned if true, beef sales/HR departments use fixed predictors
 * @param foodLearned if true, food purchase/sales/HR departments use fixed predictors
 * @param woodPricingFactory optional ask-pricing factory for the beef market
 * @param furniturePricingFactory optional ask-pricing factory for the food market
 * @param csvFileToWrite optional daily-statistics CSV output
 * @param logFileToWrite optional debug log for the beef seller
 * @param regressionLogToWrite optional regression debug log for the
 *        error-correcting sales predictor (only used when !beefLearned)
 * @return the averaged prices/volume plus the finished model
 */
public static OneLinkSupplyChainResult beefMonopolistOneRun(long random, float divideMonopolistGainsByThis,
        int monopolistSpeed, final boolean beefLearned, final boolean foodLearned,
        Function<SalesDepartment, AskPricingStrategy> woodPricingFactory,
        Function<SalesDepartment, AskPricingStrategy> furniturePricingFactory, File csvFileToWrite,
        File logFileToWrite, Path regressionLogToWrite) {
    final MacroII macroII = new MacroII(random);
    // Anonymous scenario subclass wires the learned/learning predictors in.
    final OneLinkSupplyChainScenarioWithCheatingBuyingPrice scenario1 = new OneLinkSupplyChainScenarioWithCheatingBuyingPrice(
            macroII) {
        @Override
        protected void buildBeefSalesPredictor(SalesDepartment dept) {
            if (beefLearned) {
                // "Learned" monopolist: fixed demand-slope predictor.
                FixedDecreaseSalesPredictor predictor = SalesPredictor.Factory
                        .newSalesPredictor(FixedDecreaseSalesPredictor.class, dept);
                predictor.setDecrementDelta(2);
                dept.setPredictorStrategy(predictor);
            } else {
                assert dept.getPredictorStrategy() instanceof ErrorCorrectingSalesPredictor;
                //assuming here nothing has been changed and we are still dealing with recursive sale predictors
                try {
                    if (regressionLogToWrite != null)
                        ((ErrorCorrectingSalesPredictor) dept.getPredictorStrategy())
                                .setDebugWriter(regressionLogToWrite);
                } catch (IOException e) {
                    // Best-effort debug logging; the run proceeds without it.
                    e.printStackTrace();
                }
            }
        }

        @Override
        public void buildFoodPurchasesPredictor(PurchasesDepartment department) {
            if (foodLearned)
                department.setPredictor(new FixedIncreasePurchasesPredictor(0));
        }

        @Override
        protected SalesDepartment createSalesDepartment(Firm firm, Market goodmarket) {
            SalesDepartment department = super.createSalesDepartment(firm, goodmarket);
            if (goodmarket.getGoodType().equals(OneLinkSupplyChainScenario.OUTPUT_GOOD)) {
                if (foodLearned)
                    department.setPredictorStrategy(new FixedDecreaseSalesPredictor(0));
            }
            return department;
        }

        @Override
        protected HumanResources createPlant(Blueprint blueprint, Firm firm, Market laborMarket) {
            HumanResources hr = super.createPlant(blueprint, firm, laborMarket);
            if (blueprint.getOutputs().containsKey(OneLinkSupplyChainScenario.INPUT_GOOD)) {
                if (beefLearned) {
                    hr.setPredictor(new FixedIncreasePurchasesPredictor(1));
                }
            }
            if (blueprint.getOutputs().containsKey(OneLinkSupplyChainScenario.OUTPUT_GOOD)) {
                if (foodLearned)
                    hr.setPredictor(new FixedIncreasePurchasesPredictor(0));
            }
            return hr;
        }
    };
    scenario1.setControlType(MarginalMaximizer.class);
    scenario1.setSalesDepartmentType(SalesDepartmentOneAtATime.class);
    scenario1.setBeefPriceFilterer(null);
    if (woodPricingFactory != null)
        scenario1.setBeefPricingFactory(woodPricingFactory);
    if (furniturePricingFactory != null)
        scenario1.setFoodPricingFactory(furniturePricingFactory);
    //competition!
    scenario1.setNumberOfBeefProducers(1);
    scenario1.setBeefTargetInventory(100);
    scenario1.setFoodTargetInventory(100);
    scenario1.setNumberOfFoodProducers(5);
    scenario1.setDivideProportionalGainByThis(divideMonopolistGainsByThis);
    scenario1.setDivideIntegrativeGainByThis(divideMonopolistGainsByThis);
    //no delay
    scenario1.setBeefPricingSpeed(monopolistSpeed);
    //add csv writer if needed
    if (csvFileToWrite != null)
        DailyStatCollector.addDailyStatCollectorToModel(csvFileToWrite, macroII);
    macroII.setScenario(scenario1);
    macroII.start();
    // One step so the market/seller objects exist before attaching the logger.
    macroII.schedule.step(macroII);
    if (logFileToWrite != null)
        scenario1.getMarkets().get(OneLinkSupplyChainScenario.INPUT_GOOD).getSellers().iterator().next()
                .addLogEventListener(new LogToFile(logFileToWrite, LogLevel.DEBUG, macroII));
    // Warm-up: run until day 14000 before measuring anything.
    while (macroII.schedule.getTime() < 14000) {
        macroII.schedule.step(macroII);
    }
    SummaryStatistics averageFoodPrice = new SummaryStatistics();
    SummaryStatistics averageBeefProduced = new SummaryStatistics();
    SummaryStatistics averageBeefPrice = new SummaryStatistics();
    SummaryStatistics averageSalesSlope = new SummaryStatistics();
    SummaryStatistics averageHrSlope = new SummaryStatistics();
    // The single beef producer is the monopolist under study.
    final Firm monopolist = (Firm) scenario1.getMarkets().get(OneLinkSupplyChainScenario.INPUT_GOOD)
            .getSellers().iterator().next();
    // Measurement window: 1000 additional simulated days.
    for (int j = 0; j < 1000; j++) {
        //make the model run one more day:
        macroII.schedule.step(macroII);
        averageFoodPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.OUTPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
        averageBeefProduced
                .addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD).getYesterdayVolume());
        averageBeefPrice.addValue(macroII.getMarket(OneLinkSupplyChainScenario.INPUT_GOOD)
                .getLatestObservation(MarketDataType.AVERAGE_CLOSING_PRICE));
        averageSalesSlope.addValue(monopolist.getSalesDepartment(OneLinkSupplyChainScenario.INPUT_GOOD)
                .getLatestObservation(SalesDataType.PREDICTED_DEMAND_SLOPE));
        averageHrSlope.addValue(monopolist.getHRs().iterator().next()
                .getLatestObservation(PurchasesDataType.PREDICTED_SUPPLY_SLOPE));
    }
    System.out.println("seed: " + random);
    System.out.println("beef price: " + averageBeefPrice.getMean());
    System.out.println("food price: " + averageFoodPrice.getMean());
    System.out.println("produced: " + averageBeefProduced.getMean());
    extractAndPrintSlopesOfBeefSellers(macroII);
    System.out.println();
    macroII.finish();
    return new OneLinkSupplyChainResult(averageBeefPrice.getMean(), averageFoodPrice.getMean(),
            averageBeefProduced.getMean(), macroII);
}
From source file:de.thkwalter.koordinatensystem.Achsendimensionierung.java
/** * Diese Methode bestimmt die Wertebereiche in x- und y-Richtung. * /*from w w w .java 2 s . co m*/ * @param punkte Die Punktemenge. * * @return Der Wertebereich der Punktemenge. */ private Wertebereich wertebereichBestimmen(Vector2D[] punkte) { // Die Objekte, die zur Bestimmung der minimalen und maximalen Werte verwendet werden, werden erzeugt. SummaryStatistics summaryStatisticsX = new SummaryStatistics(); SummaryStatistics summaryStatisticsY = new SummaryStatistics(); // Die Objekte, die zur Bestimmung der minimalen und maximalen Werte verwendet werden, werden initialisiert. for (Vector2D punkt : punkte) { summaryStatisticsX.addValue(punkt.getX()); summaryStatisticsY.addValue(punkt.getY()); } // Die Maxima und Minima werden bestimmt. double maxX = summaryStatisticsX.getMax(); double maxY = summaryStatisticsY.getMax(); double minX = summaryStatisticsX.getMin(); double minY = summaryStatisticsY.getMin(); // Der maximale Wertebereich der Punktemenge wird zurckgegeben. return new Wertebereich(maxX, maxY, minX, minY); }
From source file:gdsc.smlm.results.filter.HysteresisFilter.java
/**
 * Prepares the hysteresis filter: classifies every result as OK, candidate
 * or rejected, then rescues candidates that can be traced (within a search
 * distance derived from the candidates' mean localisation precision) to a
 * confirmed OK result.
 *
 * @param peakResults the results to filter; provides calibration
 *        (nm/pixel, gain, EM-CCD flag) for the precision estimate
 */
@Override
public void setup(MemoryPeakResults peakResults) {
    ok = new HashSet<PeakResult>();

    // Create a set of candidates and valid peaks
    MemoryPeakResults traceResults = new MemoryPeakResults();

    // Initialise peaks to check
    LinkedList<PeakResult> candidates = new LinkedList<PeakResult>();
    for (PeakResult result : peakResults.getResults()) {
        switch (getStatus(result)) {
        case OK:
            ok.add(result);
            traceResults.add(result);
            break;
        case CANDIDATE:
            candidates.add(result);
            traceResults.add(result);
            break;
        default:
            break;
        }
    }

    if (candidates.isEmpty())
        return;

    // Find average precision of the candidates and use it for the search
    // distance
    SummaryStatistics stats = new SummaryStatistics();
    final double nmPerPixel = peakResults.getNmPerPixel();
    final double gain = peakResults.getGain();
    final boolean emCCD = peakResults.isEMCCD();
    for (PeakResult peakResult : candidates) {
        stats.addValue(peakResult.getPrecision(nmPerPixel, gain, emCCD));
    }
    // Convert the mean precision (nm) back to pixels and scale by the
    // configured search-distance factor.
    double distanceThreshold = stats.getMean() * searchDistance / nmPerPixel;

    // Trace through candidates
    TraceManager tm = new TraceManager(traceResults);
    tm.setTraceMode(TraceMode.LATEST_FORERUNNER);
    tm.traceMolecules(distanceThreshold, 1);
    Trace[] traces = tm.getTraces();

    for (Trace trace : traces) {
        if (trace.size() > 1) {
            // Check if the trace touches a valid point
            boolean isOk = false;
            for (PeakResult result : trace.getPoints()) {
                if (ok.contains(result)) {
                    isOk = true;
                    break;
                }
                // NOTE(review): this adds every scanned point to `ok` even
                // when the trace later turns out NOT to touch a valid point
                // (there is no rollback) — confirm this is intentional and
                // not a leak of unconfirmed candidates into the OK set.
                ok.add(result);
            }
            // Add the entire trace to the OK points
            if (isOk) {
                for (PeakResult result : trace.getPoints()) {
                    ok.add(result);
                }
            }
        }
    }
}
From source file:model.experiments.stickyprices.StickyPricesCSVPrinter.java
private static void woodMonopolistSupplyChainSweep() throws IOException { CSVWriter writer = new CSVWriter( new FileWriter(Paths.get("runs", "rawdata", "woodMonopolistStickinessesSweep.csv").toFile())); final String[] header = { "decisionSpeed", "stickiness", "distance", "finaldistance" }; System.out.println(Arrays.toString(header)); writer.writeNext(header);//from ww w. j a va2 s . co m for (int speed = 1; speed < 30; speed++) { for (int stickiness = 0; stickiness < 50; stickiness++) { SummaryStatistics distance = new SummaryStatistics(); SummaryStatistics finalDistance = new SummaryStatistics(); for (int seed = 0; seed < 5; seed++) { final double[] result = beefMonopolistOneRun(seed, 1, stickiness, true, true, speed, null); distance.addValue(result[0]); finalDistance.addValue(result[1]); } final String[] nextLine = { String.valueOf(speed), String.valueOf(stickiness), String.valueOf(distance.getMean()), String.valueOf(finalDistance.getMean()) }; System.out.println(Arrays.toString(nextLine)); writer.writeNext(nextLine); writer.flush(); } } writer.close(); }