List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics DescriptiveStatistics
public DescriptiveStatistics()
From source file:fr.inria.wimmics.coresetimer.CoreseTimer.java
public CoreseTimer run() throws ClassNotFoundException, IllegalAccessException, InstantiationException, IOException, LoadException { LOGGER.entering(CoreseTimer.class.getName(), "run"); assert (initialized); // Loading the nq data in corese, then applying several times the query. LOGGER.log(Level.INFO, "beginning with input #{0}", test.getInput()); // require to have a brand new adapter for each new input set. adapter = (CoreseAdapter) Class.forName(adapterName).newInstance(); String inputFileName = ""; switch (mode) { case MEMORY: { inputFileName += test.getInput(); adapter.preProcessing(inputFileName, true); break;/* w ww. j av a2 s . com*/ } case DB: { inputFileName += test.getInputDb(); System.setProperty("fr.inria.corese.tinkerpop.dbinput", inputFileName); adapter.preProcessing(inputFileName, false); break; } } String query = test.getRequest(); LOGGER.log(Level.INFO, "processing nbQuery #{0}", query); stats = new DescriptiveStatistics(); statsMemory = new DescriptiveStatistics(); int nbCycles = test.getMeasuredCycles() + test.getWarmupCycles(); boolean measured = true; for (int i = 0; i < nbCycles; i++) { LOGGER.log(Level.INFO, "iteration #{0}", i); System.gc(); final long startTime = System.currentTimeMillis(); LOGGER.log(Level.INFO, "before query"); ExecutorService executor = Executors.newSingleThreadExecutor(); Future<?> future = executor.submit(new Runnable() { @Override public void run() { adapter.execQuery(query); } }); try { future.get(1, TimeUnit.HOURS); measured = true; } catch (InterruptedException | TimeoutException e) { future.cancel(true); measured = false; LOGGER.log(Level.WARNING, "Terminated!"); } catch (ExecutionException ex) { Logger.getLogger(CoreseTimer.class.getName()).log(Level.SEVERE, null, ex); } executor.shutdownNow(); LOGGER.log(Level.INFO, "after query"); final long endTime = System.currentTimeMillis(); long delta = endTime - startTime; long memoryUsage = getMemoryUsage(); LOGGER.info(String.format("elapsed time = %d ms", delta)); 
LOGGER.info(String.format("used memory = %d bytes", memoryUsage)); if (i >= test.getWarmupCycles()) { if (!measured) { while (i < nbCycles) { stats.addValue(-100); statsMemory.addValue(memoryUsage); i++; } } else { stats.addValue(delta); statsMemory.addValue(memoryUsage); } } } adapter.saveResults(test.getOutputPath()); mappings = adapter.getMappings(); adapter.postProcessing(); LOGGER.exiting(CoreseTimer.class.getName(), "run"); return this; }
From source file:com.itemanalysis.psychometrics.measurement.TestSummary.java
public TestSummary(int numberOfItems, int numberOfSubscales, int[] cutScores, LinkedHashMap<VariableName, VariableAttributes> variableAttributeMap, boolean unbiased, boolean deletedReliability, boolean showCsem) { this.variableAttributes = new ArrayList<VariableAttributes>(); for (VariableName v : variableAttributeMap.keySet()) { this.variableAttributes.add(variableAttributeMap.get(v)); }/*from www .ja v a 2s. co m*/ this.unbiased = unbiased; this.numberOfItems = numberOfItems; this.cutScores = cutScores; this.deletedReliability = deletedReliability; this.showCsem = showCsem; stats = new DescriptiveStatistics(); stdDev = new StandardDeviation(unbiased); relMatrix = new CovarianceMatrix(variableAttributes); this.numberOfSubscales = numberOfSubscales; if (numberOfSubscales > 1) partRelMatrix = new CovarianceMatrix(numberOfSubscales); }
From source file:com.linuxbox.enkive.teststats.StatsHourGrainTest.java
@Test public void consolidationMethods() { List<Map<String, Object>> consolidatedData = grain.consolidateData(); assertTrue("the consolidated data is null", consolidatedData != null); String methods[] = { CONSOLIDATION_AVG, CONSOLIDATION_MAX, CONSOLIDATION_MIN }; DescriptiveStatistics statsMaker = new DescriptiveStatistics(); statsMaker.addValue(111);/*from w w w. j av a 2 s. c om*/ statsMaker.addValue(11); statsMaker.addValue(1); Map<String, Object> statData = createMap(); for (String method : methods) { grain.methodMapBuilder(method, statsMaker, statData); } assertTrue("methodMapBuilder returned null", statData != null); }
From source file:io.yields.math.framework.DomainTest.java
@Explore(name = "Multiple Explorations", dataProvider = DataProviders.FixedMersenneTwisterDataProvider.class) @Exploration(name = "Test Function", context = FunctionExplorerContext.class, group = "domain") @Exploration(name = "Test Function 2", context = Function2ExplorerContext.class, group = "domain") public void testMultipleExplorations(List<Explorer<Double>> explorers) { for (Explorer<Double> explorer : explorers) { assertThat(explorer.all().count()).isEqualTo(explorer.valid().count()); assertThat(explorer.invalid().count()).isEqualTo(0); assertThat(explorer.propertyError().count()).isEqualTo(0); DescriptiveStatistics stats = new DescriptiveStatistics(); explorer.all().forEach(result -> stats.addValue(result.getFunctionOutcome().orElse(0d))); assertThat(stats.getMean()).isEqualTo(0, delta(0.1)); assertThat(stats.getMax()).isEqualTo(1, delta(0.1)); assertThat(stats.getMin()).isEqualTo(-1, delta(0.1)); }/*w ww . j a v a 2s . co m*/ // compare 2 explorers Explorer<Double> firstExplorer = explorers.get(0); Explorer<Double> secondExplorer = explorers.get(1); List<PropertyVerifications<Double>> resultsOfFirstExplorer = firstExplorer.all() .collect(Collectors.toList()); List<PropertyVerifications<Double>> resultsOfSecondExplorer = secondExplorer.all() .collect(Collectors.toList()); for (int i = 0; i < resultsOfFirstExplorer.size(); i++) { assertThat(resultsOfFirstExplorer.get(i).getFunctionOutcome().orElse(0d)) .isEqualTo(resultsOfSecondExplorer.get(i).getFunctionOutcome().orElse(0d), delta(2d)); } }
From source file:com.mapd.bench.Benchmark.java
String executeQuery(String sql, int expected, int iterations, int queryNum) { Connection conn = null;/*from w w w .java2 s .co m*/ Statement stmt = null; Long firstExecute = 0l; Long firstJdbc = 0l; Long firstIterate = 0l; DescriptiveStatistics statsExecute = new DescriptiveStatistics(); DescriptiveStatistics statsJdbc = new DescriptiveStatistics(); DescriptiveStatistics statsIterate = new DescriptiveStatistics(); DescriptiveStatistics statsTotal = new DescriptiveStatistics(); long totalTime = 0; try { //Open a connection logger.debug("Connecting to database url :" + url); conn = DriverManager.getConnection(url, iUser, iPasswd); long startTime = System.currentTimeMillis(); for (int loop = 0; loop < iterations; loop++) { //Execute a query stmt = conn.createStatement(); long timer = System.currentTimeMillis(); ResultSet rs = stmt.executeQuery(sql); long executeTime = 0; long jdbcTime = 0; // gather internal execute time for MapD as we are interested in that if (driver.equals(JDBC_DRIVER)) { executeTime = stmt.getQueryTimeout(); jdbcTime = (System.currentTimeMillis() - timer) - executeTime; } else { jdbcTime = (System.currentTimeMillis() - timer); executeTime = 0; } // this is fake to get our intenal execute time. 
logger.debug("Query Timeout/AKA internal Execution Time was " + stmt.getQueryTimeout() + " ms Elapsed time in JVM space was " + (System.currentTimeMillis() - timer) + "ms"); timer = System.currentTimeMillis(); //Extract data from result set int resultCount = 0; while (rs.next()) { Object obj = rs.getObject(1); if (obj != null && obj.equals(statsExecute)) { logger.info("Impossible"); } resultCount++; } long iterateTime = (System.currentTimeMillis() - timer); if (resultCount != expected) { logger.error("Expect " + expected + " actual " + resultCount + " for query " + sql); // don't run anymore break; } if (loop == 0) { firstJdbc = jdbcTime; firstExecute = executeTime; firstIterate = iterateTime; } else { statsJdbc.addValue(jdbcTime); statsExecute.addValue(executeTime); statsIterate.addValue(iterateTime); statsTotal.addValue(jdbcTime + executeTime + iterateTime); } //Clean-up environment rs.close(); stmt.close(); } totalTime = System.currentTimeMillis() - startTime; conn.close(); } catch (SQLException se) { //Handle errors for JDBC se.printStackTrace(); } catch (Exception e) { //Handle errors for Class.forName e.printStackTrace(); } finally { //finally block used to close resources try { if (stmt != null) { stmt.close(); } } catch (SQLException se2) { } // nothing we can do try { if (conn != null) { conn.close(); } } catch (SQLException se) { se.printStackTrace(); } //end finally try } //end try return String.format(lineDescriptor, queryNum, statsTotal.getMean(), statsTotal.getMin(), statsTotal.getMax(), statsTotal.getPercentile(85), statsExecute.getMean(), statsExecute.getMin(), statsExecute.getMax(), statsExecute.getPercentile(85), statsExecute.getPercentile(25), statsExecute.getStandardDeviation(), statsJdbc.getMean(), statsJdbc.getMin(), statsJdbc.getMax(), statsJdbc.getPercentile(85), statsIterate.getMean(), statsIterate.getMin(), statsIterate.getMax(), statsIterate.getPercentile(85), firstExecute, firstJdbc, firstIterate, iterations, totalTime, (long) 
statsTotal.getSum() + firstExecute + firstJdbc + firstIterate); }
From source file:knop.psfj.BeadAverager.java
/** * Filter.//from w w w.jav a 2 s . c o m * * @param frameList the frame list * @return the array list */ public ArrayList<BeadFrame> filter(ArrayList<BeadFrame> frameList) { ArrayList<BeadFrame> filtered = new ArrayList<BeadFrame>(); DescriptiveStatistics centerX = new DescriptiveStatistics(); DescriptiveStatistics centerY = new DescriptiveStatistics(); DescriptiveStatistics centerZ = new DescriptiveStatistics(); for (BeadFrame frame : frameList) { centerX.addValue(frame.getCentroidX()); centerY.addValue(frame.getCentroidY()); centerZ.addValue(frame.getCentroidZ()); } double thresholdUp = centerZ.getPercentile(50) + centerZ.getStandardDeviation(); double thresholdDown = thresholdUp - (centerZ.getStandardDeviation() * 2); for (BeadFrame frame : frameList) { if (frame.getCentroidZ() < thresholdUp && frame.getCentroidZ() > thresholdDown) { filtered.add(frame); } else { incrementFilteredOutBeadCount(); } } return filtered; }
From source file:com.caseystella.analytics.outlier.batch.rpca.RPCA.java
private double standardDeviation(double[][] x) { DescriptiveStatistics stats = new DescriptiveStatistics(); for (int i = 0; i < x.length; i++) for (int j = 0; j < x[i].length; j++) stats.addValue(x[i][j]);/*from w w w. j a v a 2 s .c o m*/ return stats.getStandardDeviation(); }
From source file:com.joliciel.talismane.extensions.corpus.CorpusStatistics.java
/**
 * Gathers corpus statistics from one parse configuration: sentence length,
 * token and pos-tag counts, known/unknown word counts against the reference
 * lexicons, dependency-label counts, syntax depth and distance statistics,
 * and a count of non-projective (crossing) arc pairs.
 *
 * @param parseConfiguration the parsed sentence to accumulate statistics from
 * @param writer             unused here; part of the observer interface
 */
@Override
public void onNextParseConfiguration(ParseConfiguration parseConfiguration, Writer writer) {
    sentenceCount++;
    sentenceLengthStats.addValue(parseConfiguration.getPosTagSequence().size());
    // --- Token- and pos-tag-level statistics -------------------------------
    for (PosTaggedToken posTaggedToken : parseConfiguration.getPosTagSequence()) {
        // Skip the artificial root token.
        if (posTaggedToken.getTag().equals(PosTag.ROOT_POS_TAG))
            continue;
        Token token = posTaggedToken.getToken();
        String word = token.getOriginalText();
        words.add(word);
        if (referenceWords != null) {
            if (!referenceWords.contains(word))
                unknownTokenCount++;
        }
        // Alphanumeric tokens get additional lowercase/unknown bookkeeping.
        if (alphanumeric.matcher(token.getOriginalText()).find()) {
            String lowercase = word.toLowerCase(talismaneSession.getLocale());
            lowerCaseWords.add(lowercase);
            alphanumericCount++;
            if (referenceLowercaseWords != null) {
                if (!referenceLowercaseWords.contains(lowercase))
                    unknownAlphanumericCount++;
            }
        }
        tokenCount++;
        // Increment the count for this pos-tag code.
        Integer countObj = posTagCounts.get(posTaggedToken.getTag().getCode());
        int count = countObj == null ? 0 : countObj.intValue();
        count++;
        posTagCounts.put(posTaggedToken.getTag().getCode(), count);
    }
    // --- Dependency-level statistics ---------------------------------------
    int maxDepth = 0;
    DescriptiveStatistics avgSyntaxDepthForSentenceStats = new DescriptiveStatistics();
    for (DependencyArc arc : parseConfiguration.getDependencies()) {
        // Increment the count for this dependency label.
        Integer countObj = depLabelCounts.get(arc.getLabel());
        int count = countObj == null ? 0 : countObj.intValue();
        count++;
        depLabelCounts.put(arc.getLabel(), count);
        totalDepCount++;
        if (arc.getHead().getTag().equals(PosTag.ROOT_POS_TAG)
                && (arc.getLabel() == null || arc.getLabel().length() == 0)) {
            // do nothing for unattached stuff (e.g. punctuation)
        } else if (arc.getLabel().equals("ponct")) {
            // do nothing for punctuation
        } else {
            // Depth = number of hops from this arc's head up to the root.
            int depth = 0;
            DependencyArc theArc = arc;
            while (theArc != null && !theArc.getHead().getTag().equals(PosTag.ROOT_POS_TAG)) {
                theArc = parseConfiguration.getGoverningDependency(theArc.getHead());
                depth++;
            }
            if (depth > maxDepth)
                maxDepth = depth;
            syntaxDepthStats.addValue(depth);
            avgSyntaxDepthForSentenceStats.addValue(depth);
            // Linear distance between governor and dependent tokens.
            int distance = Math
                    .abs(arc.getHead().getToken().getIndex() - arc.getDependent().getToken().getIndex());
            syntaxDistanceStats.addValue(distance);
        }
        // NOTE(review): these two addValue calls run once per ARC, not once per
        // sentence, so the running max and the per-sentence mean are re-added on
        // every iteration — looks like they were meant to sit after this loop.
        // Confirm against the original source before changing.
        maxSyntaxDepthStats.addValue(maxDepth);
        if (avgSyntaxDepthForSentenceStats.getN() > 0)
            avgSyntaxDepthStats.addValue(avgSyntaxDepthForSentenceStats.getMean());
    }
    // --- Non-projectivity count --------------------------------------------
    // we cheat a little bit by only allowing each arc to count once
    // there could be a situation where there are two independent non-projective arcs
    // crossing the same mother arc, but we prefer here to underestimate,
    // as this phenomenon is quite rare.
    Set<DependencyArc> nonProjectiveArcs = new HashSet<DependencyArc>();
    int i = 0;
    for (DependencyArc arc : parseConfiguration.getDependencies()) {
        i++;
        if (arc.getHead().getTag().equals(PosTag.ROOT_POS_TAG)
                && (arc.getLabel() == null || arc.getLabel().length() == 0))
            continue;
        if (nonProjectiveArcs.contains(arc))
            continue;
        // Normalised [startIndex, endIndex] span of this arc.
        int headIndex = arc.getHead().getToken().getIndex();
        int depIndex = arc.getDependent().getToken().getIndex();
        int startIndex = headIndex < depIndex ? headIndex : depIndex;
        int endIndex = headIndex >= depIndex ? headIndex : depIndex;
        int j = 0;
        // Compare against every later arc (j <= i already covered symmetrically).
        for (DependencyArc otherArc : parseConfiguration.getDependencies()) {
            j++;
            if (j <= i)
                continue;
            if (otherArc.getHead().getTag().equals(PosTag.ROOT_POS_TAG)
                    && (otherArc.getLabel() == null || otherArc.getLabel().length() == 0))
                continue;
            if (nonProjectiveArcs.contains(otherArc))
                continue;
            int headIndex2 = otherArc.getHead().getToken().getIndex();
            int depIndex2 = otherArc.getDependent().getToken().getIndex();
            int startIndex2 = headIndex2 < depIndex2 ? headIndex2 : depIndex2;
            int endIndex2 = headIndex2 >= depIndex2 ? headIndex2 : depIndex2;
            // Two arcs cross iff exactly one endpoint of the second span lies
            // strictly inside the first span.
            boolean nonProjective = false;
            if (startIndex2 < startIndex && endIndex2 > startIndex && endIndex2 < endIndex) {
                nonProjective = true;
            } else if (startIndex2 > startIndex && startIndex2 < endIndex && endIndex2 > endIndex) {
                nonProjective = true;
            }
            if (nonProjective) {
                nonProjectiveArcs.add(arc);
                nonProjectiveArcs.add(otherArc);
                nonProjectiveCount++;
                LOG.debug("Non-projective arcs in sentence: " + parseConfiguration.getSentence().getText());
                LOG.debug(arc.toString());
                LOG.debug(otherArc.toString());
                break;
            }
        }
    }
}
From source file:io.hops.experiments.results.compiler.InterleavedBMResultsAggregator.java
public static InterleavedBMResults processInterleavedResults(Collection<Object> responses, Configuration args) throws FileNotFoundException, IOException, InterruptedException { Map<BenchmarkOperations, double[][]> allOpsPercentiles = new HashMap<BenchmarkOperations, double[][]>(); System.out.println("Processing the results "); DescriptiveStatistics successfulOps = new DescriptiveStatistics(); DescriptiveStatistics failedOps = new DescriptiveStatistics(); DescriptiveStatistics speed = new DescriptiveStatistics(); DescriptiveStatistics duration = new DescriptiveStatistics(); DescriptiveStatistics opsLatency = new DescriptiveStatistics(); DescriptiveStatistics noOfNNs = new DescriptiveStatistics(); for (Object obj : responses) { if (!(obj instanceof InterleavedBenchmarkCommand.Response)) { throw new IllegalStateException("Wrong response received from the client"); } else {/*from ww w . j a v a 2 s .co m*/ InterleavedBenchmarkCommand.Response response = (InterleavedBenchmarkCommand.Response) obj; successfulOps.addValue(response.getTotalSuccessfulOps()); failedOps.addValue(response.getTotalFailedOps()); speed.addValue(response.getOpsPerSec()); duration.addValue(response.getRunTime()); opsLatency.addValue(response.getAvgOpLatency()); noOfNNs.addValue(response.getNnCount()); } } //write the response objects to files. 
//these files are processed by CalculatePercentiles.java int responseCount = 0; for (Object obj : responses) { if (!(obj instanceof InterleavedBenchmarkCommand.Response)) { throw new IllegalStateException("Wrong response received from the client"); } else { String filePath = args.getResultsDir(); if (!filePath.endsWith("/")) { filePath += "/"; } InterleavedBenchmarkCommand.Response response = (InterleavedBenchmarkCommand.Response) obj; filePath += "ResponseRawData" + responseCount++ + ConfigKeys.RAW_RESPONSE_FILE_EXT; System.out.println("Writing Rwaw results to " + filePath); FileOutputStream fout = new FileOutputStream(filePath); ObjectOutputStream oos = new ObjectOutputStream(fout); oos.writeObject(response); oos.close(); } } InterleavedBMResults result = new InterleavedBMResults(args.getNamenodeCount(), (int) Math.floor(noOfNNs.getMean()), args.getNdbNodesCount(), args.getInterleavedBmWorkloadName(), (successfulOps.getSum() / ((duration.getMean() / 1000))), (duration.getMean() / 1000), (successfulOps.getSum()), (failedOps.getSum()), allOpsPercentiles, opsLatency.getMean()); // // failover testing // if(args.testFailover()){ // if(responses.size() != 1){ // throw new UnsupportedOperationException("Currently we only support failover testing for one slave machine"); // } // // String prefix = args.getBenchMarkFileSystemName().toString(); // if(args.getBenchMarkFileSystemName() == BenchMarkFileSystemName.HopsFS){ // prefix+="-"+args.getNameNodeSelectorPolicy(); // } // // final String outputFolder = args.getResultsDir(); // InterleavedBenchmarkCommand.Response response = (InterleavedBenchmarkCommand.Response)responses.iterator().next(); // // // StringBuilder sb = new StringBuilder(); // for(String data : response.getFailOverLog()){ // sb.append(data).append("\n"); // } // // String datFile = prefix+"-failover.dat"; // CompileResults.writeToFile(outputFolder+"/"+datFile, sb.toString(), false); // // // StringBuilder plot = new StringBuilder("set terminal postscript 
eps enhanced color font \"Helvetica,18\" #monochrome\n"); // plot.append( "set output '| ps2pdf - failover.pdf'\n"); // plot.append( "#set size 1,0.75 \n "); // plot.append( "set ylabel \"ops/sec\" \n"); // plot.append( "set xlabel \"Time (sec)\" \n"); // plot.append( "set format y \"%.0s%c\"\n"); // // // StringBuilder sbx = new StringBuilder(); // String oldPt = ""; // for(String data : response.getFailOverLog()){ // // if(data.startsWith("#")) { // StringTokenizer st = new StringTokenizer(oldPt); // long time = Long.parseLong(st.nextToken()); // long spd = Long.parseLong(st.nextToken()); // sbx.append("set label 'NN-Restart' at "+time+","+spd+" rotate by 270").append("\n"); // } // oldPt = data; // } // plot.append(sbx.toString()); // // // plot.append( "plot '"+datFile+"' with linespoints ls 1"); // CompileResults.writeToFile(outputFolder+"/"+prefix+"-failover.gnu", plot.toString(), false); // // } return result; }
From source file:knop.psfj.heatmap.FractionnedSpace.java
/** * Gets the pixel value.// w ww . ja va 2 s. c om * * @param f the f * @return the pixel value */ public float getPixelValue(Fraction f) { DescriptiveStatistics stats = new DescriptiveStatistics(); float median = Float.NaN; //System.out.println(f); if (f.size() != 0) { for (Point p : f) { //System.out.println(p.getValue()); if (stats == null) System.out.println(stats); if (p == null) continue; stats.addValue(p.getValue()); } median = new Float(stats.getPercentile(50)); //System.out.println("Resultat : "+median); } return median; }