List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getMax
public double getMax()
From source file:com.fpuna.preproceso.TestApacheMathLibDemo.java
/**
 * Demonstrates basic commons-math3 usage: random number generation,
 * descriptive statistics (mean, standard deviation, max) and complex
 * number arithmetic.
 *
 * @param args unused command line arguments
 */
public static void main(String[] args) {
    RandomGenerator randomGenerator = new JDKRandomGenerator();
    System.out.println(randomGenerator.nextInt());
    System.out.println(randomGenerator.nextDouble());

    // Descriptive statistics over the values 1..7.
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (int value = 1; value <= 7; value++) {
        stats.addValue(value);
    }
    System.out.print("Mean : " + stats.getMean() + "\n");
    System.out.print("Standard deviation : " + stats.getStandardDeviation() + "\n");
    System.out.print("Max : " + stats.getMax() + "\n");

    // Complex numbers in the form a + bi.
    Complex c1 = new Complex(1, 2);
    Complex c2 = new Complex(2, 3);
    System.out.print("Absolute of c1 " + c1.abs() + "\n");
    System.out.print("Addition : " + (c1.add(c2)) + "\n");
}
From source file:mase.deprecated.FastMathTest.java
public static void main(String[] args) { double MIN = -10; double MAX = 10; int N = 10000; DescriptiveStatistics diff = new DescriptiveStatistics(); DescriptiveStatistics diffJava = new DescriptiveStatistics(); long tFast = 0, tNormal = 0, tBounded = 0, tJava = 0; for (int i = 0; i < N; i++) { double x = Math.random() * (MAX - MIN) + MIN; long t = System.nanoTime(); double v1 = (1.0 / (1.0 + FastMath.expQuick(-1 * x))); tFast += System.nanoTime() - t; t = System.nanoTime();//from ww w . ja v a 2 s .co m double v2 = (1.0 / (1.0 + FastMath.exp(-1 * x))); tNormal += System.nanoTime() - t; t = System.nanoTime(); double v3 = (1.0 / (1.0 + BoundMath.exp(-1 * x))); tBounded += System.nanoTime() - t; t = System.nanoTime(); double v4 = (1.0 / (1.0 + Math.exp(-1 * x))); tJava += System.nanoTime() - t; diff.addValue(Math.abs(v1 - v2)); diffJava.addValue(Math.abs(v3 - v1)); } System.out.println("MAX: " + diff.getMax()); System.out.println("MEAN: " + diff.getMean()); System.out.println("MAX JAVA: " + diffJava.getMax()); System.out.println("MEAN JAVA: " + diffJava.getMean()); System.out.println("Fast: " + tFast); System.out.println("Normal: " + tNormal); System.out.println("Bounded: " + tBounded); System.out.println("Java: " + tJava); }
From source file:cc.redberry.core.performance.StableSort.java
/** * @param args the command line arguments *///from w w w .j ava2s. co m public static void main(String[] args) { try { //burn JVM BitsStreamGenerator bitsStreamGenerator = new Well19937c(); for (int i = 0; i < 1000; ++i) nextArray(1000, bitsStreamGenerator); System.out.println("!"); BufferedWriter timMeanOut = new BufferedWriter( new FileWriter("/home/stas/Projects/stableSort/timMean.dat")); BufferedWriter insertionMeanOut = new BufferedWriter( new FileWriter("/home/stas/Projects/stableSort/insertionMean.dat")); BufferedWriter timMaxOut = new BufferedWriter( new FileWriter("/home/stas/Projects/stableSort/timMax.dat")); BufferedWriter insertionMaxOut = new BufferedWriter( new FileWriter("/home/stas/Projects/stableSort/insertionMax.dat")); BufferedWriter timSigOut = new BufferedWriter( new FileWriter("/home/stas/Projects/stableSort/timSig.dat")); BufferedWriter insertionSigOut = new BufferedWriter( new FileWriter("/home/stas/Projects/stableSort/insertionSig.dat")); DescriptiveStatistics timSort; DescriptiveStatistics insertionSort; int tryies = 200; int arrayLength = 0; for (; arrayLength < 1000; ++arrayLength) { int[] coSort = nextArray(arrayLength, bitsStreamGenerator); timSort = new DescriptiveStatistics(); insertionSort = new DescriptiveStatistics(); for (int i = 0; i < tryies; ++i) { int[] t1 = nextArray(arrayLength, bitsStreamGenerator); int[] t2 = t1.clone(); long start = System.currentTimeMillis(); ArraysUtils.timSort(t1, coSort); long stop = System.currentTimeMillis(); timSort.addValue(stop - start); start = System.currentTimeMillis(); ArraysUtils.insertionSort(t2, coSort); stop = System.currentTimeMillis(); insertionSort.addValue(stop - start); } timMeanOut.write(arrayLength + "\t" + timSort.getMean() + "\n"); insertionMeanOut.write(arrayLength + "\t" + insertionSort.getMean() + "\n"); timMaxOut.write(arrayLength + "\t" + timSort.getMax() + "\n"); insertionMaxOut.write(arrayLength + "\t" + insertionSort.getMax() + "\n"); timSigOut.write(arrayLength + 
"\t" + timSort.getStandardDeviation() + "\n"); insertionSigOut.write(arrayLength + "\t" + insertionSort.getStandardDeviation() + "\n"); } timMeanOut.close(); insertionMeanOut.close(); timMaxOut.close(); insertionMaxOut.close(); timSigOut.close(); insertionSigOut.close(); } catch (IOException ex) { Logger.getLogger(StableSort.class.getName()).log(Level.SEVERE, null, ex); } }
From source file:com.weibo.motan.demo.client.DemoRpcClient.java
/**
 * Motan RPC benchmark client: submits n requests from a fixed-size thread
 * pool to the "motanDemoReferer" bean and prints throughput plus latency
 * statistics (mean, median, min, max, 99th percentile).
 *
 * @param args args[0] = number of worker threads
 * @throws Exception on argument parsing, Spring bootstrap or await failure
 */
public static void main(String[] args) throws Exception {
    final DescriptiveStatistics stats = new SynchronizedDescriptiveStatistics();
    int threads = Integer.parseInt(args[0]);
    DubboBenchmark.BenchmarkMessage msg = prepareArgs();
    final byte[] msgBytes = msg.toByteArray();
    int n = 1000000;
    final CountDownLatch latch = new CountDownLatch(n);
    ExecutorService es = Executors.newFixedThreadPool(threads);
    final AtomicInteger trans = new AtomicInteger(0);
    final AtomicInteger transOK = new AtomicInteger(0);
    ApplicationContext ctx = new ClassPathXmlApplicationContext(
            new String[] { "classpath:motan_demo_client.xml" });
    MotanDemoService service = (MotanDemoService) ctx.getBean("motanDemoReferer");
    long start = System.currentTimeMillis();
    for (int i = 0; i < n; i++) {
        es.submit(() -> {
            try {
                long t = System.currentTimeMillis();
                DubboBenchmark.BenchmarkMessage m = testSay(service, msgBytes);
                t = System.currentTimeMillis() - t;
                stats.addValue(t);
                trans.incrementAndGet();
                if (m != null && m.getField1().equals("OK")) {
                    transOK.incrementAndGet();
                }
            } finally {
                latch.countDown();
            }
        });
    }
    latch.await();
    // All tasks have completed; release the pool threads so the JVM can exit.
    es.shutdown();
    start = System.currentTimeMillis() - start;
    System.out.printf("sent requests : %d\n", n);
    System.out.printf("received requests : %d\n", trans.get());
    System.out.printf("received requests_OK : %d\n", transOK.get());
    System.out.printf("throughput (TPS) : %d\n", n * 1000 / start);
    System.out.printf("mean: %f\n", stats.getMean());
    System.out.printf("median: %f\n", stats.getPercentile(50));
    System.out.printf("max: %f\n", stats.getMax());
    System.out.printf("min: %f\n", stats.getMin());
    // Fix: the "99P" line previously printed getPercentile(90).
    System.out.printf("99P: %f\n", stats.getPercentile(99));
}
From source file:es.upm.oeg.tools.rdfshapes.utils.CadinalityResultGenerator.java
/**
 * Queries a SPARQL endpoint for per-class property cardinality statistics
 * and writes them into an Excel workbook ("3cixty.xls"): one merged cell
 * per class, one row per property with min/max/percentiles/mean and a
 * 21-column cardinality-percentage histogram.
 *
 * @param args unused command line arguments
 * @throws Exception on I/O or query failure
 */
public static void main(String[] args) throws Exception {
    // SPARQL endpoint all count/cardinality queries are executed against.
    String endpoint = "http://3cixty.eurecom.fr/sparql";
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());
    // Query templates loaded from files declared elsewhere in this class.
    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());
    DecimalFormat df = new DecimalFormat("0.0000");
    //Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");
    int currentExcelRow = 0;
    int classStartRow = 0;
    for (String clazz : classList) {
        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);
        // Count individuals of this class; skip the class unless the query
        // returned exactly one binding for "c".
        String queryString = bindQueryString(individualCountQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }
        // NOTE(review): the original comment said "if there are zero
        // individuals, continue", but the code throws instead — confirm
        // which behavior is intended before relying on this guard.
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }
        // Remember where this class's rows start so they can be merged later.
        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);
        // Fetch every property "p" used by instances of this class.
        queryString = bindQueryString(classPropertyQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");
        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {
                DescriptiveStatistics stats = new DescriptiveStatistics();
                String propertyURI = property.asResource().getURI();
                // Reuse the class row for the first property, create rows after.
                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;
                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);
                // Cardinality distribution query: bindings are (card, count).
                Map<String, String> litMap2 = new HashMap<>();
                Map<String, String> iriMap2 = ImmutableMap.of("class", clazz, "p", propertyURI);
                queryString = bindQueryString(propertyCardinalityQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap2, LITERAL_BINDINGS, litMap2));
                List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint,
                        ImmutableSet.of("card", "count"));
                int sum = 0;
                List<CardinalityCount> cardinalityList = new ArrayList<>();
                if (solnMaps.size() > 0) {
                    for (Map<String, RDFNode> soln : solnMaps) {
                        int count = soln.get("count").asLiteral().getInt();
                        int card = soln.get("card").asLiteral().getInt();
                        // Feed 'card' into the stats once per occurrence so
                        // percentiles are weighted by instance count.
                        for (int i = 0; i < count; i++) {
                            stats.addValue(card);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(card, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                        sum += count;
                    }
                    // Check for zero cardinality instances: individuals that
                    // have no value for this property at all.
                    int count = individualCount - sum;
                    if (count > 0) {
                        for (int i = 0; i < count; i++) {
                            stats.addValue(0);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(0, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                    }
                }
                // cardinality -> percentage of individuals with that cardinality.
                Map<Integer, Double> cardMap = new HashMap<>();
                for (CardinalityCount count : cardinalityList) {
                    cardMap.put(count.getCardinality(), count.getPrecentage());
                }
                // Columns 2-7: instance count, min, max, 1st/99th percentile, mean.
                XSSFCell instanceCountCell = propertyRow.createCell(2);
                instanceCountCell.setCellValue(individualCount);
                XSSFCell minCell = propertyRow.createCell(3);
                minCell.setCellValue(stats.getMin());
                XSSFCell maxCell = propertyRow.createCell(4);
                maxCell.setCellValue(stats.getMax());
                XSSFCell p1 = propertyRow.createCell(5);
                p1.setCellValue(stats.getPercentile(1));
                XSSFCell p99 = propertyRow.createCell(6);
                p99.setCellValue(stats.getPercentile(99));
                XSSFCell mean = propertyRow.createCell(7);
                mean.setCellValue(df.format(stats.getMean()));
                // Columns 8-28: histogram of percentages for cardinalities 0..20.
                for (int i = 0; i < 21; i++) {
                    XSSFCell dataCell = propertyRow.createCell(8 + i);
                    Double percentage = cardMap.get(i);
                    if (percentage != null) {
                        dataCell.setCellValue(df.format(percentage));
                    } else {
                        dataCell.setCellValue(0);
                    }
                }
            }
        }
        // We have finished writing properties of one class; merge its class
        // cell vertically across all of its property rows.
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }
    }
    String filename = "3cixty.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}
From source file:io.hops.experiments.stats.TransactionStatsAggregator.java
/**
 * Aggregates per-transaction statistics from a CSV stats file.
 * <p>
 * The first line is treated as a header row; the first column holds the
 * transaction name and the remaining columns numeric measurements. Columns
 * whose upper-cased header contains {@code headerPattern} (or every column
 * when the pattern is {@code ALL}) are aggregated over each row whose
 * transaction matches {@code transaction} (or every row for {@code ALL}).
 *
 * @param statsFile CSV file to read
 * @param headerPattern upper-case substring selecting columns to aggregate
 * @param transaction transaction name filter (matched case-insensitively)
 * @param printSummary if true, prints min/max/avg/std for each selected column
 * @return map from trimmed header name to its statistics, or {@code null}
 *         when the file does not exist or is empty
 * @throws IOException if reading the file fails
 */
public static Map<String, DescriptiveStatistics> aggregate(File statsFile, String headerPattern,
        String transaction, boolean printSummary) throws IOException {
    if (!statsFile.exists())
        return null;
    transaction = transaction.toUpperCase();
    String[] headers = null;
    Map<Integer, DescriptiveStatistics> statistics = Maps.newHashMap();
    int txCount = 0;
    // Fix: try-with-resources — the original leaked the reader if any line
    // failed to parse.
    try (BufferedReader reader = new BufferedReader(new FileReader(statsFile))) {
        String tx = reader.readLine();
        if (tx != null) {
            headers = tx.split(",");
            // Column 0 is the transaction name, so start at 1.
            for (int i = 1; i < headers.length; i++) {
                String h = headers[i].toUpperCase();
                if (h.contains(headerPattern) || headerPattern.equals(ALL)) {
                    statistics.put(i, new DescriptiveStatistics());
                }
            }
        }
        while ((tx = reader.readLine()) != null) {
            if (tx.startsWith(transaction) || transaction.equals(ALL)) {
                txCount++;
                String[] txStats = tx.split(",");
                // Skip malformed rows whose column count differs from the header.
                if (txStats.length == headers.length) {
                    for (Map.Entry<Integer, DescriptiveStatistics> e : statistics.entrySet()) {
                        // parseDouble avoids the needless boxing of Double.valueOf.
                        e.getValue().addValue(Double.parseDouble(txStats[e.getKey()]));
                    }
                }
            }
        }
    }
    if (headers == null)
        return null;
    if (printSummary) {
        System.out.println("Transaction: " + transaction + " " + txCount);
        List<Integer> keys = new ArrayList<Integer>(statistics.keySet());
        Collections.sort(keys);
        for (Integer i : keys) {
            DescriptiveStatistics stats = statistics.get(i);
            // Columns that never saw a non-zero value are omitted.
            if (stats.getMin() == 0 && stats.getMax() == 0) {
                continue;
            }
            System.out.println(headers[i]);
            System.out.println("Min " + stats.getMin() + " Max " + stats.getMax() + " Avg " + stats.getMean()
                    + " Std " + stats.getStandardDeviation());
        }
    }
    // Re-key by header name rather than column index for the caller.
    Map<String, DescriptiveStatistics> annotatedStats = Maps.newHashMap();
    for (Map.Entry<Integer, DescriptiveStatistics> e : statistics.entrySet()) {
        annotatedStats.put(headers[e.getKey()].trim(), e.getValue());
    }
    return annotatedStats;
}
From source file:com.intuit.tank.service.impl.v1.report.SummaryReportRunner.java
/** * @param jobId/*from w w w . j av a2 s . co m*/ * @param key * @param stats * @return */ private static PeriodicData getBucketData(int jobId, String key, BucketDataItem bucketItem) { DescriptiveStatistics stats = bucketItem.getStats(); PeriodicData ret = PeriodicDataBuilder.periodicData().withJobId(jobId).withMax(stats.getMax()) .withMean(stats.getMean()).withMin(stats.getMin()).withPageId(key) .withSampleSize((int) stats.getN()).withPeriod(bucketItem.getPeriod()) .withTimestamp(bucketItem.getStartTime()).build(); return ret; }
From source file:async.nio2.Main.java
/**
 * Formats latency statistics into a single evaluation line and resets the
 * accumulator for the next measurement window.
 *
 * @param stats statistics accumulator; cleared as a side effect
 * @return line containing the 50th/90th/99th percentiles, min and max
 */
private static String toEvaluationString(DescriptiveStatistics stats) {
    String data = String.format(
            "0.50 Percentile = %8.2f, " + "0.90 Percentile = %8.2f, " + "0.99 Percentile = %8.2f, "
                    + "min = %8.2f, " + "max = %8.2f",
            // Fix: the first slot is labelled "0.50 Percentile" but the
            // original passed getMean(); use the median to match the label.
            stats.getPercentile(50), stats.getPercentile(90), stats.getPercentile(99), stats.getMin(),
            stats.getMax());
    stats.clear();
    return data;
}
From source file:com.github.aptd.simulation.core.statistic.local.CStatistic.java
/** * write data/*from ww w.j av a 2s . c om*/ * * @param p_writer writer instance * @param p_name section name * @param p_statistic statistic value */ private static void apply(final IWriter p_writer, final String p_name, final DescriptiveStatistics p_statistic) { p_writer.section(1, p_name); p_writer.value("geometricmean", p_statistic.getGeometricMean()); p_writer.value("kurtosis", p_statistic.getKurtosis()); p_writer.value("max", p_statistic.getMax()); p_writer.value("min", p_statistic.getMin()); p_writer.value("mean", p_statistic.getMean()); p_writer.value("count", p_statistic.getN()); p_writer.value("25-percentile", p_statistic.getPercentile(0.25)); p_writer.value("75-percentile", p_statistic.getPercentile(0.75)); p_writer.value("populationvariance", p_statistic.getPopulationVariance()); p_writer.value("quadraticmean", p_statistic.getQuadraticMean()); p_writer.value("standdeviation", p_statistic.getStandardDeviation()); p_writer.value("skewness", p_statistic.getSkewness()); p_writer.value("sum", p_statistic.getSum()); p_writer.value("sumsequared", p_statistic.getSumsq()); p_writer.value("variance", p_statistic.getVariance()); }
From source file:com.intuit.tank.service.impl.v1.report.SummaryReportRunner.java
/** * @param key/*from w ww. j ava 2s.c o m*/ * @param value * @return */ private static SummaryData getSummaryData(int jobId, String key, DescriptiveStatistics stats) { SummaryData ret = SummaryDataBuilder.summaryData().withJobId(jobId) .withKurtosis(!Double.isNaN(stats.getKurtosis()) ? stats.getKurtosis() : 0).withMax(stats.getMax()) .withMean(stats.getMean()).withMin(stats.getMin()).withPageId(key) .withPercentile10(stats.getPercentile(10)).withPercentile20(stats.getPercentile(20)) .withPercentile30(stats.getPercentile(30)).withPercentile40(stats.getPercentile(40)) .withPercentile50(stats.getPercentile(50)).withPercentile60(stats.getPercentile(60)) .withPercentile70(stats.getPercentile(70)).withPercentile80(stats.getPercentile(80)) .withPercentile90(stats.getPercentile(90)).withPercentile95(stats.getPercentile(95)) .withPercentile99(stats.getPercentile(99)).withSampleSize((int) stats.getN()) .withSkewness(!Double.isNaN(stats.getSkewness()) ? stats.getSkewness() : 0) .withSttDev(!Double.isNaN(stats.getStandardDeviation()) ? stats.getStandardDeviation() : 0) .withVarience(!Double.isNaN(stats.getVariance()) ? stats.getVariance() : 0).build(); return ret; }