Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getMin

List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getMin

Introduction

In this page you can find the example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getMin.

Prototype

public double getMin() 

Source Link

Document

Returns the minimum of the available values.

Usage

From source file:com.weibo.motan.demo.client.DemoRpcClient.java

public static void main(String[] args) throws Exception {
    // Thread-safe wrapper: many worker threads record latencies concurrently.
    final DescriptiveStatistics stats = new SynchronizedDescriptiveStatistics();

    // Number of concurrent client threads, taken from the command line.
    int threads = Integer.parseInt(args[0]);

    DubboBenchmark.BenchmarkMessage msg = prepareArgs();
    final byte[] msgBytes = msg.toByteArray();

    int n = 1000000; // total number of requests to send
    final CountDownLatch latch = new CountDownLatch(n);

    ExecutorService es = Executors.newFixedThreadPool(threads);

    final AtomicInteger trans = new AtomicInteger(0);   // requests completed (OK or not)
    final AtomicInteger transOK = new AtomicInteger(0); // requests answered "OK"

    ApplicationContext ctx = new ClassPathXmlApplicationContext(
            new String[] { "classpath:motan_demo_client.xml" });

    MotanDemoService service = (MotanDemoService) ctx.getBean("motanDemoReferer");

    long start = System.currentTimeMillis();
    for (int i = 0; i < n; i++) {
        es.submit(() -> {
            try {
                // Measure one round-trip call and record its latency in ms.
                long t = System.currentTimeMillis();
                DubboBenchmark.BenchmarkMessage m = testSay(service, msgBytes);
                t = System.currentTimeMillis() - t;
                stats.addValue(t);

                trans.incrementAndGet();

                if (m != null && m.getField1().equals("OK")) {
                    transOK.incrementAndGet();
                }

            } finally {
                // Count down even when the call throws, so await() cannot hang forever.
                latch.countDown();
            }
        });
    }

    latch.await();
    // BUG FIX: the pool was never shut down, leaving the JVM's worker
    // threads alive after the benchmark completed.
    es.shutdown();

    start = System.currentTimeMillis() - start;

    System.out.printf("sent     requests    : %d\n", n);
    System.out.printf("received requests    : %d\n", trans.get());
    System.out.printf("received requests_OK : %d\n", transOK.get());
    System.out.printf("throughput  (TPS)    : %d\n", n * 1000 / start);

    System.out.printf("mean: %f\n", stats.getMean());
    System.out.printf("median: %f\n", stats.getPercentile(50));
    System.out.printf("max: %f\n", stats.getMax());
    System.out.printf("min: %f\n", stats.getMin());

    // BUG FIX: the label says 99P but the original queried the 90th percentile.
    System.out.printf("99P: %f\n", stats.getPercentile(99));

}

From source file:es.upm.oeg.tools.rdfshapes.utils.CadinalityResultGenerator.java

public static void main(String[] args) throws Exception {

    String endpoint = "http://3cixty.eurecom.fr/sparql";

    // One class IRI per line; the path constants are declared elsewhere in this class.
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());

    // SPARQL query templates whose IRI/literal placeholders are bound per class/property below.
    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());

    DecimalFormat df = new DecimalFormat("0.0000");

    //Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");

    int currentExcelRow = 0;
    int classStartRow = 0;

    for (String clazz : classList) {

        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);

        String queryString = bindQueryString(individualCountQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            // No single count binding came back for this class; skip it.
            continue;
        }

        // Zero individuals means the class list and the endpoint disagree — fail fast.
        // (NOTE(review): a previous comment here said "continue", but the code throws.)
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }

        // Remember where this class's rows begin so its name cell can be merged later.
        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);

        queryString = bindQueryString(classPropertyQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");

        // One sheet row per property of the class.
        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {

                DescriptiveStatistics stats = new DescriptiveStatistics();

                String propertyURI = property.asResource().getURI();

                // Reuse the class row for the first property, create new rows afterwards.
                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;

                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);

                Map<String, String> litMap2 = new HashMap<>();
                Map<String, String> iriMap2 = ImmutableMap.of("class", clazz, "p", propertyURI);

                queryString = bindQueryString(propertyCardinalityQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap2, LITERAL_BINDINGS, litMap2));

                // Each solution carries a cardinality value and how many individuals have it.
                List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint,
                        ImmutableSet.of("card", "count"));

                int sum = 0;
                List<CardinalityCount> cardinalityList = new ArrayList<>();
                if (solnMaps.size() > 0) {

                    for (Map<String, RDFNode> soln : solnMaps) {
                        int count = soln.get("count").asLiteral().getInt();
                        int card = soln.get("card").asLiteral().getInt();

                        // Feed 'card' into the stats once per individual that has it.
                        for (int i = 0; i < count; i++) {
                            stats.addValue(card);
                        }

                        CardinalityCount cardinalityCount = new CardinalityCount(card, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                        sum += count;
                    }

                    // Check for zero cardinality instances
                    int count = individualCount - sum;
                    if (count > 0) {
                        for (int i = 0; i < count; i++) {
                            stats.addValue(0);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(0, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                    }
                }

                // cardinality -> percentage of individuals with that cardinality
                Map<Integer, Double> cardMap = new HashMap<>();
                for (CardinalityCount count : cardinalityList) {
                    cardMap.put(count.getCardinality(), count.getPrecentage());
                }

                XSSFCell instanceCountCell = propertyRow.createCell(2);
                instanceCountCell.setCellValue(individualCount);

                XSSFCell minCell = propertyRow.createCell(3);
                minCell.setCellValue(stats.getMin());

                XSSFCell maxCell = propertyRow.createCell(4);
                maxCell.setCellValue(stats.getMax());

                XSSFCell p1 = propertyRow.createCell(5);
                p1.setCellValue(stats.getPercentile(1));

                XSSFCell p99 = propertyRow.createCell(6);
                p99.setCellValue(stats.getPercentile(99));

                XSSFCell mean = propertyRow.createCell(7);
                mean.setCellValue(df.format(stats.getMean()));

                // Columns 8..28: percentage of individuals with cardinality 0..20.
                for (int i = 0; i < 21; i++) {
                    XSSFCell dataCell = propertyRow.createCell(8 + i);
                    Double percentage = cardMap.get(i);
                    if (percentage != null) {
                        dataCell.setCellValue(df.format(percentage));
                    } else {
                        dataCell.setCellValue(0);
                    }
                }

            }
        }

        //We have finished writing properties of one class, now it's time to merge the cells
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }

    }

    String filename = "3cixty.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}

From source file:io.hops.experiments.stats.TransactionStatsAggregator.java

/**
 * Reads a CSV stats file and accumulates a {@link DescriptiveStatistics}
 * per column whose header matches {@code headerPattern}, over all rows
 * starting with {@code transaction} (or every row when {@code ALL}).
 *
 * @param statsFile CSV file: header line first, then one line per transaction
 * @param headerPattern upper-cased substring to select columns, or {@code ALL}
 * @param transaction transaction-name prefix to select rows, or {@code ALL}
 * @param printSummary when true, prints min/max/avg/std per selected column
 * @return stats keyed by trimmed header name, or {@code null} when the file
 *         is missing or empty
 * @throws IOException if reading the file fails
 */
public static Map<String, DescriptiveStatistics> aggregate(File statsFile, String headerPattern,
        String transaction, boolean printSummary) throws IOException {
    if (!statsFile.exists())
        return null;

    transaction = transaction.toUpperCase();

    String[] headers = null;
    Map<Integer, DescriptiveStatistics> statistics = Maps.newHashMap();
    int txCount = 0;

    // BUG FIX: use try-with-resources — the original leaked the reader when
    // an exception was thrown while parsing.
    try (BufferedReader reader = new BufferedReader(new FileReader(statsFile))) {
        String tx = reader.readLine();
        if (tx != null) {
            headers = tx.split(",");
            // Column 0 holds the transaction name; keep stats only for the
            // columns whose header matches the requested pattern.
            for (int i = 1; i < headers.length; i++) {
                String h = headers[i].toUpperCase();
                if (h.contains(headerPattern) || headerPattern.equals(ALL)) {
                    statistics.put(i, new DescriptiveStatistics());
                }
            }
        }

        while ((tx = reader.readLine()) != null) {
            if (tx.startsWith(transaction) || transaction.equals(ALL)) {
                txCount++;
                String[] txStats = tx.split(",");
                // Ignore malformed rows whose column count differs from the header.
                if (txStats.length == headers.length) {
                    for (Map.Entry<Integer, DescriptiveStatistics> e : statistics.entrySet()) {
                        // parseDouble avoids the needless boxing of Double.valueOf.
                        e.getValue().addValue(Double.parseDouble(txStats[e.getKey()]));
                    }
                }
            }
        }
    }

    // Empty file: no header line was ever read.
    if (headers == null)
        return null;

    if (printSummary) {
        System.out.println("Transaction: " + transaction + " " + txCount);

        List<Integer> keys = new ArrayList<Integer>(statistics.keySet());
        Collections.sort(keys);

        for (Integer i : keys) {
            DescriptiveStatistics stats = statistics.get(i);
            // Columns that never saw a non-zero value are noise; skip them.
            if (stats.getMin() == 0 && stats.getMax() == 0) {
                continue;
            }
            System.out.println(headers[i]);
            System.out.println("Min " + stats.getMin() + " Max " + stats.getMax() + " Avg " + stats.getMean()
                    + " Std " + stats.getStandardDeviation());
        }
    }

    // Re-key the stats by (trimmed) header name instead of column index.
    Map<String, DescriptiveStatistics> annotatedStats = Maps.newHashMap();
    for (Map.Entry<Integer, DescriptiveStatistics> e : statistics.entrySet()) {
        annotatedStats.put(headers[e.getKey()].trim(), e.getValue());
    }
    return annotatedStats;
}

From source file:com.caseystella.analytics.outlier.streaming.mad.ConfusionMatrix.java

/**
 * Prints a one-block summary of a score distribution to stdout:
 * min, the 1/5/10/25/50/90/95/99th percentiles, and max.
 */
public static void printStats(String title, DescriptiveStatistics scoreStats) {
    StringBuilder summary = new StringBuilder(title).append(": ");
    summary.append("\n\tMin: ").append(scoreStats.getMin());
    // Each percentile is rendered on its own indented line as "<p>th: <value>".
    int[] percentiles = { 1, 5, 10, 25, 50, 90, 95, 99 };
    for (int p : percentiles) {
        summary.append("\n\t").append(p).append("th: ").append(scoreStats.getPercentile(p));
    }
    summary.append("\n\tMax: ").append(scoreStats.getMax());
    System.out.println(summary.toString());
}

From source file:async.nio2.Main.java

/**
 * Formats the median, 90th and 99th percentiles, min and max of the given
 * statistics as a single line, then resets the accumulator so the next
 * measurement window starts fresh.
 *
 * @param stats accumulated latency statistics; cleared as a side effect
 * @return the formatted evaluation line
 */
private static String toEvaluationString(DescriptiveStatistics stats) {
    // BUG FIX: the first placeholder is labelled "0.50 Percentile", but the
    // original passed stats.getMean(); use the actual median instead.
    String data = String.format(
            "0.50 Percentile  = %8.2f, " + "0.90 Percentile = %8.2f, " + "0.99 Percentile = %8.2f, "
                    + "min = %8.2f, " + "max = %8.2f",
            stats.getPercentile(50), stats.getPercentile(90), stats.getPercentile(99), stats.getMin(),
            stats.getMax());
    stats.clear();
    return data;
}

From source file:com.intuit.tank.service.impl.v1.report.SummaryReportRunner.java

/**
 * Builds one periodic data point from a single timing bucket.
 *
 * @param jobId identifier of the job the bucket belongs to
 * @param key page identifier
 * @param bucketItem bucket holding the timing statistics and time window
 * @return the populated {@link PeriodicData} instance
 */
private static PeriodicData getBucketData(int jobId, String key, BucketDataItem bucketItem) {
    final DescriptiveStatistics bucketStats = bucketItem.getStats();
    // Copy the bucket's max/mean/min, sample count and time window into the builder.
    return PeriodicDataBuilder.periodicData()
            .withJobId(jobId)
            .withMax(bucketStats.getMax())
            .withMean(bucketStats.getMean())
            .withMin(bucketStats.getMin())
            .withPageId(key)
            .withSampleSize((int) bucketStats.getN())
            .withPeriod(bucketItem.getPeriod())
            .withTimestamp(bucketItem.getStartTime())
            .build();
}

From source file:com.github.aptd.simulation.core.statistic.local.CStatistic.java

/**
 * Writes the summary statistics of one metric as a named section.
 *
 * @param p_writer writer instance that receives the section and values
 * @param p_name section name
 * @param p_statistic statistic values to serialize
 */
private static void apply(final IWriter p_writer, final String p_name,
        final DescriptiveStatistics p_statistic) {
    p_writer.section(1, p_name);

    p_writer.value("geometricmean", p_statistic.getGeometricMean());
    p_writer.value("kurtosis", p_statistic.getKurtosis());
    p_writer.value("max", p_statistic.getMax());
    p_writer.value("min", p_statistic.getMin());
    p_writer.value("mean", p_statistic.getMean());
    p_writer.value("count", p_statistic.getN());
    // BUG FIX: DescriptiveStatistics.getPercentile takes p in (0, 100], so the
    // 25th/75th percentiles are 25 and 75 — the original passed 0.25/0.75,
    // which returned the 0.25th/0.75th percentiles instead.
    p_writer.value("25-percentile", p_statistic.getPercentile(25));
    p_writer.value("75-percentile", p_statistic.getPercentile(75));
    p_writer.value("populationvariance", p_statistic.getPopulationVariance());
    p_writer.value("quadraticmean", p_statistic.getQuadraticMean());
    // NOTE(review): "standdeviation" and "sumsequared" are misspelled, but they
    // are runtime output keys — renaming them could break downstream consumers.
    p_writer.value("standdeviation", p_statistic.getStandardDeviation());
    p_writer.value("skewness", p_statistic.getSkewness());
    p_writer.value("sum", p_statistic.getSum());
    p_writer.value("sumsequared", p_statistic.getSumsq());
    p_writer.value("variance", p_statistic.getVariance());
}

From source file:com.teradata.benchto.service.model.AggregatedMeasurement.java

/**
 * Aggregates raw measurement values into min/max/mean plus the standard
 * deviation (absolute and as a percentage of the mean).
 *
 * @param unit unit the values are expressed in
 * @param values raw measurements; must not be empty
 * @return the aggregated measurement
 * @throws IllegalArgumentException if {@code values} is empty
 */
public static AggregatedMeasurement aggregate(MeasurementUnit unit, Collection<Double> values) {
    // ROBUSTNESS: fail fast with a descriptive message — the original fell
    // into Iterables.getOnlyElement on empty input and threw a bare
    // NoSuchElementException.
    if (values.isEmpty()) {
        throw new IllegalArgumentException("cannot aggregate an empty collection of measurements");
    }
    if (values.size() < 2) {
        // Single sample: min == max == mean, and deviation is defined as 0.
        Double value = Iterables.getOnlyElement(values);
        return new AggregatedMeasurement(unit, value, value, value, 0.0, 0.0);
    }
    DescriptiveStatistics statistics = new DescriptiveStatistics(
            values.stream().mapToDouble(Double::doubleValue).toArray());

    // Relative standard deviation (percent of mean); guard against a zero
    // deviation so a constant series reports 0 rather than dividing.
    double stdDevPercent = 0.0;
    if (statistics.getStandardDeviation() > 0.0) {
        stdDevPercent = (statistics.getStandardDeviation() / statistics.getMean()) * 100;
    }

    return new AggregatedMeasurement(unit, statistics.getMin(), statistics.getMax(), statistics.getMean(),
            statistics.getStandardDeviation(), stdDevPercent);
}

From source file:com.intuit.tank.service.impl.v1.report.SummaryReportRunner.java

/**
 * Builds the summary row for a single page: min/max/mean, a fixed set of
 * percentiles, and higher moments with NaN values coerced to 0.
 *
 * @param jobId identifier of the job being summarized
 * @param key page identifier
 * @param stats accumulated timing statistics for that page
 * @return the populated SummaryData instance
 */
private static SummaryData getSummaryData(int jobId, String key, DescriptiveStatistics stats) {
    // Kurtosis, skewness, std deviation and variance are NaN for degenerate
    // samples (too few values); store 0 instead so the report stays numeric.
    SummaryData ret = SummaryDataBuilder.summaryData().withJobId(jobId)
            .withKurtosis(!Double.isNaN(stats.getKurtosis()) ? stats.getKurtosis() : 0).withMax(stats.getMax())
            .withMean(stats.getMean()).withMin(stats.getMin()).withPageId(key)
            .withPercentile10(stats.getPercentile(10)).withPercentile20(stats.getPercentile(20))
            .withPercentile30(stats.getPercentile(30)).withPercentile40(stats.getPercentile(40))
            .withPercentile50(stats.getPercentile(50)).withPercentile60(stats.getPercentile(60))
            .withPercentile70(stats.getPercentile(70)).withPercentile80(stats.getPercentile(80))
            .withPercentile90(stats.getPercentile(90)).withPercentile95(stats.getPercentile(95))
            .withPercentile99(stats.getPercentile(99)).withSampleSize((int) stats.getN())
            .withSkewness(!Double.isNaN(stats.getSkewness()) ? stats.getSkewness() : 0)
            .withSttDev(!Double.isNaN(stats.getStandardDeviation()) ? stats.getStandardDeviation() : 0)
            .withVarience(!Double.isNaN(stats.getVariance()) ? stats.getVariance() : 0).build();
    return ret;
}

From source file:com.intuit.tank.vm.common.util.ReportUtil.java

/**
 * Builds one formatted summary row: the fixed summary columns first,
 * then one column per configured percentile.
 *
 * @param key page identifier, written to the first column
 * @param stats accumulated statistics for that page
 * @return formatted cell values, one per summary/percentile column
 */
public static final String[] getSummaryData(String key, DescriptiveStatistics stats) {
    String[] row = new String[ReportUtil.SUMMARY_HEADERS.length + PERCENTILES.length];
    int col = 0;
    row[col++] = key; // Page ID
    row[col++] = INT_NF.format(stats.getN()); // Sample Size
    row[col++] = DOUBLE_NF.format(stats.getMean()); // Mean
    row[col++] = INT_NF.format(stats.getPercentile(50)); // Median
    row[col++] = INT_NF.format(stats.getMin()); // Min
    row[col++] = INT_NF.format(stats.getMax()); // Max
    row[col++] = DOUBLE_NF.format(stats.getStandardDeviation()); // Std Dev
    row[col++] = DOUBLE_NF.format(stats.getKurtosis()); // Kurtosis
    row[col++] = DOUBLE_NF.format(stats.getSkewness()); // Skewness
    row[col++] = DOUBLE_NF.format(stats.getVariance()); // Variance
    // Percentile columns; entry [1] of each PERCENTILES row is the p value.
    for (int p = 0; p < PERCENTILES.length; p++) {
        row[col++] = INT_NF.format(stats.getPercentile((Integer) PERCENTILES[p][1]));
    }
    return row;
}