Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics DescriptiveStatistics

List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics DescriptiveStatistics

Introduction

On this page you can find example usage of org.apache.commons.math3.stat.descriptive DescriptiveStatistics DescriptiveStatistics.

Prototype

public DescriptiveStatistics() 

Source Link

Document

Construct a DescriptiveStatistics instance with an infinite window

Usage

From source file:main.java.metric.Metric.java

/**
 * Computes the coefficient of variation (standard deviation / mean) of the
 * total data volume held by each server in the given cluster.
 *
 * @param cluster cluster whose servers are measured
 * @return coefficient of variation of per-server data volume
 *         (NaN when the cluster has no servers or the mean is 0)
 */
public static double getCVServerData(Cluster cluster) {
    DescriptiveStatistics perServerData = new DescriptiveStatistics();

    for (Server server : cluster.getServers()) {
        perServerData.addValue(server.getServer_total_data());
    }

    return perServerData.getStandardDeviation() / perServerData.getMean();
}

From source file:com.caseystella.analytics.distribution.Distribution.java

/**
 * Returns the modified z-score of {@code val} relative to the sample
 * {@code vals}, based on the median absolute deviation (MAD):
 * |0.6745 * (val - median) / MAD|.
 *
 * @param vals the sample values (iterated twice)
 * @param val  the value to score
 * @return the absolute modified z-score (Infinity/NaN when MAD is 0)
 */
public static double getMadScore(Iterable<Double> vals, Double val) {
    // First pass: find the sample median.
    DescriptiveStatistics sample = new DescriptiveStatistics();
    for (Double v : vals) {
        sample.addValue(v);
    }
    double median = sample.getPercentile(50);

    // Second pass: median of absolute deviations from the median.
    DescriptiveStatistics deviations = new DescriptiveStatistics();
    for (Double v : vals) {
        deviations.addValue(Math.abs(v - median));
    }
    double mad = deviations.getPercentile(50);

    // 0.6745 scales MAD to be consistent with the standard deviation of a
    // normal distribution (modified z-score convention).
    return Math.abs(0.6745 * (val - median) / mad);
}

From source file:io.yields.math.framework.DomainTest.java

@Explore(name = "Test Variable Distribution with multiple properties", dataProvider = DataProviders.FixedMersenneTwisterDataProvider.class)
@Exploration(name = "Test Function", context = FunctionExplorerMultiplePropertiesContext.class, group = "domain")
public void testVariableDistributionMultipleProperties(Explorer<Double> explorer) {

    // Every explored sample must be valid: no invalid samples, no property errors.
    assertThat(explorer.all().count()).isEqualTo(explorer.valid().count());
    // NOTE(review): this assertion compares valid().count() with itself and is
    // therefore always true; it was probably meant to compare against a
    // different stream — confirm the intended check.
    assertThat(explorer.valid().count()).isEqualTo(explorer.valid().count());
    assertThat(explorer.invalid().count()).isEqualTo(0);
    assertThat(explorer.propertyError().count()).isEqualTo(0);

    // Collect all function outcomes (absent outcomes count as 0) and check
    // the distribution: mean ~ 0, max ~ 1, min ~ -1, each within +/-0.1.
    DescriptiveStatistics stats = new DescriptiveStatistics();
    explorer.all().forEach(result -> stats.addValue(result.getFunctionOutcome().orElse(0d)));

    assertThat(stats.getMean()).isEqualTo(0, delta(0.1));
    assertThat(stats.getMax()).isEqualTo(1, delta(0.1));
    assertThat(stats.getMin()).isEqualTo(-1, delta(0.1));
}

From source file:gdsc.smlm.ij.plugins.SpotInspector.java

/**
 * Plugin entry point. Loads localisation results from memory, scores and
 * ranks each spot, shows the ranked results in a table, optionally plots the
 * score distribution, and extracts a square region around each spot from the
 * original source image into an ImageStack for visual inspection.
 *
 * @param arg unused (required by the ImageJ plugin interface)
 */
public void run(String arg) {
    if (MemoryPeakResults.countMemorySize() == 0) {
        IJ.error(TITLE, "No localisations in memory");
        return;// nothing to inspect
    }

    if (!showDialog())
        return;

    // Load the results
    results = ResultsManager.loadInputResults(inputOption, false);
    if (results == null || results.size() == 0) {
        IJ.error(TITLE, "No results could be loaded");
        IJ.showStatus("");
        return;
    }

    // Check if the original image is open
    ImageSource source = results.getSource();
    if (source == null) {
        IJ.error(TITLE, "Unknown original source image");
        return;
    }
    source = source.getOriginal();
    if (!source.open()) {
        IJ.error(TITLE, "Cannot open original source image: " + source.toString());
        return;
    }
    // A negative value signals that no fitting configuration is available.
    final float stdDevMax = getStandardDeviation(results);
    if (stdDevMax < 0) {
        // TODO - Add dialog to get the initial peak width
        IJ.error(TITLE, "Fitting configuration (for initial peak width) is not available");
        return;
    }

    // Rank spots. getScore returns two values per result: score[0] and
    // score[1] (presumably the ranking score and the original score used for
    // slice labels below — TODO confirm against PeakResultRank).
    rankedResults = new ArrayList<PeakResultRank>(results.size());
    final double a = results.getNmPerPixel();
    final double gain = results.getGain();
    final boolean emCCD = results.isEMCCD();

    for (PeakResult r : results.getResults()) {
        float[] score = getScore(r, a, gain, emCCD, stdDevMax);
        rankedResults.add(new PeakResultRank(r, score[0], score[1]));
    }
    // Sorts using PeakResultRank's natural ordering (presumably by score).
    Collections.sort(rankedResults);

    // Prepare results table. Get bias if necessary
    if (showCalibratedValues) {
        // Get a bias if required. A bias of 0 is treated as "not set".
        // NOTE(review): assumes getCalibration() is non-null here — confirm.
        Calibration calibration = results.getCalibration();
        if (calibration.bias == 0) {
            GenericDialog gd = new GenericDialog(TITLE);
            gd.addMessage("Calibrated results requires a camera bias");
            gd.addNumericField("Camera_bias (ADUs)", calibration.bias, 2);
            gd.showDialog();
            // If the dialog is cancelled the bias simply stays 0.
            if (!gd.wasCanceled()) {
                calibration.bias = Math.abs(gd.getNextNumber());
            }
        }
    }

    IJTablePeakResults table = new IJTablePeakResults(false, results.getName(), true);
    table.copySettings(results);
    table.setTableTitle(TITLE);
    table.setAddCounter(true);
    table.setShowCalibratedValues(showCalibratedValues);
    table.begin();

    // Add a mouse listener to jump to the frame for the clicked line
    textPanel = table.getResultsWindow().getTextPanel();

    // We must ignore old instances of this class from the mouse listeners
    id = ++currentId;
    textPanel.addMouseListener(this);

    // Add results to the table in rank order, recording the 0-based rank.
    int n = 0;
    for (PeakResultRank rank : rankedResults) {
        rank.rank = n++;
        PeakResult r = rank.peakResult;
        table.add(r.peak, r.origX, r.origY, r.origValue, r.error, r.noise, r.params, r.paramsStdDev);
    }
    table.end();

    if (plotScore || plotHistogram) {
        // Get values for the plots: x is the 1-based rank, y the recovered score.
        float[] xValues = null, yValues = null;
        double yMin, yMax;

        int spotNumber = 0;
        xValues = new float[rankedResults.size()];
        yValues = new float[xValues.length];
        for (PeakResultRank rank : rankedResults) {
            xValues[spotNumber] = spotNumber + 1;
            yValues[spotNumber++] = recoverScore(rank.score);
        }

        // Set the min and max y-values using 1.5 x IQR 
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (float v : yValues)
            stats.addValue(v);
        if (removeOutliers) {
            // Clamp the plot range to [Q1 - IQR, Q3 + IQR] within the data range.
            double lower = stats.getPercentile(25);
            double upper = stats.getPercentile(75);
            double iqr = upper - lower;

            yMin = FastMath.max(lower - iqr, stats.getMin());
            yMax = FastMath.min(upper + iqr, stats.getMax());

            IJ.log(String.format("Data range: %f - %f. Plotting 1.5x IQR: %f - %f", stats.getMin(),
                    stats.getMax(), yMin, yMax));
        } else {
            yMin = stats.getMin();
            yMax = stats.getMax();

            IJ.log(String.format("Data range: %f - %f", yMin, yMax));
        }

        plotScore(xValues, yValues, yMin, yMax);
        plotHistogram(yValues, yMin, yMax);
    }

    // Extract spots into a stack. Each slice is a (2*radius+1)^2 crop.
    final int w = source.getWidth();
    final int h = source.getHeight();
    final int size = 2 * radius + 1;
    ImageStack spots = new ImageStack(size, size, rankedResults.size());

    // To assist the extraction of data from the image source, process them in time order to allow 
    // frame caching. Then set the appropriate slice in the result stack
    Collections.sort(rankedResults, new Comparator<PeakResultRank>() {
        public int compare(PeakResultRank o1, PeakResultRank o2) {
            // Order by frame number (peak) ascending.
            if (o1.peakResult.peak < o2.peakResult.peak)
                return -1;
            if (o1.peakResult.peak > o2.peakResult.peak)
                return 1;
            return 0;
        }
    });

    for (PeakResultRank rank : rankedResults) {
        PeakResult r = rank.peakResult;

        // Extract image
        // Note that the coordinates are relative to the middle of the pixel (0.5 offset)
        // so do not round but simply convert to int
        final int x = (int) (r.params[Gaussian2DFunction.X_POSITION]);
        final int y = (int) (r.params[Gaussian2DFunction.Y_POSITION]);

        // Extract a region but crop to the image bounds
        int minX = x - radius;
        int minY = y - radius;
        int maxX = FastMath.min(x + radius + 1, w);
        int maxY = FastMath.min(y + radius + 1, h);

        // Remember how much the crop was clipped at the low edges so the spot
        // can be re-centred by padding below.
        int padX = 0, padY = 0;
        if (minX < 0) {
            padX = -minX;
            minX = 0;
        }
        if (minY < 0) {
            padY = -minY;
            minY = 0;
        }
        int sizeX = maxX - minX;
        int sizeY = maxY - minY;

        float[] data = source.get(r.peak, new Rectangle(minX, minY, sizeX, sizeY));
        // Prevent errors with missing data
        if (data == null)
            data = new float[sizeX * sizeY];
        ImageProcessor spotIp = new FloatProcessor(sizeX, sizeY, data, null);

        // Pad if necessary, i.e. the crop is too small for the stack
        if (padX > 0 || padY > 0 || sizeX < size || sizeY < size) {
            ImageProcessor spotIp2 = spotIp.createProcessor(size, size);
            spotIp2.insert(spotIp, padX, padY);
            spotIp = spotIp2;
        }
        // ImageStack slices are 1-based; rank.rank was assigned 0-based above.
        int slice = rank.rank + 1;
        spots.setPixels(spotIp.getPixels(), slice);
        spots.setSliceLabel(Utils.rounded(rank.originalScore), slice);
    }

    source.close();

    ImagePlus imp = Utils.display(TITLE, spots);
    imp.setRoi((PointRoi) null);

    // Make bigger: zoom in 10 times on the image centre.
    for (int i = 10; i-- > 0;)
        imp.getWindow().getCanvas().zoomIn(imp.getWidth() / 2, imp.getHeight() / 2);
}

From source file:com.mapd.bench.BenchmarkCloud.java

/**
 * Runs {@code sql} {@code iterations} times against a freshly opened
 * connection, timing JDBC overhead, (driver-reported) execution time and
 * result-set iteration separately. The first iteration is recorded on its own
 * (warm-up) and excluded from the aggregate statistics. An INSERT statement
 * describing the run is appended to {@code LResult}, and a formatted summary
 * line is returned.
 *
 * NOTE(review): the {@code conn1} parameter is never used — a new connection
 * is opened from {@code url} instead. Confirm whether it should be removed or
 * used.
 *
 * @param conn1      unused (see note above)
 * @param qid        identifier of the query, used in logs and output rows
 * @param sql        the query to benchmark
 * @param iterations number of times to execute the query
 * @return a line formatted with {@code lineDescriptor} summarising the timings
 */
String executeQuery(Connection conn1, String qid, String sql, int iterations) {
    Statement stmt = null;// declared outside try so the finally block can close it
    Connection conn = getConnection(url, iUser, iPasswd);

    // First-iteration (warm-up) timings, kept out of the stats objects.
    Long firstExecute = 0l;
    Long firstJdbc = 0l;
    Long firstIterate = 0l;

    DescriptiveStatistics statsExecute = new DescriptiveStatistics();
    DescriptiveStatistics statsJdbc = new DescriptiveStatistics();
    DescriptiveStatistics statsIterate = new DescriptiveStatistics();
    DescriptiveStatistics statsTotal = new DescriptiveStatistics();

    long totalTime = 0;
    int resultCount = 0;
    try {

        long startTime = System.currentTimeMillis();
        for (int loop = 0; loop < iterations; loop++) {

            //Execute a query
            stmt = conn.createStatement();

            long timer = System.currentTimeMillis();
            if (loop == 0) {
                System.out.println(String.format("Query Id is %s : query is '%s'", qid, sql));
            }
            ResultSet rs = stmt.executeQuery(sql);

            long executeTime = 0;
            long jdbcTime = 0;

            // gather internal execute time for MapD as we are interested in that
            // NOTE(review): getQueryTimeout() normally returns the configured
            // timeout; here it is repurposed by the MapD driver to report the
            // server-side execution time (see the "fake" comment below).
            if (driver.equals(JDBC_DRIVER)) {
                executeTime = stmt.getQueryTimeout();
                jdbcTime = (System.currentTimeMillis() - timer) - executeTime;
            } else {
                jdbcTime = (System.currentTimeMillis() - timer);
                executeTime = 0;
            }
            // this is fake to get our intenal execute time.
            logger.debug("Query Timeout/AKA internal Execution Time was " + stmt.getQueryTimeout()
                    + " ms Elapsed time in JVM space was " + (System.currentTimeMillis() - timer) + "ms");

            timer = System.currentTimeMillis();
            //Extract data from result set, counting rows. The equals check
            //against statsExecute can never succeed; it only forces the
            //driver to materialise each value.
            resultCount = 0;
            while (rs.next()) {
                Object obj = rs.getObject(1);
                if (obj != null && obj.equals(statsExecute)) {
                    logger.info("Impossible");
                }
                resultCount++;
            }
            long iterateTime = (System.currentTimeMillis() - timer);

            //        if (resultCount != expected) {
            //          logger.error("Expect " + expected + " actual " + resultCount + " for query " + sql);
            //          // don't run anymore
            //          break;
            //        }
            if (loop == 0) {
                // Warm-up iteration: record separately, keep out of the stats.
                firstJdbc = jdbcTime;
                firstExecute = executeTime;
                firstIterate = iterateTime;

            } else {
                statsJdbc.addValue(jdbcTime);
                statsExecute.addValue(executeTime);
                statsIterate.addValue(iterateTime);
                statsTotal.addValue(jdbcTime + executeTime + iterateTime);
            }

            //Clean-up environment
            //NOTE(review): rs/stmt are not closed if an exception is thrown
            //mid-loop; only stmt is covered by the finally block below.
            rs.close();
            stmt.close();
        }
        totalTime = System.currentTimeMillis() - startTime;
        conn.close();
    } catch (SQLException se) {
        //Handle errors for JDBC. NOTE(review): System.exit terminates the
        //whole benchmark JVM on any SQL error.
        se.printStackTrace();
        System.exit(4);
    } catch (Exception e) {
        //Handle errors for Class.forName
        e.printStackTrace();
        System.exit(3);
    } finally {
        //finally block used to close resources
        try {
            if (stmt != null) {
                stmt.close();
            }
        } catch (SQLException se2) {
        } // intentionally ignored: nothing we can do while cleaning up
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException se) {
            se.printStackTrace();
            System.exit(6);
        } //end finally try
    } //end try

    // write it to the db here as well
    String insertPart = String.format(insertDescriptor, this.rid, this.rTimestamp, url, this.driver, label,
            gpuCount, this.tableName, qid, resultCount, "", statsTotal.getMean(), statsTotal.getMin(),
            statsTotal.getMax(), statsTotal.getPercentile(85), statsExecute.getMean(), statsExecute.getMin(),
            statsExecute.getMax(), statsExecute.getPercentile(85), statsExecute.getPercentile(25),
            statsExecute.getStandardDeviation(), statsJdbc.getMean(), statsJdbc.getMin(), statsJdbc.getMax(),
            statsJdbc.getPercentile(85), statsIterate.getMean(), statsIterate.getMin(), statsIterate.getMax(),
            statsIterate.getPercentile(85), firstExecute, firstJdbc, firstIterate, iterations, totalTime,
            (long) statsTotal.getSum() + firstExecute + firstJdbc + firstIterate, targetDBVersion);

    LResult.add("Insert into results values " + insertPart);

    return String.format(lineDescriptor, qid, statsTotal.getMean(), statsTotal.getMin(), statsTotal.getMax(),
            statsTotal.getPercentile(85), statsExecute.getMean(), statsExecute.getMin(), statsExecute.getMax(),
            statsExecute.getPercentile(85), statsExecute.getPercentile(25), statsExecute.getStandardDeviation(),
            statsJdbc.getMean(), statsJdbc.getMin(), statsJdbc.getMax(), statsJdbc.getPercentile(85),
            statsIterate.getMean(), statsIterate.getMin(), statsIterate.getMax(),
            statsIterate.getPercentile(85), firstExecute, firstJdbc, firstIterate, iterations, totalTime,
            (long) statsTotal.getSum() + firstExecute + firstJdbc + firstIterate);

}

From source file:main.java.metric.Metric.java

/**
 * Computes the coefficient of variation (standard deviation / mean) of the
 * number of entries held per server for the given transaction.
 *
 * NOTE(review): the {@code cluster} parameter is not used by this overload;
 * it is kept for signature compatibility with callers — confirm intent.
 *
 * @param cluster unused (see note above)
 * @param tr      transaction whose per-server sets are measured
 * @return coefficient of variation of per-server set sizes
 */
public static double getCVServerData(Cluster cluster, Transaction tr) {
    DescriptiveStatistics perServerData = new DescriptiveStatistics();

    for (HashSet<Integer> serverEntries : tr.getTr_serverSet().values()) {
        perServerData.addValue(serverEntries.size());
    }

    return perServerData.getStandardDeviation() / perServerData.getMean();
}

From source file:edu.ucsc.barrel.cdf_gen.CDF_Gen.java

/**
 * Fills gaps (entries equal to the ISTP float fill value) in
 * {@code data.peak511_bin} by linear interpolation between the neighbouring
 * good values. Samples before the first good value are back-filled with that
 * value; a trailing gap keeps the last good value (the temporary fill).
 */
public static void fill511Gaps() {
    int size = data.getSize("mod32"), step_size = 1, start = 0;
    double delta, std_dev, med;
    float m, b, // slope and intercept for linear interpolation across gaps
            fill = (Float) CDFVar.getIstpVal("FLOAT_FILL"), new_value = fill, last_value = fill;
    DescriptiveStatistics stats = new DescriptiveStatistics();

    // Generate statistics on the 511 peak jump sizes between consecutive
    // valid samples; std_dev and med feed the (currently disabled) outlier
    // rejection further below.
    for (int peak_i = 0; peak_i < (size - 1); peak_i++) {
        if (data.peak511_bin[peak_i] == fill) {
            continue;
        }
        if (data.peak511_bin[peak_i + 1] == fill) {
            continue;
        }

        delta = data.peak511_bin[peak_i + 1] - data.peak511_bin[peak_i];
        if (delta != 0) {
            stats.addValue(delta);
        }
    }
    std_dev = stats.getStandardDeviation();
    med = stats.getPercentile(50);

    // Find the first good value.
    for (start = 0; start < size; start++) {
        if (data.peak511_bin[start] != fill) {
            new_value = data.peak511_bin[start];
            last_value = data.peak511_bin[start];
            break;
        }
    }

    // Back-fill any missing data before the first good point.
    Arrays.fill(data.peak511_bin, 0, start, new_value);

    for (int filler_i = start + 1; filler_i < size; filler_i++) {
        if (data.peak511_bin[filler_i] == fill) {
            // Temporarily fill the gap with the last good value; this is done
            // in case there is no later good value to interpolate against.
            data.peak511_bin[filler_i] = last_value;
            step_size++;
        } else {
            // Make sure the jump size wasn't too big (check currently disabled).
            delta = data.peak511_bin[filler_i] - data.peak511_bin[filler_i - 1];
            // if(Math.abs(delta - med) > (std_dev * 3)){
            //    data.peak511_bin[filler_i] = last_value;
            //    step_size++;
            // }

            last_value = new_value;
            new_value = data.peak511_bin[filler_i];

            // Linearly interpolate across any preceding gap.
            if (step_size > 1) {
                // BUG FIX: the ramp runs from last_value (the good sample at
                // index filler_i - step_size) up to new_value (at filler_i),
                // so the slope is (new - last) / step. The original used
                // (last_value - new_value), which inverted the interpolation
                // (e.g. a gap between 10 and 20 was filled with 30..22.5
                // instead of 10..17.5).
                m = (new_value - last_value) / step_size;
                b = new_value - (m * filler_i);

                for (int fill_i = filler_i - step_size; fill_i < filler_i; fill_i++) {
                    data.peak511_bin[fill_i] = m * fill_i + b;
                }

                step_size = 1;
            }
        }
    }
}

From source file:com.intuit.tank.persistence.databases.MetricsCalculator.java

/**
 * @param loggingKey/*from www .  j a  va 2  s  . com*/
 * @param periodDate
 * @return
 */
private DescriptiveStatistics getBucketStats(String loggingKey, int period, Date periodDate) {
    Map<Date, BucketDataItem> map = bucketItems.get(loggingKey);
    if (map == null) {
        map = new HashMap<Date, BucketDataItem>();
        bucketItems.put(loggingKey, map);
    }
    BucketDataItem bucketDataItem = map.get(periodDate);
    if (bucketDataItem == null) {
        bucketDataItem = new BucketDataItem(period, periodDate, new DescriptiveStatistics());
        map.put(periodDate, bucketDataItem);
    }
    return bucketDataItem.getStats();
}

From source file:com.tascape.reactor.report.SuiteResultView.java

/**
 * Groups case metrics by "group.name", collecting each metric's values into a
 * "values" list on the first row seen for that key. For keys with more than
 * one value, adds "max"/"min"/"mean"/"size" entries to the row. Finally
 * replaces {@code caseMetrics} with one representative row per key.
 */
private void processMetrics() {
    Map<String, Map<String, Object>> grouped = new HashMap<>();
    for (Map<String, Object> row : this.caseMetrics) {
        String key = row.get(CaseResultMetric.METRIC_GROUP) + "." + row.get(CaseResultMetric.METRIC_NAME);
        Map<String, Object> existing = grouped.get(key);
        if (existing == null) {
            // First row for this key becomes the representative; seed its
            // "values" list with its own metric value.
            grouped.put(key, row);
            List<Double> values = new ArrayList<>();
            values.add((double) row.get(CaseResultMetric.METRIC_VALUE));
            row.put("values", values);
        } else {
            // Subsequent rows only contribute their value to the representative.
            @SuppressWarnings("unchecked")
            List<Double> values = (List<Double>) existing.get("values");
            values.add((double) row.get(CaseResultMetric.METRIC_VALUE));
        }
    }

    // Summarise keys that collected more than one value.
    for (Map<String, Object> row : grouped.values()) {
        @SuppressWarnings("unchecked")
        List<Double> values = (List<Double>) row.get("values");
        if (values.size() > 1) {
            DescriptiveStatistics stats = new DescriptiveStatistics();
            for (Double v : values) {
                stats.addValue(v);
            }
            row.put("max", stats.getMax());
            row.put("min", stats.getMin());
            row.put("mean", stats.getMean());
            row.put("size", values.size());
        }
    }

    this.caseMetrics = new ArrayList<>(grouped.values());
}

From source file:main.java.metric.Metric.java

/**
 * Computes the mean total data volume across all servers in the cluster.
 *
 * @param cluster cluster whose servers are measured
 * @return the average per-server data volume (NaN when the cluster is empty)
 */
public static double getMeanServerData(Cluster cluster) {
    DescriptiveStatistics perServerData = new DescriptiveStatistics();

    for (Server server : cluster.getServers()) {
        perServerData.addValue(server.getServer_total_data());
    }

    return perServerData.getMean();
}