Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics DescriptiveStatistics

Introduction

On this page you can find example usage for the DescriptiveStatistics() constructor of org.apache.commons.math3.stat.descriptive.DescriptiveStatistics.

Prototype

public DescriptiveStatistics() 

Document

Construct a DescriptiveStatistics instance with an infinite window.
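
A minimal sketch of the constructor in isolation (the class name and values are illustrative, not taken from the projects listed under Usage). The no-argument form keeps an infinite window, so every added value is retained; commons-math3 also provides DescriptiveStatistics(int window) for cases where only the most recent values should count:

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class DescriptiveStatisticsSketch {
    public static void main(String[] args) {
        // Infinite window: all added values contribute to every statistic.
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (double v : new double[] { 1.0, 2.0, 4.0, 8.0 }) {
            stats.addValue(v);
        }
        System.out.println("n      = " + stats.getN());
        System.out.println("mean   = " + stats.getMean());
        System.out.println("stddev = " + stats.getStandardDeviation());
        System.out.println("median = " + stats.getPercentile(50));
    }
}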

Usage

From source file:nars.truth.Truth.java

/**
 * Provides a statistics summary (mean, min, max, variance, etc.) of a particular
 * TruthValue component across a given list of Truthed items (sentences,
 * TruthValues, etc.). Null values in the iteration are ignored.
 */
@NotNull
static DescriptiveStatistics statistics(@NotNull Iterable<? extends Truthed> t,
        @NotNull TruthComponent component) {
    DescriptiveStatistics d = new DescriptiveStatistics();
    for (Truthed x : t) {
        Truth v = x.truth();
        if (v != null)
            d.addValue(v.getComponent(component));
    }
    return d;
}
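
The returned DescriptiveStatistics can then be queried directly by the caller. A hypothetical invocation (beliefs and TruthComponent.Frequency are placeholder names, not identifiers from the project):

DescriptiveStatistics freq = Truth.statistics(beliefs, TruthComponent.Frequency); // hypothetical arguments
System.out.println(freq.getMean() + " +/- " + freq.getStandardDeviation());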

From source file:com.kircherelectronics.accelerationexplorer.data.SampleAxisState.java

/**
 * Stop recording sample measurements.
 */
@Override
public void stopSample() {
    sampler.setSampling(false);

    DescriptiveStatistics stats = new DescriptiveStatistics();

    // Add the data from the array
    for (int i = 0; i < acceleration.length; i++) {
        stats.addValue(acceleration[i]);
    }

    sampleRMS = stats.getStandardDeviation();

    sampleMean = StatUtils.mean(acceleration);

    sampleMax = StatUtils.max(acceleration);
    sampleMin = StatUtils.min(acceleration);

}
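
Note that getStandardDeviation() measures spread about the mean (the RMS of the deviations), not the raw root-mean-square of the samples; if the raw RMS is what sampleRMS is meant to capture, DescriptiveStatistics also exposes getQuadraticMean(). A minimal sketch of the distinction:

DescriptiveStatistics stats = new DescriptiveStatistics();
for (double a : new double[] { -1.0, 1.0, 3.0 }) {
    stats.addValue(a);
}
double stddev = stats.getStandardDeviation(); // RMS of deviations from the mean
double rms = stats.getQuadraticMean();        // RMS of the raw samples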

From source file:cc.kave.commons.pointsto.evaluation.TimeEvaluation.java

private DescriptiveStatistics measurePointerAnalysis(List<Context> contexts, PointsToAnalysisFactory ptFactory,
        MutableLong sink) {
    DescriptiveStatistics stats = new DescriptiveStatistics();

    for (Context context : contexts) {
        PointsToAnalysis ptAnalysis = ptFactory.create();
        Stopwatch watch = Stopwatch.createStarted();
        PointsToContext ptContext = ptAnalysis.compute(context);
        watch.stop();
        sink.add(ptContext.hashCode());
        long time = watch.elapsed(TimeUnit.MICROSECONDS);
        stats.addValue(time);

        analysisTimes.add(new AnalysisTimeEntry(ptFactory.getName(),
                context.getTypeShape().getTypeHierarchy().getElement(), stmtCounts.get(context), time));
    }

    return stats;
}
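
Because the method returns the full DescriptiveStatistics rather than a single aggregate, callers can pick whichever summary they need; for example (illustrative only):

DescriptiveStatistics timings = measurePointerAnalysis(contexts, ptFactory, sink);
System.out.printf("median: %.0f us, p95: %.0f us%n",
        timings.getPercentile(50), timings.getPercentile(95));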

From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.diagnostics.LinePlot.java

/**
 * Generates the quantile series for the specified key.
 *
 * @param key the key identifying which result to plot
 * @param dataset the dataset to store the generated series
 */
protected void generateQuantileSeries(ResultKey key, YIntervalSeriesCollection dataset) {
    List<DataPoint> dataPoints = new ArrayList<DataPoint>();

    for (Accumulator accumulator : controller.get(key)) {
        if (!accumulator.keySet().contains(metric)) {
            continue;
        }

        for (int i = 0; i < accumulator.size(metric); i++) {
            dataPoints.add(new DataPoint((Integer) accumulator.get("NFE", i),
                    ((Number) accumulator.get(metric, i)).doubleValue()));
        }
    }

    Collections.sort(dataPoints);

    YIntervalSeries series = new YIntervalSeries(key);
    DescriptiveStatistics statistics = new DescriptiveStatistics();
    int index = 0;
    int currentNFE = RESOLUTION;

    while (index < dataPoints.size()) {
        DataPoint point = dataPoints.get(index);

        if (point.getNFE() <= currentNFE) {
            statistics.addValue(point.getValue());
            index++;
        } else {
            if (statistics.getN() > 0) {
                series.add(currentNFE, statistics.getPercentile(50), statistics.getPercentile(25),
                        statistics.getPercentile(75));
            }

            statistics.clear();
            currentNFE += RESOLUTION;
        }
    }

    if (statistics.getN() > 0) {
        //if this is the only entry, add an extra point so the series has non-zero width
        if (series.isEmpty()) {
            series.add(currentNFE - RESOLUTION, statistics.getPercentile(50), statistics.getPercentile(25),
                    statistics.getPercentile(75));
        }

        series.add(currentNFE, statistics.getPercentile(50), statistics.getPercentile(25),
                statistics.getPercentile(75));
    }

    dataset.addSeries(series);
}
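
Here getPercentile(50) supplies the median of each NFE bucket, while the 25th and 75th percentiles form the interquartile band that the YIntervalSeries renders around the median line.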

From source file:gr.iti.mklab.reveal.forensics.maps.dq.DQExtractor.java

public void detectDQDiscontinuities() {

    int imWidth = dcts.length;
    int imHeight = dcts[0].length;

    int[] p_h_avg = new int[maxCoeffs];
    int[] p_h_fft = new int[maxCoeffs];
    int[] p_final = new int[maxCoeffs];

    double[][] pTampered = new double[maxCoeffs][];
    double[][] pUntampered = new double[maxCoeffs][];

    for (int coeffIndex = 0; coeffIndex < maxCoeffs; coeffIndex++) {

        int coe = coeff[coeffIndex];
        int startY = coe % 8 - 1;
        if (startY == -1) {
            startY = 8;
        }
        int startX = (coe - 1) / 8; // integer division already floors

        List<Integer> selectedCoeffs = new ArrayList<Integer>();
        for (int ii = startX; ii < imWidth; ii += 8) {
            for (int jj = startY; jj < imHeight; jj += 8) {
                selectedCoeffs.add(dcts[ii][jj]);
            }
        }

        int minCoeffValue = Collections.min(selectedCoeffs);
        int maxCoeffValue = Collections.max(selectedCoeffs);
        int s_0;
        Double[] coeffHist = new Double[0];
        if (maxCoeffValue - minCoeffValue > 0) {
            //will be a power of 2 to allow for fft (zero padded)
            int trueHistRange = maxCoeffValue - minCoeffValue + 1;
            //int histLength = trueHistRange;
            int histLength = (int) Math.pow(2, Math.ceil(Math.log(trueHistRange) / Math.log(2)));

            coeffHist = new Double[histLength];

            for (int ii = 0; ii < coeffHist.length; ii++) {
                coeffHist[ii] = 0.0;
            }

            for (Integer selectedCoeff : selectedCoeffs) {
                coeffHist[selectedCoeff - minCoeffValue] += 1;
            }

            List<Double> coeffHistList = Arrays.asList(coeffHist);
            s_0 = coeffHistList.indexOf(Collections.max(coeffHistList));

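            // For each candidate period, sample the histogram at that spacing
            // outward from its peak bin s_0 and average the sampled values;
            // the candidate with the largest average is one period estimate.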
            List<Double> h = new ArrayList<>();
            DescriptiveStatistics vals;
            for (int coeffInd = 1; coeffInd < coeffHistList.size(); coeffInd++) {
                vals = new DescriptiveStatistics();
                for (int leapInd = s_0; leapInd < coeffHistList.size(); leapInd += coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                for (int leapInd = s_0 - coeffInd; leapInd >= 0; leapInd -= coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                h.add(vals.getMean());
            }
            p_h_avg[coeffIndex] = (h.indexOf(Collections.max(h)));

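            // Second period estimate: find the dominant non-DC peak in the
            // magnitude spectrum of the histogram's FFT.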
            FastFourierTransformer fastFourierTransformer = new FastFourierTransformer(
                    DftNormalization.STANDARD);
            Complex[] fft = fastFourierTransformer.transform(ArrayUtils.toPrimitive(coeffHist),
                    TransformType.FORWARD);

            double[] power = new double[fft.length];
            for (int ii = 0; ii < power.length; ii++) {
                power[ii] = fft[ii].abs();
            }

            //Find first local minimum, to bypass DC peak
            double DC = power[0];
            int FreqValley = 1;
            while (FreqValley < power.length - 1 && power[FreqValley] >= power[FreqValley + 1]) {
                FreqValley++;
            }

            int maxFFTInd = 0;
            double maxFFTVal = 0;
            double minFFTVal = Double.MAX_VALUE;
            for (int ii = FreqValley; ii < power.length / 2; ii++) {
                if (power[ii] > maxFFTVal) {
                    maxFFTInd = ii;
                    maxFFTVal = power[ii];
                }
                if (power[ii] < minFFTVal) {
                    minFFTVal = power[ii];
                }
            }
            if (maxFFTInd == 0 || maxFFTVal < (DC / 5) || minFFTVal / maxFFTVal > 0.9) {
                p_h_fft[coeffIndex] = 1;
            } else {
                p_h_fft[coeffIndex] = Math.round((float) coeffHist.length / maxFFTInd); // float division so the rounding is meaningful
            }

        } else {
            p_h_avg[coeffIndex] = 1;
            p_h_fft[coeffIndex] = 1;
            s_0 = 0;
        }
        if (p_h_avg[coeffIndex] < p_h_fft[coeffIndex]) {
            p_final[coeffIndex] = p_h_avg[coeffIndex];
        } else {
            p_final[coeffIndex] = p_h_fft[coeffIndex];
        }

        pTampered[coeffIndex] = new double[selectedCoeffs.size()];
        pUntampered[coeffIndex] = new double[selectedCoeffs.size()];
        int[] adjustedCoeffs = new int[selectedCoeffs.size()];
        int[] period_start = new int[selectedCoeffs.size()];
        int[] period;
        int[] num = new int[selectedCoeffs.size()];
        int[] denom = new int[selectedCoeffs.size()];
        double[] P_u = new double[selectedCoeffs.size()];
        double[] P_t = new double[selectedCoeffs.size()];

        if (p_final[coeffIndex] != 1) {
            for (int ii = 0; ii < adjustedCoeffs.length; ii++) {
                adjustedCoeffs[ii] = selectedCoeffs.get(ii) - minCoeffValue;
                period_start[ii] = adjustedCoeffs[ii] - rem(adjustedCoeffs[ii] - s_0, p_final[coeffIndex]);
            }
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                if (period_start[kk] > s_0) {
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] + ii;
                        if (period[ii] >= coeffHist.length) {
                            period[ii] = period[ii] - p_final[coeffIndex];
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                } else {
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] - ii;
                        if (period_start[kk] - p_final[coeffIndex] + 1 <= 0) {
                            if (period[ii] <= 0) {
                                period[ii] = period[ii] + p_final[coeffIndex];
                            }
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                }

                P_u[kk] = ((double) num[kk] / denom[kk]);
                P_t[kk] = (1.0 / p_final[coeffIndex]);
                if (P_u[kk] + P_t[kk] != 0) {
                    pTampered[coeffIndex][kk] = P_t[kk] / (P_u[kk] + P_t[kk]);
                    pUntampered[coeffIndex][kk] = P_u[kk] / (P_u[kk] + P_t[kk]);

                } else {
                    pTampered[coeffIndex][kk] = 0.5;
                    pUntampered[coeffIndex][kk] = 0.5;
                }
            }

        } else {
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                pTampered[coeffIndex][kk] = 0.5;
                pUntampered[coeffIndex][kk] = 0.5;
            }
        }

    }
    double[] pTamperedOverall = new double[pTampered[0].length];
    double pTamperedProd;
    double pUntamperedProd;

    for (int locationIndex = 0; locationIndex < pTampered[0].length; locationIndex++) {
        pTamperedProd = 1;
        pUntamperedProd = 1;
        for (int coeffIndex = 0; coeffIndex < pTampered.length; coeffIndex++) {
            pTamperedProd = pTamperedProd * pTampered[coeffIndex][locationIndex];
            pUntamperedProd = pUntamperedProd * pUntampered[coeffIndex][locationIndex];
        }
        if (pTamperedProd + pUntamperedProd != 0) {
            pTamperedOverall[locationIndex] = pTamperedProd / (pTamperedProd + pUntamperedProd);
        } else {
            pTamperedOverall[locationIndex] = 0;
        }
    }

    int blocksH = imWidth / 8;
    int blocksV = imHeight / 8;
    double[][] outputMap = new double[blocksV][blocksH];
    for (int kk = 0; kk < pTamperedOverall.length; kk++) {
        outputMap[kk % blocksV][kk / blocksV] = pTamperedOverall[kk];
        if (pTamperedOverall[kk] > maxProbValue) {
            maxProbValue = pTamperedOverall[kk];
        }
        if (pTamperedOverall[kk] < minProbValue) {
            minProbValue = pTamperedOverall[kk];
        }
    }
    probabilityMap = outputMap;
    BufferedImage outputIm = visualizeWithJet(outputMap);
    // output
    displaySurface = outputIm;
}

From source file:gobblin.salesforce.SalesforceSource.java

String generateSpecifiedPartitions(Histogram histogram, int maxPartitions, long expectedHighWatermark) {
    long interval = DoubleMath.roundToLong((double) histogram.totalRecordCount / maxPartitions,
            RoundingMode.CEILING);
    int totalGroups = histogram.getGroups().size();

    log.info("Histogram total record count: " + histogram.totalRecordCount);
    log.info("Histogram total groups: " + totalGroups);
    log.info("maxPartitions: " + maxPartitions);
    log.info("interval: " + interval);

    List<HistogramGroup> groups = histogram.getGroups();
    List<String> partitionPoints = new ArrayList<>();
    DescriptiveStatistics statistics = new DescriptiveStatistics();

    int count = 0;
    HistogramGroup group;
    Iterator<HistogramGroup> it = groups.iterator();
    while (it.hasNext()) {
        group = it.next();
        if (count == 0) {
            // Add a new partition point;
            partitionPoints
                    .add(Utils.toDateTimeFormat(group.getKey(), DAY_FORMAT, Partitioner.WATERMARKTIMEFORMAT));
        }

        // Move the group into a new partition if the combined count would reach twice the interval
        if (count != 0 && count + group.count >= 2 * interval) {
            // Summarize current group
            statistics.addValue(count);
            // A step-in start
            partitionPoints
                    .add(Utils.toDateTimeFormat(group.getKey(), DAY_FORMAT, Partitioner.WATERMARKTIMEFORMAT));
            count = group.count;
        } else {
            // Add group into current partition
            count += group.count;
        }

        if (count >= interval) {
            // Summarize current group
            statistics.addValue(count);
            // A fresh start next time
            count = 0;
        }
    }

    // count == 0 means the last group already closed out a partition
    if (count == 0) {
        // Exchange the last partition point with global high watermark
        partitionPoints.set(partitionPoints.size() - 1, Long.toString(expectedHighWatermark));
    } else {
        // Summarize last group
        statistics.addValue(count);
        // Add global high watermark as last point
        partitionPoints.add(Long.toString(expectedHighWatermark));
    }

    log.info("Dynamic partitioning statistics: ");
    log.info("data: " + Arrays.toString(statistics.getValues()));
    log.info(statistics.toString());
    String specifiedPartitions = Joiner.on(",").join(partitionPoints);
    log.info("Calculated specified partitions: " + specifiedPartitions);
    return specifiedPartitions;
}
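
The statistics object here only feeds the log output: statistics.toString() prints commons-math's built-in summary (n, min, max, mean, standard deviation, and related fields) of the per-partition record counts.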

From source file:de.fhg.igd.iva.explorer.main.CompareViewPanel.java

private DescriptiveStatistics computeStats(ColormapQuality metric) {
    DescriptiveStatistics stats = new DescriptiveStatistics();

    for (Colormap cm : table.rowKeySet()) {
        double quality = table.get(cm, metric);
        stats.addValue(quality);
    }

    return stats;
}

From source file:com.linuxbox.enkive.statistics.consolidation.EmbeddedConsolidator.java

@Override
protected void consolidateMaps(Map<String, Object> consolidatedData, List<Map<String, Object>> serviceData,
        ConsolidationKeyHandler keyDef, LinkedList<String> dataPath) {
    Map<String, Object> statConsolidatedData = new HashMap<String, Object>();
    if (keyDef.getMethods() != null) {
        // loop over stat consolidation methods
        Collection<String> methods = new LinkedList<String>(keyDef.getMethods());
        if (!keyDef.isPoint()) {
            methods.add(CONSOLIDATION_SUM);
        }
        for (String method : methods) {
            DescriptiveStatistics statsMaker = new DescriptiveStatistics();
            Object dataVal = null;
            // loop over data for consolidation Method
            LinkedList<String> tempPath = new LinkedList<String>(dataPath);
            if (keyDef.isPoint()) {
                tempPath.add(method);
            } else {
                tempPath.add(CONSOLIDATION_SUM);
            }
            double input = -1;
            for (Map<String, Object> dataMap : serviceData) {
                // go to end of path & get variable
                input = -1;
                dataVal = getDataVal(dataMap, tempPath);
                if (dataVal != null) {
                    // extract relevant data from end of path
                    input = statToDouble(dataVal);
                    if (input > -1) {
                        // add to stat maker if relevant
                        statsMaker.addValue(input);
                    }
                }
            }
            // store in map if method is valid
            methodMapBuilder(method, statsMaker, statConsolidatedData);
        }

        // store stat methods' data on main consolidated map
        putOnPath(dataPath, consolidatedData, statConsolidatedData);
    }
}

From source file:azkaban.metric.inmemoryemitter.InMemoryMetricEmitter.java

private DescriptiveStatistics getDescriptiveStatistics(final LinkedList<InMemoryHistoryNode> selectedLists)
        throws ClassCastException {
    DescriptiveStatistics descStats = new DescriptiveStatistics();
    for (InMemoryHistoryNode node : selectedLists) {
        descStats.addValue(((Number) node.getValue()).doubleValue());
    }
    return descStats;
}

From source file:knop.psfj.heatmap.FractionnedSpace.java

/**
 * Gets the mean number of beads.
 *
 * @return the mean number of beads
 */
public double getMeanNumberOfBeads() {
    DescriptiveStatistics stats = new DescriptiveStatistics();

    for (Fraction f : this) {
        stats.addValue(f.size());
    }
    return stats.getMean();
}