Example usage for org.apache.commons.math3.stat.descriptive.moment.Mean#Mean()

Introduction

On this page you can find example usage of the org.apache.commons.math3.stat.descriptive.moment.Mean no-argument constructor, Mean().

Prototype

public Mean() 

Document

Constructs a Mean.
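
Before the project-specific examples below, here is a minimal, self-contained sketch of the two ways this no-argument constructor is typically used: batch evaluation over a double[], and storeless, incremental accumulation. The sample values are invented for illustration.

import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class MeanDemo {
    public static void main(String[] args) {
        double[] values = { 1.0, 2.0, 3.0, 4.0 };

        // Batch: compute the mean of a whole array in one call.
        double batchMean = new Mean().evaluate(values);

        // Storeless: feed values one at a time, then read the result.
        Mean runningMean = new Mean();
        for (double v : values) {
            runningMean.increment(v);
        }

        System.out.println(batchMean);               // 2.5
        System.out.println(runningMean.getResult()); // 2.5
    }
}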

Usage

From source file:org.drugis.mtc.summary.MCMCMultivariateNormalSummary.java

private void calculateResults() {
    if (!isReady()) {
        return;
    }
    List<List<Double>> sampleCache = new ArrayList<List<Double>>();
    for (int i = 0; i < getParameters().length; ++i) {
        List<Double> samples = SummaryUtil.getAllChainsLastHalfSamples(d_results, getParameters()[i]);
        sampleCache.add(samples);
        d_means[i] = SummaryUtil.evaluate(new Mean(), samples);
    }
    StorelessCovariance cov = new StorelessCovariance(getParameters().length);
    double[] rowData = new double[getParameters().length];
    for (int row = 0; row < sampleCache.get(0).size(); ++row) {
        for (int col = 0; col < getParameters().length; ++col) {
            rowData[col] = sampleCache.get(col).get(row);
        }
        cov.increment(rowData);
    }
    d_covMatrix = cov.getData();
    boolean wasDefined = d_isDefined;
    d_isDefined = true;
    firePropertyChange(PROPERTY_DEFINED, wasDefined, d_isDefined);
    firePropertyChange(PROPERTY_MEAN_VECTOR, null, d_means);
    firePropertyChange(PROPERTY_COVARIANCE_MATRIX, null, d_covMatrix);
}
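
SummaryUtil here is project-specific; assuming its evaluate(new Mean(), samples) simply applies the statistic to the sample list, a plain commons-math equivalent might look like this sketch (ListMeanDemo and meanOf are hypothetical names):

import java.util.Arrays;
import java.util.List;
import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class ListMeanDemo {
    // Hypothetical stand-in for SummaryUtil.evaluate(new Mean(), samples).
    static double meanOf(List<Double> samples) {
        // Unbox the list into the double[] that Mean.evaluate expects.
        double[] values = samples.stream().mapToDouble(Double::doubleValue).toArray();
        return new Mean().evaluate(values);
    }

    public static void main(String[] args) {
        System.out.println(meanOf(Arrays.asList(1.0, 2.0, 3.0))); // 2.0
    }
}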

From source file:org.drugis.mtc.summary.NormalSummary.java

private synchronized void calculateResults() {
    if (!isReady())
        return;
    List<Double> samples = SummaryUtil.getAllChainsLastHalfSamples(d_results, d_parameter);
    d_mean = SummaryUtil.evaluate(new Mean(), samples);
    d_stdev = SummaryUtil.evaluate(new StandardDeviation(), samples);
    d_defined = true;
    firePropertyChange(PROPERTY_DEFINED, null, d_defined);
    firePropertyChange(PROPERTY_MEAN, null, d_mean);
    firePropertyChange(PROPERTY_STANDARD_DEVIATION, null, d_stdev);
}
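
Assuming the same SummaryUtil behavior, the Mean/StandardDeviation pair above has this plain commons-math form (values invented):

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class MeanSdDemo {
    public static void main(String[] args) {
        double[] samples = { 1.0, 2.0, 3.0, 4.0 };
        System.out.println(new Mean().evaluate(samples));              // 2.5
        // StandardDeviation defaults to the bias-corrected (n - 1) form.
        System.out.println(new StandardDeviation().evaluate(samples)); // ~1.29
    }
}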

From source file:org.drugis.mtc.yadas.ContinuousDataIT.java

@Before
public void setUp() throws JAXBException {
    // data from Welton et al., Am J Epidemiol 2009;169:1158-1165
    d_m = -1.362791; // mean(d)
    d_s = 0.982033; // sd(d)
    d_mean = new Mean();
    d_stdDev = new StandardDeviation();

    InputStream is = ContinuousDataIT.class.getResourceAsStream("weltonBP.xml");
    d_network = JAXBHandler.readNetwork(is);

    d_psych = new Treatment("psych");
    d_usual = new Treatment("usual");
}

From source file:org.hawkular.datamining.forecast.stats.AutoCorrelationFunction.java

private static double[] evaluate(final double[] x, int maxLag, boolean correlation) {
    // max lag = length - 1
    if (maxLag >= x.length) {
        throw new IllegalArgumentException("Lag is higher than the number of observations");
    }

    double mean = new Mean().evaluate(x);
    double var = correlation ? new Variance().evaluate(x, mean) : 1;
    int lengthForMean = correlation ? x.length - 1 : x.length;

    double[] acf = new double[maxLag + 1];
    for (int lag = 0; lag < maxLag + 1; lag++) {

        double sum = 0;
        for (int i = 0; i + lag < x.length; i++) {

            sum += (x[i] - mean) * (x[i + lag] - mean);
        }

        sum /= lengthForMean;
        acf[lag] = sum / var;
    }

    return acf;
}
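
Since the method above is private, here is a standalone sketch of the same idea: the lag-1 autocorrelation of a short series computed directly with Mean and Variance (the series is invented):

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.Variance;

public class AcfDemo {
    public static void main(String[] args) {
        double[] x = { 1.0, 2.0, 3.0, 2.0, 1.0 }; // invented series

        double mean = new Mean().evaluate(x);
        // Variance.evaluate(values, mean) reuses the precomputed mean,
        // just as the snippet above does.
        double var = new Variance().evaluate(x, mean);

        double sum = 0;
        for (int i = 0; i + 1 < x.length; i++) {
            sum += (x[i] - mean) * (x[i + 1] - mean);
        }
        // Matches the snippet's normalization: divide by (n - 1), then by the variance.
        double acf1 = (sum / (x.length - 1)) / var;
        System.out.println(acf1);
    }
}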

From source file:org.hawkular.datamining.forecast.utils.AdditiveSeasonalDecomposition.java

/**
 *
 * @return Seasonal indices
 */
public double[] decompose() {
    SimpleMovingAverage movingAverage = new SimpleMovingAverage(original, periods, true);
    trend = movingAverage.learn();

    // subtract trend from original (detrend)
    List<DataPoint> detrended = new ArrayList<>(original.size());
    for (int i = 0; i < original.size(); i++) {
        Double trendValue = trend.get(i).getValue();
        Double value = trendValue != null ? original.get(i).getValue() - trendValue : null;
        detrended.add(new DataPoint(value, original.get(i).getTimestamp()));
    }

    /*
     * Seasonal component: average the detrended values at each
     * position in the cycle.
     */
    int completeSeasons = original.size() / periods;
    completeSeasons += original.size() % periods != 0 ? 1 : 0; // count a trailing partial season
    seasonalIndices = new double[periods];
    for (int period = 0; period < periods; period++) {
        int seasonsWithoutNull = 0;
        for (int season = 0; season < completeSeasons
                && period + (periods * season) < original.size(); season++) {
            if (detrended.get(period + (periods * season)).getValue() == null) {
                continue;
            }
            seasonalIndices[period] += detrended.get(period + (periods * season)).getValue();
            seasonsWithoutNull++;
        }
        seasonalIndices[period] = seasonalIndices[period] / (double) seasonsWithoutNull;
    }

    // subtract mean
    double mean = new Mean().evaluate(seasonalIndices);
    for (int i = 0; i < seasonalIndices.length; i++) {
        seasonalIndices[i] = seasonalIndices[i] - mean;
    }

    return Arrays.copyOf(seasonalIndices, seasonalIndices.length);
}
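
The final step above, subtracting the overall mean so the indices center on zero, works on any double[]; a minimal sketch (values invented):

import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class CenterDemo {
    public static void main(String[] args) {
        double[] indices = { 1.0, -2.0, 4.0 }; // invented values
        double mean = new Mean().evaluate(indices);
        for (int i = 0; i < indices.length; i++) {
            indices[i] -= mean; // center on zero
        }
        System.out.println(indices[0] + indices[1] + indices[2]); // ~0.0
    }
}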

From source file:org.hawkular.metrics.core.impl.cassandra.GaugeBucketedOutputMapper.java

@Override
protected GaugeBucketDataPoint newPointInstance(long from, long to, List<GaugeData> gaugeDatas) {
    double[] values = new double[gaugeDatas.size()];
    for (ListIterator<GaugeData> iterator = gaugeDatas.listIterator(); iterator.hasNext();) {
        GaugeData gaugeData = iterator.next();
        values[iterator.previousIndex()] = gaugeData.getValue();
    }

    Percentile percentile = new Percentile();
    percentile.setData(values);

    return new GaugeBucketDataPoint.Builder(from, to).setMin(new Min().evaluate(values))
            .setAvg(new Mean().evaluate(values)).setMedian(percentile.evaluate(50.0))
            .setMax(new Max().evaluate(values)).setPercentile95th(percentile.evaluate(95.0)).build();
}

From source file:org.hawkular.metrics.core.impl.cassandra.NumericBucketedOutputMapper.java

@Override
protected NumericBucketDataPoint newPointInstance(long from, long to, List<NumericData> numericDatas) {
    double[] values = new double[numericDatas.size()];
    for (ListIterator<NumericData> iterator = numericDatas.listIterator(); iterator.hasNext();) {
        NumericData numericData = iterator.next();
        values[iterator.previousIndex()] = numericData.getValue();
    }

    Percentile percentile = new Percentile();
    percentile.setData(values);

    return new NumericBucketDataPoint.Builder(from, to).setMin(new Min().evaluate(values))
            .setAvg(new Mean().evaluate(values)).setMedian(percentile.evaluate(50.0))
            .setMax(new Max().evaluate(values)).setPercentile95th(percentile.evaluate(95.0)).build();
}

From source file:org.hawkular.metrics.core.impl.GaugeBucketedOutputMapper.java

@Override
protected GaugeBucketDataPoint newPointInstance(long from, long to, List<DataPoint<Double>> dataPoints) {
    double[] values = new double[dataPoints.size()];
    for (ListIterator<DataPoint<Double>> iterator = dataPoints.listIterator(); iterator.hasNext();) {
        DataPoint<Double> gaugeData = iterator.next();
        values[iterator.previousIndex()] = gaugeData.getValue();
    }

    Percentile percentile = new Percentile();
    percentile.setData(values);

    return new GaugeBucketDataPoint.Builder(from, to).setMin(new Min().evaluate(values))
            .setAvg(new Mean().evaluate(values)).setMedian(percentile.evaluate(50.0))
            .setMax(new Max().evaluate(values)).setPercentile95th(percentile.evaluate(95.0)).build();
}
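
Stripped of the Hawkular types, the min/avg/median/max/95th-percentile pattern shared by the three mappers above reduces to this self-contained sketch (values invented):

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.rank.Max;
import org.apache.commons.math3.stat.descriptive.rank.Min;
import org.apache.commons.math3.stat.descriptive.rank.Percentile;

public class BucketStatsDemo {
    public static void main(String[] args) {
        double[] values = { 10.0, 20.0, 30.0, 40.0, 50.0 };

        // Percentile stores the data once and can then be queried
        // for any number of quantiles.
        Percentile percentile = new Percentile();
        percentile.setData(values);

        System.out.println(new Min().evaluate(values));  // 10.0
        System.out.println(new Mean().evaluate(values)); // 30.0
        System.out.println(percentile.evaluate(50.0));   // median: 30.0
        System.out.println(new Max().evaluate(values));  // 50.0
        System.out.println(percentile.evaluate(95.0));   // 95th percentile
    }
}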

From source file:org.hawkular.metrics.core.service.metrics.BaseMetricsITest.java

protected <T extends Number> NumericBucketPoint createSingleBucket(List<? extends DataPoint<T>> combinedData,
        DateTime start, DateTime end) {
    T expectedMin = combinedData.stream()
            .min((x, y) -> Double.compare(x.getValue().doubleValue(), y.getValue().doubleValue())).get()
            .getValue();
    T expectedMax = combinedData.stream()
            .max((x, y) -> Double.compare(x.getValue().doubleValue(), y.getValue().doubleValue())).get()
            .getValue();
    PercentileWrapper expectedMedian = NumericDataPointCollector.createPercentile.apply(50.0);
    Mean expectedAverage = new Mean();
    Sum expectedSamples = new Sum();
    Sum expectedSum = new Sum();
    combinedData.stream().forEach(arg -> {
        expectedMedian.addValue(arg.getValue().doubleValue());
        expectedAverage.increment(arg.getValue().doubleValue());
        expectedSamples.increment(1);
        expectedSum.increment(arg.getValue().doubleValue());
    });

    return new NumericBucketPoint.Builder(start.getMillis(), end.getMillis()).setMin(expectedMin.doubleValue())
            .setMax(expectedMax.doubleValue()).setAvg(expectedAverage.getResult())
            .setMedian(expectedMedian.getResult()).setSum(expectedSum.getResult())
            .setSamples((int) expectedSamples.getResult()).build();
}
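
The test above leans on Mean being a storeless statistic: increment() folds values in one at a time without buffering them. A minimal sketch of that streaming style (values invented):

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.summary.Sum;

public class StreamingStatsDemo {
    public static void main(String[] args) {
        double[] values = { 2.0, 4.0, 6.0 };

        Mean mean = new Mean();
        Sum sum = new Sum();
        for (double v : values) {
            // Storeless updates: nothing is retained, so this scales
            // to arbitrarily long streams.
            mean.increment(v);
            sum.increment(v);
        }

        System.out.println(mean.getResult()); // 4.0
        System.out.println(sum.getResult());  // 12.0
    }
}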

From source file:org.hawkular.metrics.core.service.MetricsServiceITest.java

private <T extends Number> NumericBucketPoint createSingleBucket(List<? extends DataPoint<T>> combinedData,
        DateTime start, DateTime end) {
    T expectedMin = combinedData.stream()
            .min((x, y) -> Double.compare(x.getValue().doubleValue(), y.getValue().doubleValue())).get()
            .getValue();
    T expectedMax = combinedData.stream()
            .max((x, y) -> Double.compare(x.getValue().doubleValue(), y.getValue().doubleValue())).get()
            .getValue();
    PercentileWrapper expectedMedian = NumericDataPointCollector.createPercentile.apply(50.0);
    Mean expectedAverage = new Mean();
    Sum expectedSamples = new Sum();
    combinedData.stream().forEach(arg -> {
        expectedMedian.addValue(arg.getValue().doubleValue());
        expectedAverage.increment(arg.getValue().doubleValue());
        expectedSamples.increment(1);
    });

    return new NumericBucketPoint.Builder(start.getMillis(), end.getMillis()).setMin(expectedMin.doubleValue())
            .setMax(expectedMax.doubleValue()).setAvg(expectedAverage.getResult())
            .setMedian(expectedMedian.getResult())
            .setSamples((int) expectedSamples.getResult()).build();
}