Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics addValue

Introduction

On this page you can find example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics addValue.

Prototype

public void addValue(double v) 

Document

Adds the value to the dataset.
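
Below is a minimal, self-contained sketch of how addValue is typically used; the class name AddValueExample is chosen for illustration only. It also shows the windowed mode, in which addValue discards the oldest stored value once the configured window size is reached.

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class AddValueExample {
    public static void main(String[] args) {
        // Unbounded dataset: every added value is retained.
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (double v : new double[] { 1.0, 2.0, 3.0, 4.0, 5.0 }) {
            stats.addValue(v);
        }
        System.out.println("mean   = " + stats.getMean());              // 3.0
        System.out.println("stddev = " + stats.getStandardDeviation());
        System.out.println("median = " + stats.getPercentile(50));      // 3.0

        // Windowed dataset: with a window size of 3, adding a fourth value
        // discards the first one, so only {2.0, 3.0, 4.0} remain.
        DescriptiveStatistics window = new DescriptiveStatistics(3);
        for (double v : new double[] { 1.0, 2.0, 3.0, 4.0 }) {
            window.addValue(v);
        }
        System.out.println("windowed mean = " + window.getMean());      // 3.0
    }
}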

Usage

From source file:ijfx.plugins.MedianProjection.java

@Override
public <T extends RealType<T>> void process(List<T> list, Sampler<T> sampler) {
    DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics();
    list.stream().forEach((t) -> descriptiveStatistics.addValue(t.getRealDouble()));

    // Set the result to the median (50th percentile) of the collected values
    sampler.get().setReal(descriptiveStatistics.getPercentile(50));
}

From source file:ijfx.plugins.projection.StandardDeviationProjection.java

@Override
public <T extends RealType<T>> void process(List<T> list, Sampler<T> sampler) {
    DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics();
    list.stream().forEach((t) -> descriptiveStatistics.addValue(t.getRealDouble()));

    // Set the result to the standard deviation of the collected values
    sampler.get().setReal(descriptiveStatistics.getStandardDeviation());
}

From source file:com.fpuna.preproceso.PreprocesoTS.java

private static void calculoFeatures(Registro[] muestras, String activity) {

    DescriptiveStatistics stats_x = new DescriptiveStatistics();
    DescriptiveStatistics stats_y = new DescriptiveStatistics();
    DescriptiveStatistics stats_z = new DescriptiveStatistics();
    //DescriptiveStatistics stats_m1 = new DescriptiveStatistics();
    //DescriptiveStatistics stats_m2 = new DescriptiveStatistics();
    double[] fft_x;
    double[] fft_y;
    double[] fft_z;
    double[] AR_4;

    for (int i = 0; i < muestras.length; i++) {
        stats_x.addValue(muestras[i].getValor_x());
        stats_y.addValue(muestras[i].getValor_y());
        stats_z.addValue(muestras[i].getValor_z());
    }

    //********* FFT *********
    fft_x = Util.transform(stats_x.getValues());
    fft_y = Util.transform(stats_y.getValues());
    fft_z = Util.transform(stats_z.getValues());

    //******************* X axis *******************//
    //mean(s) - Arithmetic mean
    System.out.print(stats_x.getMean() + ",");
    //std(s) - Standard deviation
    System.out.print(stats_x.getStandardDeviation() + ",");
    //mad(s) - Median absolute deviation
    //
    //max(s) - Largest values in array
    System.out.print(stats_x.getMax() + ",");
    //min(s) - Smallest value in array
    System.out.print(stats_x.getMin() + ",");
    //skewness(s) - Frequency signal Skewness
    System.out.print(stats_x.getSkewness() + ",");
    //kurtosis(s) - Frequency signal Kurtosis
    System.out.print(stats_x.getKurtosis() + ",");
    //energy(s) - Average sum of the squares
    System.out.print(stats_x.getSumsq() / stats_x.getN() + ",");
    //entropy(s) - Signal Entropy
    System.out.print(Util.calculateShannonEntropy(fft_x) + ",");
    //iqr (s) Interquartile range
    System.out.print(stats_x.getPercentile(75) - stats_x.getPercentile(25) + ",");
    try {
        //autoregression (s) -4th order Burg Autoregression coefficients
        AR_4 = AutoRegression.calculateARCoefficients(stats_x.getValues(), 4, true);
        System.out.print(AR_4[0] + ",");
        System.out.print(AR_4[1] + ",");
        System.out.print(AR_4[2] + ",");
        System.out.print(AR_4[3] + ",");
    } catch (Exception ex) {
        Logger.getLogger(PreprocesoTS.class.getName()).log(Level.SEVERE, null, ex);
    }
    //meanFreq(s) - Frequency signal weighted average
    System.out.print(Util.meanFreq(fft_x, stats_x.getValues()) + ",");

    //******************* Y axis *******************//
    //mean(s) - Arithmetic mean
    System.out.print(stats_y.getMean() + ",");
    //std(s) - Standard deviation
    System.out.print(stats_y.getStandardDeviation() + ",");
    //mad(s) - Median absolute deviation
    //
    //max(s) - Largest values in array
    System.out.print(stats_y.getMax() + ",");
    //min(s) - Smallest value in array
    System.out.print(stats_y.getMin() + ",");
    //skewness(s) - Frequency signal Skewness
    System.out.print(stats_y.getSkewness() + ",");
    //kurtosis(s) - Frequency signal Kurtosis
    System.out.print(stats_y.getKurtosis() + ",");
    //energy(s) - Average sum of the squares
    System.out.print(stats_y.getSumsq() / stats_y.getN() + ",");
    //entropy(s) - Signal Entropy
    System.out.print(Util.calculateShannonEntropy(fft_y) + ",");
    //iqr (s) Interquartile range
    System.out.print(stats_y.getPercentile(75) - stats_y.getPercentile(25) + ",");
    try {
        //autoregression (s) -4th order Burg Autoregression coefficients
        AR_4 = AutoRegression.calculateARCoefficients(stats_y.getValues(), 4, true);
        System.out.print(AR_4[0] + ",");
        System.out.print(AR_4[1] + ",");
        System.out.print(AR_4[2] + ",");
        System.out.print(AR_4[3] + ",");
    } catch (Exception ex) {
        Logger.getLogger(PreprocesoTS.class.getName()).log(Level.SEVERE, null, ex);
    }
    //meanFreq(s) - Frequency signal weighted average
    System.out.print(Util.meanFreq(fft_y, stats_y.getValues()) + ",");

    //******************* Z axis *******************//
    //mean(s) - Arithmetic mean
    System.out.print(stats_z.getMean() + ",");
    //std(s) - Standard deviation
    System.out.print(stats_z.getStandardDeviation() + ",");
    //mad(s) - Median absolute deviation
    //
    //max(s) - Largest values in array
    System.out.print(stats_z.getMax() + ",");
    //min(s) - Smallest value in array
    System.out.print(stats_z.getMin() + ",");
    //skewness(s) - Frequency signal Skewness
    System.out.print(stats_z.getSkewness() + ",");
    //kurtosis(s) - Frequency signal Kurtosis
    System.out.print(stats_z.getKurtosis() + ",");
    //energy(s) - Average sum of the squares
    System.out.print(stats_z.getSumsq() / stats_z.getN() + ",");
    //entropy(s) - Signal Entropy
    System.out.print(Util.calculateShannonEntropy(fft_z) + ",");
    //iqr (s) Interquartile range
    System.out.print(stats_z.getPercentile(75) - stats_z.getPercentile(25) + ",");
    try {
        //autoregression (s) -4th order Burg Autoregression coefficients
        AR_4 = AutoRegression.calculateARCoefficients(stats_z.getValues(), 4, true);
        System.out.print(AR_4[0] + ",");
        System.out.print(AR_4[1] + ",");
        System.out.print(AR_4[2] + ",");
        System.out.print(AR_4[3] + ",");
    } catch (Exception ex) {
        Logger.getLogger(PreprocesoTS.class.getName()).log(Level.SEVERE, null, ex);
    }
    //meanFreq(s) - Frequency signal weighted average
    System.out.print(Util.meanFreq(fft_z, stats_z.getValues()) + ",");

    //******************* Combined features *******************/
    //sma(s1; s2; s3) - Signal magnitude area
    System.out.print(Util.sma(stats_x.getValues(), stats_y.getValues(), stats_z.getValues()) + ",");
    //correlation(s1; s2) - Pearson Correlation coefficient
    System.out.print(new PearsonsCorrelation().correlation(stats_x.getValues(), stats_y.getValues()) + ",");
    System.out.print(new PearsonsCorrelation().correlation(stats_x.getValues(), stats_z.getValues()) + ",");
    System.out.print(new PearsonsCorrelation().correlation(stats_y.getValues(), stats_z.getValues()) + ",");

    //******************* Activity *******************/
    System.out.print(activity);
    System.out.print("\n");
}

From source file:cc.kave.commons.pointsto.evaluation.PointsToSetEvaluation.java

public void run(Path contextsDir) throws IOException {
    StatementCounterVisitor stmtCounterVisitor = new StatementCounterVisitor();
    List<Context> contexts = getSamples(contextsDir).stream()
            .filter(cxt -> cxt.getSST().accept(stmtCounterVisitor, null) > 0).collect(Collectors.toList());
    log("Using %d contexts for evaluation\n", contexts.size());

    PointsToUsageExtractor extractor = new PointsToUsageExtractor();
    for (Context context : contexts) {
        PointstoSetSizeAnalysis analysis = new PointstoSetSizeAnalysis();
        extractor.extract(analysis.compute(context));
        results.addAll(analysis.getSetSizes());
    }

    DescriptiveStatistics statistics = new DescriptiveStatistics();
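    // Feed each recorded points-to set size into the statistics accumulator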
    for (Integer setSize : results) {
        statistics.addValue(setSize.doubleValue());
    }
    log("mean: %.2f\n", statistics.getMean());
    log("stddev: %.2f\n", statistics.getStandardDeviation());
    log("min/max: %.2f/%.2f\n", statistics.getMin(), statistics.getMax());
}

From source file:mase.me.MEGenerationalStat.java

@Override
public void postBreedingStatistics(EvolutionState state) {
    super.postBreedingStatistics(state);
    MESubpopulation pop = (MESubpopulation) state.population.subpops[0];
    DescriptiveStatistics fit = new DescriptiveStatistics();
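    // Collect the fitness score of every individual currently stored in the map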
    for (Individual ind : pop.map.values()) {
        fit.addValue(((ExpandedFitness) ind.fitness).getFitnessScore());
    }
    state.output.println(state.generation + " " + pop.map.keySet().size() + " " + pop.map.size() + " "
            + fit.getMin() + " " + fit.getMean() + " " + fit.getMax() + " " + pop.newInRepo, log);
    state.output.message("Repertoire size: " + pop.map.keySet().size() + " | New: " + pop.newInRepo
            + " | Avg. fitness: " + new DecimalFormat("0.0000").format(fit.getMean()));
}

From source file:io.hops.experiments.results.compiler.RawBMResultAggregator.java

public static RawBMResults processSlaveResponses(Collection<Object> responses,
        RawBenchmarkCommand.Request request, Configuration args) {
    DescriptiveStatistics successfulOps = new DescriptiveStatistics();
    DescriptiveStatistics failedOps = new DescriptiveStatistics();
    DescriptiveStatistics speed = new DescriptiveStatistics();
    DescriptiveStatistics duration = new DescriptiveStatistics();
    DescriptiveStatistics noOfAliveNNs = new DescriptiveStatistics();
    for (Object obj : responses) {
        if (!(obj instanceof RawBenchmarkCommand.Response) || (obj instanceof RawBenchmarkCommand.Response
                && ((RawBenchmarkCommand.Response) obj).getPhase() != request.getPhase())) {
            throw new IllegalStateException("Wrong response received from the client");
        } else {
            RawBenchmarkCommand.Response response = (RawBenchmarkCommand.Response) obj;
            successfulOps.addValue(response.getTotalSuccessfulOps());
            failedOps.addValue(response.getTotalFailedOps());
            speed.addValue(response.getOpsPerSec());
            duration.addValue(response.getRunTime());
            noOfAliveNNs.addValue(response.getNnCount());
        }
    }

    RawBMResults result = new RawBMResults(args.getNamenodeCount(), (int) Math.floor(noOfAliveNNs.getMean()),
            args.getNdbNodesCount(), request.getPhase(),
            (successfulOps.getSum() / ((duration.getMean() / 1000))), (duration.getMean() / 1000),
            (successfulOps.getSum()), (failedOps.getSum()));
    return result;
}

From source file:com.linuxbox.enkive.teststats.StatsDayGrainTest.java

@Test
public void consolidationMethods() {
    List<Map<String, Object>> consolidatedData = grain.consolidateData();
    assertTrue("the consolidated data is null", consolidatedData != null);
    String methods[] = { CONSOLIDATION_AVG, CONSOLIDATION_MAX, CONSOLIDATION_MIN };
    DescriptiveStatistics statsMaker = new DescriptiveStatistics();
    statsMaker.addValue(111);
    statsMaker.addValue(11);
    statsMaker.addValue(1);
    Map<String, Object> statData = new HashMap<String, Object>();
    for (String method : methods) {
        grain.methodMapBuilder(method, statsMaker, statData);
    }
    assertTrue("methodMapBuilder returned null", statData != null);
}

From source file:com.linuxbox.enkive.teststats.StatsHourGrainTest.java

@Test
public void consolidationMethods() {
    List<Map<String, Object>> consolidatedData = grain.consolidateData();
    assertTrue("the consolidated data is null", consolidatedData != null);
    String methods[] = { CONSOLIDATION_AVG, CONSOLIDATION_MAX, CONSOLIDATION_MIN };
    DescriptiveStatistics statsMaker = new DescriptiveStatistics();
    statsMaker.addValue(111);
    statsMaker.addValue(11);
    statsMaker.addValue(1);
    Map<String, Object> statData = createMap();
    for (String method : methods) {
        grain.methodMapBuilder(method, statsMaker, statData);
    }
    assertTrue("methodMapBuilder returned null", statData != null);
}

From source file:io.yields.math.concepts.operator.Smoothness.java

@Override
public DescriptiveStatistics apply(Collection<Tuple> tuples) {
    Validate.isTrue(tuples.size() > order);
    //first we normalize the tuples so data fits in the unit square
    List<Tuple> normalizedData = normalize(tuples);
    //calculate error (i.e., the distance between each tuple's y value and the fitted polynomial)
    RealMatrix error = computeDistance(normalizedData);
    //group in stats object
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (double value : error.getColumn(0)) {
        stats.addValue(Math.abs(value));
    }
    return stats;
}

From source file:com.insightml.evaluation.functions.MedianError.java

@Override
public DescriptiveStatistics label(final Serializable[] preds, final Object[] expected, final double[] weights,
        final ISamples<?, ?> samples, final int labelIndex) {
    final DescriptiveStatistics stats = new DescriptiveStatistics();
    for (int i = 0; i < preds.length; ++i) {
        final double[] predAndAct = toDouble(preds[i], expected[i]);
        stats.addValue(Math.abs(predAndAct[0] - predAndAct[1]));
    }
    return new DescriptiveStatistics(new double[] { stats.getPercentile(50) });
}