Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getStandardDeviation

Introduction

On this page you can find example usage for org.apache.commons.math3.stat.descriptive.DescriptiveStatistics.getStandardDeviation().

Prototype

public double getStandardDeviation() 

Document

Returns the standard deviation of the available values.
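The returned value is the bias-corrected sample standard deviation, i.e. the square root of the sample variance (dividing by n - 1). If no values have been added the method returns Double.NaN, and for a single value it returns 0.0. The following minimal sketch illustrates typical usage; the input values are arbitrary data chosen only for illustration:

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class StandardDeviationExample {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();

        //add a handful of sample values (arbitrary data for illustration)
        for (double value : new double[] { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 }) {
            stats.addValue(value);
        }

        //sample standard deviation of all values added so far
        double stdDev = stats.getStandardDeviation();
        System.out.printf("n=%d, mean=%.3f, stddev=%.3f%n", stats.getN(), stats.getMean(), stdDev);
    }
}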

Usage

From source file:edu.ucsc.barrel.cdf_gen.CDF_Gen.java

public static void fill511Gaps() {
    int size = data.getSize("mod32"), step_size = 1, start = 0;
    double delta, std_dev, med;
    float m, b, //values used for interpolating data        
            fill = (Float) CDFVar.getIstpVal("FLOAT_FILL"), new_value = fill, last_value = fill;
    DescriptiveStatistics stats = new DescriptiveStatistics();

    //generate statistics on the 511 peak jump sizes
    for (int peak_i = 0; peak_i < (size - 1); peak_i++) {
        if (data.peak511_bin[peak_i] == fill) {
            continue;
        }
        if (data.peak511_bin[peak_i + 1] == fill) {
            continue;
        }

        delta = data.peak511_bin[peak_i + 1] - data.peak511_bin[peak_i];
        if (delta != 0) {
            stats.addValue(delta);
        }
    }
    std_dev = stats.getStandardDeviation();
    med = stats.getPercentile(50);

    //find first good value
    for (start = 0; start < size; start++) {
        if (data.peak511_bin[start] != fill) {
            new_value = data.peak511_bin[start];
            last_value = data.peak511_bin[start];
            break;
        }
    }

    //fill any missing data before the first point
    Arrays.fill(data.peak511_bin, 0, start, new_value);

    for (int filler_i = start + 1; filler_i < size; filler_i++) {
        if (data.peak511_bin[filler_i] == fill) {
            //temporarily fill the gap with the last good value 
            //this is done in case there is not another good value
            //to use for interpolation
            data.peak511_bin[filler_i] = last_value;
            step_size++;
        } else {
            //make sure jump size wasn't too big
            delta = data.peak511_bin[filler_i] - data.peak511_bin[filler_i - 1];
            // if(Math.abs(delta - med) > (std_dev * 3)){
            //    data.peak511_bin[filler_i] = last_value;
            //    step_size++;
            // }

            last_value = new_value;
            new_value = data.peak511_bin[filler_i];

            //fill any gaps
            if (step_size > 1) {
                //slope from the previous good value to the current one
                m = (new_value - last_value) / step_size;
                b = new_value - (m * filler_i);

                for (int fill_i = filler_i - step_size; fill_i < filler_i; fill_i++) {
                    data.peak511_bin[fill_i] = m * fill_i + b;
                }

                step_size = 1;
            }
        }
    }
}

From source file:com.github.aptd.simulation.core.statistic.local.CStatistic.java

/**
 * write data
 *
 * @param p_writer writer instance
 * @param p_name section name
 * @param p_statistic statistic value
 */
private static void apply(final IWriter p_writer, final String p_name,
        final DescriptiveStatistics p_statistic) {
    p_writer.section(1, p_name);

    p_writer.value("geometricmean", p_statistic.getGeometricMean());
    p_writer.value("kurtosis", p_statistic.getKurtosis());
    p_writer.value("max", p_statistic.getMax());
    p_writer.value("min", p_statistic.getMin());
    p_writer.value("mean", p_statistic.getMean());
    p_writer.value("count", p_statistic.getN());
    p_writer.value("25-percentile", p_statistic.getPercentile(0.25));
    p_writer.value("75-percentile", p_statistic.getPercentile(0.75));
    p_writer.value("populationvariance", p_statistic.getPopulationVariance());
    p_writer.value("quadraticmean", p_statistic.getQuadraticMean());
    p_writer.value("standdeviation", p_statistic.getStandardDeviation());
    p_writer.value("skewness", p_statistic.getSkewness());
    p_writer.value("sum", p_statistic.getSum());
    p_writer.value("sumsequared", p_statistic.getSumsq());
    p_writer.value("variance", p_statistic.getVariance());
}

From source file:cc.kave.commons.pointsto.evaluation.runners.ProjectStoreRunner.java

private static void countRecvCallSites(Collection<ICoReTypeName> types, ProjectUsageStore store)
        throws IOException {
    DescriptiveStatistics statistics = new DescriptiveStatistics();
    for (ICoReTypeName type : types) {
        if (store.getProjects(type).size() < 10) {
            continue;
        }

        int numDistinctRecvCallsite = store.load(type, new PointsToUsageFilter()).stream()
                .flatMap(usage -> usage.getReceiverCallsites().stream()).map(CallSite::getMethod)
                .collect(Collectors.toSet()).size();
        if (numDistinctRecvCallsite > 0) {
            statistics.addValue(numDistinctRecvCallsite);
            System.out.printf(Locale.US, "%s: %d\n", CoReNames.vm2srcQualifiedType(type),
                    numDistinctRecvCallsite);
        }
    }
    System.out.println();
    System.out.printf(Locale.US, "mean: %.3f, stddev: %.3f, median: %.1f\n", statistics.getMean(),
            statistics.getStandardDeviation(), statistics.getPercentile(50));
}

From source file:main.java.metric.Metric.java

public static void getPartitionStatistic(Cluster cluster) {
    DescriptiveStatistics _data = new DescriptiveStatistics();
    DescriptiveStatistics _inflow = new DescriptiveStatistics();
    DescriptiveStatistics _outflow = new DescriptiveStatistics();

    for (Partition partition : cluster.getPartitions()) {

        _data.addValue(partition.getPartition_dataSet().size());
        _inflow.addValue(partition.getPartition_inflow());
        _outflow.addValue(partition.getPartition_outflow());
    }

    mean_partition_inflow.add(_inflow.getMean());
    mean_partition_outflow.add(_outflow.getMean());
    mean_partition_data.add(_data.getMean());
    sd_partition_data.add(_data.getStandardDeviation());
}

From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step6GraphTransitivityCleaner.java

@SuppressWarnings("unchecked")
public static void printResultStatistics(File xmlFile) throws IllegalAccessException {
    Map<String, Map<String, GraphCleaningResults>> results = (Map<String, Map<String, GraphCleaningResults>>) XStreamTools
            .getXStream().fromXML(xmlFile);

    //        System.out.println(results);

    SortedMap<String, List<GraphCleaningResults>> resultsGroupedByMethod = new TreeMap<>();

    for (Map.Entry<String, Map<String, GraphCleaningResults>> entry : results.entrySet()) {
        //            System.out.println(entry.getKey());

        for (Map.Entry<String, GraphCleaningResults> e : entry.getValue().entrySet()) {
            //                System.out.println(e.getKey());
            //                System.out.println(e.getValue());

            if (!resultsGroupedByMethod.containsKey(e.getKey())) {
                resultsGroupedByMethod.put(e.getKey(), new ArrayList<GraphCleaningResults>());
            }

            resultsGroupedByMethod.get(e.getKey()).add(e.getValue());
        }
    }

    String header = null;

    // collect statistics
    for (Map.Entry<String, List<GraphCleaningResults>> entry : resultsGroupedByMethod.entrySet()) {
        List<GraphCleaningResults> value = entry.getValue();
        SortedMap<String, DescriptiveStatistics> stringDescriptiveStatisticsMap = collectStatisticsOverGraphCleaningResults(
                value);

        if (header == null) {
            header = StringUtils.join(stringDescriptiveStatisticsMap.keySet(), "\t");
            System.out.println("\t\t" + header);
        }

        List<Double> means = new ArrayList<>();
        List<Double> stdDevs = new ArrayList<>();
        for (DescriptiveStatistics statistics : stringDescriptiveStatisticsMap.values()) {
            means.add(statistics.getMean());
            stdDevs.add(statistics.getStandardDeviation());
        }

        List<String> meansString = new ArrayList<>();
        for (Double mean : means) {
            meansString.add(String.format(Locale.ENGLISH, "%.2f", mean));
        }

        List<String> stdDevString = new ArrayList<>();
        for (Double stdDev : stdDevs) {
            stdDevString.add(String.format(Locale.ENGLISH, "%.2f", stdDev));
        }

        System.out.println(entry.getKey() + "\tmean\t" + StringUtils.join(meansString, "\t"));
        //            System.out.println(entry.getKey() + "\tstdDev\t" + StringUtils.join(stdDevString, "\t"));
    }
}

From source file:com.insightml.data.features.stats.FeatureStatistics.java

public Double getStandardDeviation(final String feature) {
    final DescriptiveStatistics stat = stats.get(feature);
    return stat == null ? null : stat.getStandardDeviation();
}

From source file:ijfx.core.overlay.PixelStatisticsBase.java

public PixelStatisticsBase(DescriptiveStatistics stats) {

    setMean(stats.getMean());
    setMax(stats.getMax());
    setStandardDeviation(stats.getStandardDeviation());
    setVariance(stats.getVariance());
    setMedian(stats.getPercentile(50));
    setPixelCount(stats.getN());
    setMin(stats.getMin());

}

From source file:com.intuit.tank.service.impl.v1.report.SummaryReportRunner.java

/**
 * @param jobId the job id
 * @param key the key used as the page id
 * @param stats the descriptive statistics for the page
 * @return the summary data built from the statistics
 */
private static SummaryData getSummaryData(int jobId, String key, DescriptiveStatistics stats) {
    SummaryData ret = SummaryDataBuilder.summaryData().withJobId(jobId)
            .withKurtosis(!Double.isNaN(stats.getKurtosis()) ? stats.getKurtosis() : 0).withMax(stats.getMax())
            .withMean(stats.getMean()).withMin(stats.getMin()).withPageId(key)
            .withPercentile10(stats.getPercentile(10)).withPercentile20(stats.getPercentile(20))
            .withPercentile30(stats.getPercentile(30)).withPercentile40(stats.getPercentile(40))
            .withPercentile50(stats.getPercentile(50)).withPercentile60(stats.getPercentile(60))
            .withPercentile70(stats.getPercentile(70)).withPercentile80(stats.getPercentile(80))
            .withPercentile90(stats.getPercentile(90)).withPercentile95(stats.getPercentile(95))
            .withPercentile99(stats.getPercentile(99)).withSampleSize((int) stats.getN())
            .withSkewness(!Double.isNaN(stats.getSkewness()) ? stats.getSkewness() : 0)
            .withSttDev(!Double.isNaN(stats.getStandardDeviation()) ? stats.getStandardDeviation() : 0)
            .withVarience(!Double.isNaN(stats.getVariance()) ? stats.getVariance() : 0).build();
    return ret;
}

From source file:ijfx.plugins.projection.StandardDeviationProjection.java

@Override
public <T extends RealType<T>> void process(List<T> list, Sampler<T> sampler) {
    DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics();
    list.stream().forEach((t) -> descriptiveStatistics.addValue(t.getRealDouble()));

    //Set result
    sampler.get().setReal(descriptiveStatistics.getStandardDeviation());
}

From source file:kmi.taa.core.SmallSetAnalyser.java

public double std(HashMap<Integer, Double> map) {
    double[] va = new double[map.size()];
    int i = 0;
    for (Integer k : map.keySet()) {
        va[i++] = map.get(k).doubleValue();
    }
    DescriptiveStatistics ds = new DescriptiveStatistics(va);
    return ds.getStandardDeviation();
}