Example usage for org.apache.commons.math.stat.descriptive DescriptiveStatistics getStandardDeviation

Introduction

On this page you can find example usage of org.apache.commons.math.stat.descriptive DescriptiveStatistics.getStandardDeviation().

Prototype

public double getStandardDeviation() 

Document

Returns the standard deviation of the available values.
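
Before the project examples below, here is a minimal, self-contained sketch (the class name and data values are illustrative, not taken from any of the sources): getStandardDeviation() computes the bias-corrected sample standard deviation, returning Double.NaN when no values have been added and 0.0 for a single value.

import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

public class StandardDeviationDemo {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        for (double v : new double[] { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 }) {
            stats.addValue(v);
        }
        // Sample standard deviation with n - 1 in the denominator: prints
        // roughly 2.138 here, where the population value would be exactly 2.0.
        System.out.println("std = " + stats.getStandardDeviation());
    }
}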

Usage

From source file: playground.artemc.pricing.SocialCostCalculator.java

private void calcStatistics() {

    // Get a DescriptiveStatistics instance
    DescriptiveStatistics tripStats = new DescriptiveStatistics();
    DescriptiveStatistics tripStatsNormalized = new DescriptiveStatistics();

    // Add the social costs and distance of each performed leg
    for (LegTrip legTrip : performedLegs) {
        double distance = 0.0;
        double cost = 0.0;
        for (LinkTrip linkTrip : legTrip.linkTrips) {
            double socialCosts = calcSocCosts(linkTrip.link_id, linkTrip.enterTime);
            if (socialCosts > 0.0)
                cost = cost + socialCosts;
            // accumulate the leg distance over its links
            distance = distance + network.getLinks().get(linkTrip.link_id).getLength();
        }

        legTrip.distance = distance;
        legTrip.cost = cost;

        tripStats.addValue(cost);

        /*
         * Normalize a leg's social costs by dividing them by the leg travel time
         * or the leg distance; here the distance is used.
         */
        //double legTravelTime = legTrip.arrivalTime - legTrip.departureTime;
        if (cost > 0.0 && legTrip.distance > 0.0)
            tripStatsNormalized.addValue(cost / legTrip.distance);
    }

    // Compute some statistics
    double sum = tripStats.getSum();
    double mean = tripStats.getMean();
    double std = tripStats.getStandardDeviation();
    double median = tripStats.getPercentile(50);
    double quantile25 = tripStats.getPercentile(25);
    double quantile75 = tripStats.getPercentile(75);

    double sumNormalized = tripStatsNormalized.getSum();
    double meanNormalized = tripStatsNormalized.getMean();
    double stdNormalized = tripStatsNormalized.getStandardDeviation();
    double medianNormalized = tripStatsNormalized.getPercentile(50);
    double quantile25Normalized = tripStatsNormalized.getPercentile(25);
    double quantile75Normalized = tripStatsNormalized.getPercentile(75);

    log.info("Sum of all leg costs: " + sum);
    log.info("Mean leg costs: " + mean);
    log.info("Standard deviation: " + std);
    log.info("Median leg costs: " + median);
    log.info("25% quantile leg costs: " + quantile25);
    log.info("75% quantile leg costs: " + quantile75);

    log.info("Normalized sum of all leg costs: " + sumNormalized);
    log.info("Normalized mean leg costs: " + meanNormalized);
    log.info("Normalized standard deviation: " + stdNormalized);
    log.info("Normalized median leg costs: " + medianNormalized);
    log.info("Normalized 25% quantile leg costs: " + quantile25Normalized);
    log.info("Normalized 75% quantile leg costs: " + quantile75Normalized);

    meanSocialCosts.add(mean);
    medianSocialCosts.add(median);
    quantil25PctSocialCosts.add(quantile25);
    quantil75PctSocialCosts.add(quantile75);

    meanNormalizedSocialCosts.add(meanNormalized);
    medianNormalizedSocialCosts.add(medianNormalized);
    quantil25PctNormalizedSocialCosts.add(quantile25Normalized);
    quantil75PctNormalizedSocialCosts.add(quantile75Normalized);
}
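
This example needs medians and quartiles, which is why it uses DescriptiveStatistics rather than the storeless SummaryStatistics from the same package. A short sketch of the trade-off (values are illustrative):

import org.apache.commons.math.stat.descriptive.SummaryStatistics;

// SummaryStatistics computes the standard deviation without storing the values,
// but it offers no getPercentile(...), so the medians and quantiles logged
// above would be unavailable with it.
SummaryStatistics storeless = new SummaryStatistics();
storeless.addValue(1.0);
storeless.addValue(3.0);
double std = storeless.getStandardDeviation(); // works without stored values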

From source file: playground.christoph.socialcosts.SocialCostCalculator.java

private void calcStatistics() {

    // Get a DescriptiveStatistics instance
    DescriptiveStatistics stats = new DescriptiveStatistics();
    DescriptiveStatistics statsNormalized = new DescriptiveStatistics();

    // Add the social costs of each performed leg
    for (LegTrip legTrip : performedLegs) {
        double costs = 0.0;
        for (LinkTrip linkTrip : legTrip.linkTrips) {
            double socialCosts = calcSocCosts(linkTrip.link_id, linkTrip.enterTime);
            if (socialCosts > 0.0)
                costs = costs + socialCosts;
        }
        stats.addValue(costs);

        /*
         * Normalize a leg's social costs by dividing them by the leg travel time.
         * As a result we get something like social costs per traveled second.
         * Another option would be to do this at the link level instead of the leg level.
         */
        double legTravelTime = legTrip.arrivalTime - legTrip.departureTime;
        if (costs > 0.0 && legTravelTime > 0.0)
            statsNormalized.addValue(costs / legTravelTime);
    }

    // Compute some statistics
    double sum = stats.getSum();
    double mean = stats.getMean();
    double std = stats.getStandardDeviation();
    double median = stats.getPercentile(50);
    double quantile25 = stats.getPercentile(25);
    double quantile75 = stats.getPercentile(75);

    double sumNormalized = statsNormalized.getSum();
    double meanNormalized = statsNormalized.getMean();
    double stdNormalized = statsNormalized.getStandardDeviation();
    double medianNormalized = statsNormalized.getPercentile(50);
    double quantile25Normalized = statsNormalized.getPercentile(25);
    double quantile75Normalized = statsNormalized.getPercentile(75);

    log.info("Sum of all leg costs: " + sum);
    log.info("Mean leg costs: " + mean);
    log.info("Standard deviation: " + std);
    log.info("Median leg costs: " + median);
    log.info("25% quantile leg costs: " + quantile25);
    log.info("75% quantile leg costs: " + quantile75);

    log.info("Normalized sum of all leg costs: " + sumNormalized);
    log.info("Normalized mean leg costs: " + meanNormalized);
    log.info("Normalized standard deviation: " + stdNormalized);
    log.info("Normalized median leg costs: " + medianNormalized);
    log.info("Normalized 25% quantile leg costs: " + quantile25Normalized);
    log.info("Normalized 75% quantile leg costs: " + quantile75Normalized);

    meanSocialCosts.add(mean);
    medianSocialCosts.add(median);
    quantil25PctSocialCosts.add(quantile25);
    quantil75PctSocialCosts.add(quantile75);

    meanNormalizedSocialCosts.add(meanNormalized);
    medianNormalizedSocialCosts.add(medianNormalized);
    quantil25PctNormalizedSocialCosts.add(quantile25Normalized);
    quantil75PctNormalizedSocialCosts.add(quantile75Normalized);
}
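
The two calcStatistics() variants above compute and log the same six statistics twice. As a refactoring sketch (logStats is a hypothetical helper, not part of either source file), the duplicated blocks could be collapsed into:

private void logStats(String prefix, DescriptiveStatistics stats) {
    // One place for the six statistics both examples derive per iteration
    log.info(prefix + "sum of all leg costs: " + stats.getSum());
    log.info(prefix + "mean leg costs: " + stats.getMean());
    log.info(prefix + "standard deviation: " + stats.getStandardDeviation());
    log.info(prefix + "median leg costs: " + stats.getPercentile(50));
    log.info(prefix + "25% quantile leg costs: " + stats.getPercentile(25));
    log.info(prefix + "75% quantile leg costs: " + stats.getPercentile(75));
}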

From source file: playground.johannes.gsv.analysis.CountsCompareAnalyzer.java

@Override
public void notifyAfterMobsim(AfterMobsimEvent event) {
    Network network = event.getControler().getScenario().getNetwork();
    DescriptiveStatistics error = new DescriptiveStatistics();
    DescriptiveStatistics errorAbs = new DescriptiveStatistics();
    DescriptivePiStatistics errorWeighted = new WSMStatsFactory().newInstance();

    TDoubleArrayList errorVals = new TDoubleArrayList();
    TDoubleArrayList caps = new TDoubleArrayList();
    TDoubleArrayList speeds = new TDoubleArrayList();

    for (Count count : counts.getCounts().values()) {
        if (!count.getLocId().toString().startsWith(ODCalibrator.VIRTUAL_ID_PREFIX)) {
            double obsVal = 0;
            for (int i = 1; i < 25; i++) {
                obsVal += count.getVolume(i).getValue();
            }

            if (obsVal > 0) {
                double simVal = calculator.getOccupancy(count.getLocId());
                simVal *= factor;

                double err = (simVal - obsVal) / obsVal;

                error.addValue(err);
                errorAbs.addValue(Math.abs(err));
                errorWeighted.addValue(Math.abs(err), 1 / obsVal);

                Link link = network.getLinks().get(count.getLocId());
                errorVals.add(Math.abs(err));
                caps.add(link.getCapacity());
                speeds.add(link.getFreespeed());
            }
        }
    }

    logger.info(String.format("Relative counts error: mean = %s, var = %s, stderr = %s, min = %s, max = %s",
            error.getMean(), error.getVariance(), error.getStandardDeviation(), error.getMin(),
            error.getMax()));
    logger.info(String.format(
            "Absolute relative counts error: mean = %s, var = %s, stderr = %s, min = %s, max = %s",
            errorAbs.getMean(), errorAbs.getVariance(), errorAbs.getStandardDeviation(), errorAbs.getMin(),
            errorAbs.getMax()));
    logger.info(String.format(
            "Absolute weigthed relative counts error: mean = %s, var = %s, stderr = %s, min = %s, max = %s",
            errorWeighted.getMean(), errorWeighted.getVariance(), errorWeighted.getStandardDeviation(),
            errorWeighted.getMin(), errorWeighted.getMax()));

    String outdir = event.getControler().getControlerIO().getIterationPath(event.getIteration());

    try {
        TDoubleDoubleHashMap map = Correlations.mean(caps.toNativeArray(), errorVals.toNativeArray());
        TXTWriter.writeMap(map, "capacity", "counts", String.format("%s/countsError.capacity.txt", outdir));

        map = Correlations.mean(speeds.toNativeArray(), errorVals.toNativeArray());
        TXTWriter.writeMap(map, "speed", "counts", String.format("%s/countsError.speed.txt", outdir));

        TXTWriter.writeMap(Histogram.createHistogram(error, new LinearDiscretizer(0.1), false), "Error",
                "Frequency", String.format("%s/countsError.hist.txt", outdir));
        TXTWriter.writeMap(Histogram.createHistogram(errorAbs, new LinearDiscretizer(0.1), false),
                "Error (absolute)", "Frequency", String.format("%s/countsErrorAbs.hist.txt", outdir));
        TXTWriter.writeMap(Histogram.createHistogram(errorWeighted, new LinearDiscretizer(0.1), true),
                "Error (weighted)", "Frequency", String.format("%s/countsErrorWeighted.hist.txt", outdir));

        CountsCompare2GeoJSON.write(calculator, counts, factor, network, outdir);
        NetworkLoad2GeoJSON.write(event.getControler().getScenario().getNetwork(), calculator, factor,
                outdir + "/network.json");
    } catch (IOException e) {
        e.printStackTrace();
    }

    String rootOutDir = event.getControler().getControlerIO().getOutputPath();
    boolean append = false;
    if (event.getIteration() > 0) {
        append = true;
    }
    writeErrorFile(error, String.format("%s/countsError.txt", rootOutDir), append);
    writeErrorFile(errorAbs, String.format("%s/countsAbsError.txt", rootOutDir), append);
}
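
One caveat about the labels in this example: the log lines and the error-file header say "stderr", but the value printed is getStandardDeviation(), i.e. the sample standard deviation. If the standard error of the mean were actually intended, it would be derived as in this one-line sketch:

// standard error of the mean = sample standard deviation / sqrt(sample size)
double stderr = error.getStandardDeviation() / Math.sqrt(error.getN());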

From source file: playground.johannes.gsv.analysis.CountsCompareAnalyzer.java

private void writeErrorFile(DescriptiveStatistics error, String file, boolean append) {
    try {
        BufferedWriter writer = new BufferedWriter(new FileWriter(file, append));
        if (!append) {
            // write header
            writer.write("mean\tvar\tstderr\tmin\tmax");
            writer.newLine();
        }

        writer.write(String.valueOf(error.getMean()));
        writer.write("\t");
        writer.write(String.valueOf(error.getVariance()));
        writer.write("\t");
        writer.write(String.valueOf(error.getStandardDeviation()));
        writer.write("\t");
        writer.write(String.valueOf(error.getMin()));
        writer.write("\t");
        writer.write(String.valueOf(error.getMax()));
        writer.newLine();

        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
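
The method opens the writer in a plain try block, so the BufferedWriter is leaked if an IOException occurs mid-write. A behaviorally equivalent sketch using try-with-resources (Java 7+) closes it in all cases:

private void writeErrorFile(DescriptiveStatistics error, String file, boolean append) {
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file, append))) {
        if (!append) {
            writer.write("mean\tvar\tstderr\tmin\tmax"); // header
            writer.newLine();
        }
        writer.write(error.getMean() + "\t" + error.getVariance() + "\t"
                + error.getStandardDeviation() + "\t" + error.getMin() + "\t" + error.getMax());
        writer.newLine();
    } catch (IOException e) {
        e.printStackTrace();
    }
}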

From source file: uk.ac.ebi.phenotype.service.ObservationService.java

public Map<String, List<DiscreteTimePoint>> getTimeSeriesMutantData(String parameter, List<String> genes,
        ArrayList<String> strains, String[] center, String[] sex) throws SolrServerException {

    Map<String, List<DiscreteTimePoint>> finalRes = new HashMap<String, List<DiscreteTimePoint>>(); // <allele_accession, timeSeriesData>

    SolrQuery query = new SolrQuery().addFilterQuery(ObservationDTO.BIOLOGICAL_SAMPLE_GROUP + ":experimental")
            .addFilterQuery(ObservationDTO.PARAMETER_STABLE_ID + ":" + parameter);

    String q = (strains.size() > 1) ? "(" + ObservationDTO.STRAIN_ACCESSION_ID + ":\""
            + StringUtils.join(strains.toArray(), "\" OR " + ObservationDTO.STRAIN_ACCESSION_ID + ":\"") + "\")"
            : ObservationDTO.STRAIN_ACCESSION_ID + ":\"" + strains.get(0) + "\"";

    if (genes != null && genes.size() > 0) {
        q += " AND (";
        q += (genes.size() > 1) ? ObservationDTO.GENE_ACCESSION_ID + ":\""
                + StringUtils.join(genes.toArray(), "\" OR " + ObservationDTO.GENE_ACCESSION_ID + ":\"") + "\""
                : ObservationDTO.GENE_ACCESSION_ID + ":\"" + genes.get(0) + "\"";
        q += ")";
    }

    if (center != null && center.length > 0) {
        q += " AND (";
        q += (center.length > 1)
                ? ObservationDTO.PHENOTYPING_CENTER + ":\""
                        + StringUtils.join(center, "\" OR " + ObservationDTO.PHENOTYPING_CENTER + ":\"") + "\""
                : ObservationDTO.PHENOTYPING_CENTER + ":\"" + center[0] + "\"";
        q += ")";
    }

    if (sex != null && sex.length == 1) {
        q += " AND " + ObservationDTO.SEX + ":\"" + sex[0] + "\"";
    }

    query.setQuery(q);
    query.set("group.field", ObservationDTO.GENE_SYMBOL);
    query.set("group", true);
    query.set("fl", ObservationDTO.DATA_POINT + "," + ObservationDTO.DISCRETE_POINT);
    query.set("group.limit", 100000); // number of documents to be returned
    // per group
    query.set("group.sort", ObservationDTO.DISCRETE_POINT + " asc");
    query.setRows(10000);

    // System.out.println("+_+_+ " + solr.getBaseURL() + "/select?" + query);
    List<Group> groups = solr.query(query).getGroupResponse().getValues().get(0).getValues();
    // for mutants it doesn't seem we need binning
    // groups are the alleles
    for (Group gr : groups) {
        SolrDocumentList resDocs = gr.getResult();
        DescriptiveStatistics stats = new DescriptiveStatistics();
        float discreteTime = (float) resDocs.get(0).getFieldValue(ObservationDTO.DISCRETE_POINT);
        ArrayList<DiscreteTimePoint> res = new ArrayList<DiscreteTimePoint>();
        for (int i = 0; i < resDocs.getNumFound(); i++) {
            SolrDocument doc = resDocs.get(i);
            stats.addValue((float) doc.getFieldValue(ObservationDTO.DATA_POINT));
            if (discreteTime != (float) doc.getFieldValue(ObservationDTO.DISCRETE_POINT)
                    || i == resDocs.getNumFound() - 1) { // we are at the end of the document list
                // add to list
                float discreteDataPoint = (float) stats.getMean();
                DiscreteTimePoint dp = new DiscreteTimePoint(discreteTime, discreteDataPoint,
                        new Float(stats.getStandardDeviation()));
                List<Float> errorPair = new ArrayList<>();
                Float lower = new Float(discreteDataPoint);
                Float higher = new Float(discreteDataPoint);
                errorPair.add(lower);
                errorPair.add(higher);
                dp.setErrorPair(errorPair);
                res.add(dp);
                // update discrete point
                discreteTime = Float.valueOf(doc.getFieldValue(ObservationDTO.DISCRETE_POINT).toString());
                // update stats
                stats = new DescriptiveStatistics();
            }
        }
        // add list
        finalRes.put(gr.getGroupValue(), res);
    }
    return finalRes;
}
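
Note that this method passes stats.getStandardDeviation() to the DiscreteTimePoint constructor but then builds errorPair from the bare mean, so lower and higher coincide and the error bars collapse to a point. The control-data method below uses mean ± standard deviation instead; if that were intended here as well, the pair would presumably be built like this (a sketch, not taken from the source file):

double std = stats.getStandardDeviation();
errorPair.add(new Float(discreteDataPoint - std)); // lower bound: mean - std
errorPair.add(new Float(discreteDataPoint + std)); // upper bound: mean + std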

From source file: uk.ac.ebi.phenotype.service.ObservationService.java

public List<DiscreteTimePoint> getTimeSeriesControlData(String parameter, ArrayList<String> strains,
        String[] center, String[] sex) throws SolrServerException {

    ArrayList<DiscreteTimePoint> res = new ArrayList<DiscreteTimePoint>();
    SolrQuery query = new SolrQuery().addFilterQuery(ObservationDTO.BIOLOGICAL_SAMPLE_GROUP + ":control")
            .addFilterQuery(ObservationDTO.PARAMETER_STABLE_ID + ":" + parameter);
    String q = (strains.size() > 1) ? "(" + ObservationDTO.STRAIN_ACCESSION_ID + ":\""
            + StringUtils.join(strains.toArray(), "\" OR " + ObservationDTO.STRAIN_ACCESSION_ID + ":\"") + "\")"
            : ObservationDTO.STRAIN_ACCESSION_ID + ":\"" + strains.get(0) + "\"";

    if (center != null && center.length > 0) {
        q += " AND (";
        q += (center.length > 1)
                ? ObservationDTO.PHENOTYPING_CENTER + ":\""
                        + StringUtils.join(center, "\" OR " + ObservationDTO.PHENOTYPING_CENTER + ":\"") + "\""
                : ObservationDTO.PHENOTYPING_CENTER + ":\"" + center[0] + "\"";
        q += ")";
    }

    if (sex != null && sex.length == 1) {
        q += " AND " + ObservationDTO.SEX + ":\"" + sex[0] + "\"";
    }

    query.setQuery(q);
    query.set("group.field", ObservationDTO.DISCRETE_POINT);
    query.set("group", true);
    query.set("fl", ObservationDTO.DATA_POINT + "," + ObservationDTO.DISCRETE_POINT);
    query.set("group.limit", 100000); // number of documents to be returned
    // per group
    query.set("sort", ObservationDTO.DISCRETE_POINT + " asc");
    query.setRows(10000);

    // System.out.println("+_+_+ " + solr.getBaseURL() + "/select?" + query);
    List<Group> groups = solr.query(query).getGroupResponse().getValues().get(0).getValues();
    boolean rounding = false;
    // decide if binning is needed, i.e. whether the increment points are too
    // scattered, as for calorimetry
    if (groups.size() > 30) { // arbitrary value, just picked because it seems reasonable for the size of our graphs
        if (Float.valueOf(groups.get(groups.size() - 1).getGroupValue())
                - Float.valueOf(groups.get(0).getGroupValue()) <= 30) { // then rounding will be enough
            rounding = true;
        }
    }
    if (rounding) {
        int bin = Math.round(Float.valueOf(groups.get(0).getGroupValue()));
        for (Group gr : groups) {
            int discreteTime = Math.round(Float.valueOf(gr.getGroupValue()));
            // for calorimetry, ignore what's before -5 and after 16
            if (parameter.startsWith("IMPC_CAL") || parameter.startsWith("ESLIM_003_001")
                    || parameter.startsWith("M-G-P_003_001")) {
                if (discreteTime < -5) {
                    continue;
                } else if (discreteTime > 16) {
                    break;
                }
            }
            float sum = 0;
            SolrDocumentList resDocs = gr.getResult();
            DescriptiveStatistics stats = new DescriptiveStatistics();
            for (SolrDocument doc : resDocs) {
                sum += (float) doc.getFieldValue(ObservationDTO.DATA_POINT);
                stats.addValue((float) doc.getFieldValue(ObservationDTO.DATA_POINT));
            }
            if (bin < discreteTime || groups.indexOf(gr) == groups.size() - 1) { // finished the groups or filled the bin
                float discreteDataPoint = sum / resDocs.getNumFound();
                DiscreteTimePoint dp = new DiscreteTimePoint((float) discreteTime, discreteDataPoint,
                        new Float(stats.getStandardDeviation()));
                List<Float> errorPair = new ArrayList<>();
                double std = stats.getStandardDeviation();
                Float lower = new Float(discreteDataPoint - std);
                Float higher = new Float(discreteDataPoint + std);
                errorPair.add(lower);
                errorPair.add(higher);
                dp.setErrorPair(errorPair);
                res.add(dp);
                bin = discreteTime;
            }
        }
    } else {
        for (Group gr : groups) {
            Float discreteTime = Float.valueOf(gr.getGroupValue());
            float sum = 0;
            SolrDocumentList resDocs = gr.getResult();
            DescriptiveStatistics stats = new DescriptiveStatistics();
            for (SolrDocument doc : resDocs) {
                sum += (float) doc.getFieldValue(ObservationDTO.DATA_POINT);
                stats.addValue((float) doc.getFieldValue(ObservationDTO.DATA_POINT));
            }
            float discreteDataPoint = sum / resDocs.getNumFound();
            DiscreteTimePoint dp = new DiscreteTimePoint(discreteTime, discreteDataPoint,
                    new Float(stats.getStandardDeviation()));
            List<Float> errorPair = new ArrayList<>();
            double std = stats.getStandardDeviation();
            Float lower = new Float(discreteDataPoint - std);
            Float higher = new Float(discreteDataPoint + std);
            errorPair.add(lower);
            errorPair.add(higher);
            dp.setErrorPair(errorPair);
            res.add(dp);
        }
    }
    return res;
}
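
A final edge case relevant to the sparse bins in both time-series methods: a group with a single document yields a standard deviation of 0.0 (so the error bars collapse), and an empty DescriptiveStatistics returns Double.NaN, as this small check illustrates:

DescriptiveStatistics single = new DescriptiveStatistics();
System.out.println(single.getStandardDeviation()); // NaN for an empty value set
single.addValue(3.5);
System.out.println(single.getStandardDeviation()); // 0.0 for a single value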