Example usage for org.apache.commons.math.stat.descriptive DescriptiveStatistics getStandardDeviation

Introduction

On this page you can find example usages of org.apache.commons.math.stat.descriptive.DescriptiveStatistics.getStandardDeviation().

Prototype

public double getStandardDeviation() 

Document

Returns the standard deviation of the available values.
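
Before the project examples below, here is a minimal self-contained sketch of the call. The sample values are illustrative only; by default the underlying StandardDeviation estimator is bias-corrected (n - 1 denominator).

import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

public class StandardDeviationExample {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        // Add the sample values one by one (illustrative data).
        for (double value : new double[] { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 }) {
            stats.addValue(value);
        }
        // Sample (bias-corrected) standard deviation of the added values, about 2.14 here.
        System.out.println("std dev = " + stats.getStandardDeviation());
    }
}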

Usage

From source file:com.netxforge.netxstudio.common.math.NativeFunctions.java

public BigDecimal standardDeviation(List<?> range) {
    assert range != null : new MathException("Range can't be empty");
    DescriptiveStatistics stats = new DescriptiveStatistics();
    double[] dRange = rangeSelection(range);
    for (int i = 0; i < dRange.length; i++) {
        stats.addValue(dRange[i]);
    }
    return new BigDecimal(stats.getStandardDeviation());
}

From source file:com.gtwm.pb.model.manageData.WordCloud.java

/**
 * @param textLowerCase
 *            Input text, must be lower case
 * @param minWeight
 *            Minimum tag weight, e.g. a font size
 * @param maxWeight
 *            Max. tag weight
 * @param maxTags
 *            Maximum number of tags to return, -1 for all tags
 * @param additionalStopWords
 *            Set of words to specifically exclude, in addition to the
 *            standard set [and, not, after, yes, no, ...]
 */
public WordCloud(String textLowerCase, int minWeight, int maxWeight, int maxTags,
        Set<String> additionalStopWords) {
    String[] wordArray = textLowerCase.split("\\W");
    Set<String> stopWords = new HashSet<String>(Arrays.asList(stopWordsArray));
    for (String additionalStopWord : additionalStopWords) {
        stopWords.add(additionalStopWord.toLowerCase().trim());
    }
    LancasterStemmer stemmer = new LancasterStemmer();
    String wordStem;
    Frequency frequencies = new Frequency();
    for (String wordString : wordArray) {
        if ((!stopWords.contains(wordString)) && (wordString.length() >= minWordLength)) {
            wordStem = stemmer.stripSuffixes(wordString);
            // Record the mapping of the stem to its origin so the most
            // common origin can be re-introduced when the cloud is
            // generated
            this.recordStemOrigin(wordString, wordStem);
            frequencies.addValue(wordStem);
        }
    }
    // Compute std. dev of frequencies so we can remove outliers
    DescriptiveStatistics stats = new DescriptiveStatistics();
    Iterator freqIt = frequencies.valuesIterator();
    long stemFreq;
    while (freqIt.hasNext()) {
        stemFreq = frequencies.getCount(freqIt.next());
        stats.addValue(stemFreq);
    }
    double mean = stats.getMean();
    double stdDev = stats.getStandardDeviation();
    long minFreq = Long.MAX_VALUE;
    long maxFreq = 0;
    // Remove outliers
    freqIt = frequencies.valuesIterator();
    int upperLimit = (int) (mean + (stdDev * 10));
    int lowerLimit = (int) (mean - stdDev);
    if (lowerLimit < 2) {
        lowerLimit = 2;
    }
    int numWords = 0;
    int numRawWords = wordArray.length;
    boolean removeLowOutliers = (numRawWords > (maxTags * 10));
    while (freqIt.hasNext()) {
        wordStem = (String) freqIt.next();
        stemFreq = frequencies.getCount(wordStem);
        // For a large input set, remove high and low outliers.
        // For a smaller set, just high freq. outliers
        if ((stemFreq > upperLimit) || ((stemFreq < lowerLimit) && removeLowOutliers)) {
            freqIt.remove();
        } else {
            numWords++;
            if (stemFreq > maxFreq) {
                maxFreq = stemFreq;
            } else if (stemFreq < minFreq) {
                minFreq = stemFreq;
            }
        }
    }
    // Cut down to exact required number of tags by removing smallest
    if (lowerLimit < minFreq) {
        lowerLimit = (int) minFreq;
    }
    if (numWords > maxTags) {
        while (numWords > maxTags) {
            freqIt = frequencies.valuesIterator();
            SMALLREMOVAL: while (freqIt.hasNext()) {
                stemFreq = frequencies.getCount(freqIt.next());
                if (stemFreq < lowerLimit) {
                    freqIt.remove();
                    numWords--;
                    if (numWords == maxTags) {
                        break SMALLREMOVAL;
                    }
                }
            }
            int step = (int) ((mean - lowerLimit) / 3);
            if (step < 1) {
                step = 1;
            }
            lowerLimit += step;
        }
        // The new min. freq. may have changed
        minFreq = Long.MAX_VALUE;
        freqIt = frequencies.valuesIterator();
        while (freqIt.hasNext()) {
            stemFreq = frequencies.getCount(freqIt.next());
            if (stemFreq < minFreq) {
                minFreq = stemFreq;
            }
        }
    }
    // Scale and create tag objects
    double scaleFactor;
    if (maxFreq == minFreq) {
        scaleFactor = (double) (maxWeight - minWeight) / 4; // TODO: a realistic
        // scale factor in this
        // case
    } else {
        scaleFactor = (double) (maxWeight - minWeight) / (maxFreq - minFreq);
    }
    freqIt = frequencies.valuesIterator();
    int weight;
    while (freqIt.hasNext()) {
        wordStem = (String) freqIt.next();
        stemFreq = frequencies.getCount(wordStem);
        // Might still be some left less than the min. threshold
        if (stemFreq <= minFreq) {
            weight = minWeight;
        } else {
            weight = (int) (Math.ceil((double) (stemFreq - minFreq) * scaleFactor) + minWeight);
        }
        SortedSet<WordInfo> origins = this.stemOriginMap.get(wordStem);
        String mostCommonOrigin = origins.last().getName();
        Set<String> synonyms = new TreeSet<String>();
        for (WordInfo origin : origins) {
            synonyms.add(origin.getName());
        }
        WordInfo word = new Word(mostCommonOrigin, weight, synonyms);
        this.words.add(word);
    }
}

From source file:guineu.modules.filter.Alignment.normalizationSTD.STDNormalizationTask.java

private void normalize(Dataset data) {
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (String nameExperiment : data.getAllColumnNames()) {
        for (PeakListRow row : data.getRows()) {
            Object value = row.getPeak(nameExperiment);
            if (value != null && value instanceof Double) {
                stats.addValue((Double) value);
            }
        }
        for (PeakListRow row : data.getRows()) {
            Object value = row.getPeak(nameExperiment);
            if (value != null && value instanceof Double) {
                row.setPeak(nameExperiment, (Double) value / stats.getStandardDeviation());
            }
        }
        stats.clear();
    }
}
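
The task above accumulates one experiment column at a time and divides every value in that column by the column's standard deviation. A stripped-down sketch of the same scaling on a plain double array (the method name and data are illustrative):

import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

// Unit-variance scaling of one column, mirroring the per-experiment loop in normalize above.
public static double[] scaleByStandardDeviation(double[] column) {
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (double v : column) {
        stats.addValue(v);
    }
    double sd = stats.getStandardDeviation();
    double[] scaled = new double[column.length];
    for (int i = 0; i < column.length; i++) {
        scaled[i] = column[i] / sd;
    }
    return scaled;
}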

From source file:cs.cirg.cida.components.SynopsisTableModel.java

@Override
public Object getValueAt(int rowIndex, int columnIndex) {
    if (columnIndex == 0) {
        return experiments.get(rowIndex).getName();
    }
    if (columnIndex % 3 == 1) {
        DescriptiveStatistics descriptiveStatistics = experiments.get(rowIndex)
                .getBottomRowStatistics(variables.get((columnIndex - 1) / 3));
        return descriptiveStatistics.getMean();
    }
    if (columnIndex % 3 == 2) {
        DescriptiveStatistics descriptiveStatistics = experiments.get(rowIndex)
                .getBottomRowStatistics(variables.get((columnIndex - 1) / 3));
        return descriptiveStatistics.apply(new Median());
    }
    DescriptiveStatistics descriptiveStatistics = experiments.get(rowIndex)
            .getBottomRowStatistics(variables.get((columnIndex - 1) / 3));
    return descriptiveStatistics.getStandardDeviation();
}

From source file:guineu.modules.dataanalysis.variationCoefficient.VariationCoefficientTask.java

private double getvariationCoefficient(Dataset dataset) {
    DescriptiveStatistics superStats = new DescriptiveStatistics();
    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (PeakListRow row : dataset.getRows()) {
        stats.clear();
        for (String experimentName : dataset.getAllColumnNames()) {
            Object value = row.getPeak(experimentName);
            if (value != null && value instanceof Double) {
                stats.addValue((Double) value);
            } else {

                try {
                    stats.addValue(Double.valueOf((String) value));
                } catch (Exception e) {
                }
            }
        }
        if (stats.getMean() > 0) {
            double value = stats.getStandardDeviation() / stats.getMean();
            superStats.addValue(value);
        }
    }
    return superStats.getMean();
}

From source file:edu.usc.goffish.gopher.sample.stats.N_Hop_Stats.java

@Override
public void reduce(List<SubGraphMessage> messageList) {

    if (getSuperStep() == 0) {

        if (messageList == null || messageList.isEmpty()) {
            voteToHalt();
            return;
        }

        for (SubGraphMessage msg : messageList) {

            SubGraphMessage m = new SubGraphMessage(msg.getData());

            for (int id : partitions) {
                sendMessage(id, m);
            }
        }

    } else {

        DescriptiveStatistics statistics = new DescriptiveStatistics();
        for (SubGraphMessage message : messageList) {

            String data = new String(message.getData());
            Double d = Double.parseDouble(data);
            statistics.addValue(d);

        }

        PrintWriter writer = null;
        try {
            writer = new PrintWriter(new FileWriter("Hop_Stats.log", true));
            System.out.println("LOGGER STD_DIV: " + statistics.getStandardDeviation());
            writer.println("LOGGER STD_DIV: " + statistics.getStandardDeviation());
            writer.flush();
            writer.close();
        } catch (IOException e) {
            e.printStackTrace();
        }

    }

    System.out.println(
            "[Gopher]Current Reduce Iteration : " + getIteration() + " Current SuperStep : " + getSuperStep());

}

From source file:info.raack.appliancedetection.evaluation.model.EvaluationGroup.java

private void calculateEvaluationMetrics(
        Map<ApplianceEnergyConsumptionDetectionAlgorithm, List<Evaluation>> evaluationInfo) {
    for (ApplianceEnergyConsumptionDetectionAlgorithm algorithm : evaluationInfo.keySet()) {
        // do wattage stats
        DescriptiveStatistics stats = new DescriptiveStatistics();

        List<Evaluation> evaluationList = evaluationInfo.get(algorithm);
        if (evaluationList.size() == 0) {
            throw new IllegalArgumentException(
                    "No evaluations for " + algorithm + " in simulation group " + simulationGroup);
        }
        for (Evaluation evaluation : evaluationList) {
            // calculation produces watts
            stats.addValue((double) (evaluation.getOverallEnergyError())
                    * (3600.0 / (double) (evaluation.getSimulation().getDurationInSeconds())));
        }

        errorMetrics.put(algorithm,
                new Double[] { stats.getMean(), stats.getPercentile(50), stats.getStandardDeviation() });

        //
        stats = new DescriptiveStatistics();

        evaluationList = evaluationInfo.get(algorithm);
        if (evaluationList.size() == 0) {
            throw new IllegalArgumentException(
                    "No evaluations for " + algorithm + " in simulation group " + simulationGroup);
        }
        for (Evaluation evaluation : evaluationList) {
            // overall accuracy value
            stats.addValue((double) (evaluation.getOverallAccuracy()));
        }

        accuracyErrorMetrics.put(algorithm,
                new Double[] { stats.getMean(), stats.getPercentile(50), stats.getStandardDeviation() });

        //
        stats = new DescriptiveStatistics();

        evaluationList = evaluationInfo.get(algorithm);
        if (evaluationList.size() == 0) {
            throw new IllegalArgumentException(
                    "No evaluations for " + algorithm + " in simulation group " + simulationGroup);
        }
        for (Evaluation evaluation : evaluationList) {
            // state transition accuracy value
            stats.addValue((double) (evaluation.getStateTransitionAccuracy()));
        }

        stateTransitionAccuracyErrorMetrics.put(algorithm,
                new Double[] { stats.getMean(), stats.getPercentile(50), stats.getStandardDeviation() });

        //
        stats = new DescriptiveStatistics();

        evaluationList = evaluationInfo.get(algorithm);
        if (evaluationList.size() == 0) {
            throw new IllegalArgumentException(
                    "No evaluations for " + algorithm + " in simulation group " + simulationGroup);
        }
        for (Evaluation evaluation : evaluationList) {
            // state transition recall value
            stats.addValue((double) (evaluation.getStateTransitionRecall()));
        }

        stateTransitionRecallErrorMetrics.put(algorithm,
                new Double[] { stats.getMean(), stats.getPercentile(50), stats.getStandardDeviation() });

        //
        stats = new DescriptiveStatistics();

        evaluationList = evaluationInfo.get(algorithm);
        if (evaluationList.size() == 0) {
            throw new IllegalArgumentException(
                    "No evaluations for " + algorithm + " in simulation group " + simulationGroup);
        }
        for (Evaluation evaluation : evaluationList) {
            // state transition precision value
            stats.addValue((double) (evaluation.getStateTransitionPrecision()));
        }

        stateTransitionPrecisionErrorMetrics.put(algorithm,
                new Double[] { stats.getMean(), stats.getPercentile(50), stats.getStandardDeviation() });
    }
}

From source file:guineu.modules.dataanalysis.kstest.KSTestTask.java

public void run() {
    try {
        final Rengine rEngine;
        try {
            rEngine = RUtilities.getREngine();
        } catch (Throwable t) {

            throw new IllegalStateException(
                    "Kolmogorov-Smirnov test requires R but it couldn't be loaded (" + t.getMessage() + ')');
        }
        synchronized (RUtilities.R_SEMAPHORE) {

            DescriptiveStatistics stats = new DescriptiveStatistics();
            // assign the values to the R vector x
            for (int row = 0; row < dataset.getNumberRows(); row++) {
                rEngine.eval("x <- vector(mode=\"numeric\",length=" + dataset.getNumberCols() + ")");
                stats.clear();
                PeakListRow peakListRow = dataset.getRow(row);
                for (int c = 0; c < dataset.getNumberCols(); c++) {
                    int r = c + 1;
                    double value = (Double) peakListRow.getPeak(dataset.getAllColumnNames().get(c));
                    rEngine.eval("x[" + r + "] <- " + value);
                    stats.addValue(value);
                }

                rEngine.eval("y <- rnorm(" + dataset.getNumberCols() + ", mean= " + stats.getMean() + ", sd = "
                        + stats.getStandardDeviation() + ")");

                rEngine.eval("result <- ks.test(x,y)");
                long e = rEngine.rniParse("result$p.value", 1);
                long r = rEngine.rniEval(e, 0);
                REXP x = new REXP(rEngine, r);
                double pValue = x.asDouble();
                dataset.getRow(row).setVar("setPValue", pValue);
                if (peakListRow.getID() == 68) {
                    rEngine.eval("write.csv(x, \"x.csv\"");
                }
            }

        }
        rEngine.end();
        setStatus(TaskStatus.FINISHED);
    } catch (Exception ex) {
        Logger.getLogger(KSTestTask.class.getName()).log(Level.SEVERE, null, ex);
        setStatus(TaskStatus.ERROR);
    }
}

From source file:net.sf.mzmine.modules.peaklistmethods.dataanalysis.heatmaps.HeatMapTask.java

private void scale(double[][] peakList) {
    DescriptiveStatistics stdDevStats = new DescriptiveStatistics();

    for (int columns = 0; columns < peakList.length; columns++) {
        stdDevStats.clear();
        for (int row = 0; row < peakList[columns].length; row++) {
            if (!Double.isInfinite(peakList[columns][row]) && !Double.isNaN(peakList[columns][row])) {
                stdDevStats.addValue(peakList[columns][row]);
            }
        }

        double stdDev = stdDevStats.getStandardDeviation();

        for (int row = 0; row < peakList[columns].length; row++) {
            if (stdDev != 0) {
                peakList[columns][row] = peakList[columns][row] / stdDev;
            }
        }
    }
}

From source file:edu.usc.goffish.gopher.sample.N_Hop_Stat_Collector.java

@Override
public void compute(List<SubGraphMessage> subGraphMessages) {

    /**
     * We do this in the following steps:
     * 1. Calculate stats for each subgraph.
     * 2. Calculate aggregate stats for the partition.
     *    In this case a single sub-graph does the aggregation.
     * 3. Aggregate the partition-level stats and combine them at the smallest partition.
     */

    if (superStep == 0) {
        SubGraphMessage msg = subGraphMessages.get(0);
        String data = new String(msg.getData());

        String[] dataSplit = data.split("#");
        N = Integer.parseInt(dataSplit[0]);
        String[] vps = dataSplit[1].split(",");
        for (String vp : vps) {
            vantagePoints.add(vp.trim());
        }

        try {

            Iterable<? extends ISubgraphInstance> subgraphInstances = subgraph.getInstances(Long.MIN_VALUE,
                    Long.MAX_VALUE, PropertySet.EmptyPropertySet, subgraph.getEdgeProperties(), false);

            //                        sliceManager.readInstances(subgraph,
            //                        Long.MIN_VALUE, Long.MAX_VALUE,
            //                        PropertySet.EmptyPropertySet, subgraph.getEdgeProperties());

            for (ISubgraphInstance instance : subgraphInstances) {

                Map<String, DescriptiveStatistics> statsMap = new HashMap<String, DescriptiveStatistics>();

                for (TemplateEdge edge : subgraph.edges()) {

                    ISubgraphObjectProperties edgeProps = instance.getPropertiesForEdge(edge.getId());

                    Integer isExist = (Integer) edgeProps.getValue(IS_EXIST_PROP);
                    if (isExist == 1) {
                        String[] vantageIps = ((String) edgeProps.getValue(VANTAGE_IP_PROP)).split(",");
                        String[] latencies = ((String) edgeProps.getValue(LATENCY_PROP)).split(",");
                        String[] hops = ((String) edgeProps.getValue(HOP_PROP)).split(",");

                        Integer[] vantangeIdx = vantageIpIndex(vantageIps);
                        if (vantangeIdx == null) {
                            continue;
                        }

                        for (int i : vantangeIdx) {

                            String vantage = vantageIps[i];
                            String latency = latencies[i];
                            String hop = hops[i];

                            double latency_num = Double.parseDouble(latency);
                            int hop_num = Integer.parseInt(hop);

                            if (latency_num >= 0 && hop_num == N) {
                                if (statsMap.containsKey(vantage)) {

                                    statsMap.get(vantage).addValue(latency_num);

                                } else {

                                    DescriptiveStatistics statistics = new DescriptiveStatistics();
                                    statistics.addValue(latency_num);
                                    statsMap.put(vantage, statistics);

                                }
                            }

                        }

                    }

                }

                int c = 0;
                StringBuffer msgBuffer = new StringBuffer();

                for (String v : statsMap.keySet()) {
                    c++;
                    DescriptiveStatistics statistics = statsMap.get(v);
                    String m = createMessageString(v, instance.getTimestampStart(), instance.getTimestampEnd(),
                            statistics.getStandardDeviation(), statistics.getMean(), statistics.getN());

                    if (c == statsMap.keySet().size()) {
                        msgBuffer.append(m);
                    } else {

                        msgBuffer.append(m).append("|");
                    }

                }

                SubGraphMessage subMsg = new SubGraphMessage(msgBuffer.toString().getBytes());

                sentMessage(partition.getId(), subMsg);

            }

        } catch (IOException e) {
            e.printStackTrace();
            throw new RuntimeException(e);
        }

    } else if (superStep == 1) {
        // OK, here every sub-graph will receive messages from its own partition.
        // Each message belongs to a given time span.
        Map<String, List<String[]>> vantageGroup = new HashMap<String, List<String[]>>();

        for (SubGraphMessage subGraphMessage : subGraphMessages) {

            String msgData = new String(subGraphMessage.getData());
            String[] dataParts = msgData.split("\\|"); // String.split takes a regex, so '|' must be escaped

            for (String data : dataParts) {
                String[] vantageParts = data.split(",");
                //Group by vantage point and startTime
                if (vantageGroup.containsKey(vantageParts[0] + "|" + vantageParts[1])) {
                    vantageGroup.get(vantageParts[0] + "|" + vantageParts[1]).add(vantageParts);
                } else {
                    ArrayList<String[]> arrayList = new ArrayList<String[]>();
                    arrayList.add(vantageParts);
                    vantageGroup.put(vantageParts[0] + "|" + vantageParts[1], arrayList);
                }

            }

        }

        for (String key : vantageGroup.keySet()) {

            if (!acquireLock(key)) {
                continue;
            }

            List<String[]> data = vantageGroup.get(key);

            double totalN = 0;
            double totalAvgVal = 0;

            double totalVar = 0;
            for (String[] d : data) {

                //average
                double mean = Double.parseDouble(d[4]);
                long sN = Long.parseLong(d[5]);
                totalN += sN;
                totalAvgVal += mean * sN;

                double sd = Double.parseDouble(d[3]);
                totalVar += ((double) sd * sd) / ((double) sN);

            }

            double avg = totalAvgVal / totalN;
            double newSD = Math.sqrt(totalVar);

            //create message
            //sent to all the partitions except me.
            String msg = key + "," + newSD + "," + avg + "," + totalN;

            for (int pid : partitions) {
                sentMessage(pid, new SubGraphMessage(msg.getBytes()));
            }

        }

    } else if (superStep >= 2) {

        if (partition.getId() == Collections.min(partitions)) {

            Map<String, List<String[]>> group = new HashMap<String, List<String[]>>();

            for (SubGraphMessage msg : subGraphMessages) {

                String data = new String(msg.getData());

                String[] dataParts = data.split(",");

                if (group.containsKey(dataParts[0])) {
                    group.get(dataParts[0]).add(dataParts);
                } else {
                    List<String[]> list = new ArrayList<String[]>();
                    list.add(dataParts);
                    group.put(dataParts[0], list);
                }

            }

            if (!acquireLock("" + partition.getId())) {
                voteToHalt();
                return;
            }

            PrintWriter writer;
            try {

                writer = new PrintWriter(new FileWriter("TimeSeriesStats.csv"));
            } catch (IOException e) {
                e.printStackTrace();
                throw new RuntimeException(e);
            }
            for (String key : group.keySet()) {

                List<String[]> data = group.get(key);

                double totalN = 0;
                double totalAvgVal = 0;

                double totalVar = 0;
                for (String[] d : data) {

                    //average

                    //key + "," + newSD + "," + avg + "," + totalN;
                    double mean = Double.parseDouble(d[2]);
                    long sN = Long.parseLong(d[3]);
                    totalN += sN;
                    totalAvgVal += mean * sN;

                    double sd = Double.parseDouble(d[1]);
                    totalVar += ((double) sd * sd) / ((double) sN);

                }

                double avg = totalAvgVal / totalN;
                double newSD = Math.sqrt(totalVar);

                String vantage = key.split("\\|")[0];
                String timeStamp = key.split("\\|")[1];

                log(writer, vantage, timeStamp, avg, newSD);

            }
            writer.flush();
            voteToHalt();

        }
    }

}
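
In super-step 1 the sample combines the per-subgraph (standard deviation, mean, count) triples by weighting each mean by its count and summing sd^2 / n before taking the square root. A self-contained sketch of just that aggregation step (the PartialStats holder and its fields are hypothetical; the arithmetic mirrors the loop above):

// Mirrors the aggregation used in super-step 1 above: means are weighted by their
// counts, and the per-group sd^2 / n terms are summed before taking the square root.
final class PartialStats {
    final double mean;
    final double sd;
    final long n;

    PartialStats(double mean, double sd, long n) {
        this.mean = mean;
        this.sd = sd;
        this.n = n;
    }

    static double[] combine(Iterable<PartialStats> parts) {
        double totalN = 0;
        double totalAvgVal = 0;
        double totalVar = 0;
        for (PartialStats p : parts) {
            totalN += p.n;
            totalAvgVal += p.mean * p.n;
            totalVar += (p.sd * p.sd) / p.n;
        }
        double avg = totalAvgVal / totalN;
        double newSD = Math.sqrt(totalVar);
        return new double[] { avg, newSD, totalN };
    }
}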