Example usage for org.apache.commons.math.stat.descriptive.moment Mean Mean

List of usage examples for org.apache.commons.math.stat.descriptive.moment Mean Mean

Introduction

On this page you can find example usage of org.apache.commons.math.stat.descriptive.moment Mean Mean.

Prototype

public Mean() 

Source Link

Document

Constructs a Mean.

Usage

From source file:de.tudarmstadt.ukp.dkpro.tc.mallet.report.MalletBatchCrossValidationReport.java

/**
 * Aggregates the evaluation results of all cross-validation batch sub-tasks
 * into an overview table and stores it in compact and full CSV/Excel form.
 * Numeric measure columns are reported as "mean±standard deviation" (or as a
 * sum for measures listed in nonAveragedResultsMeasures); non-numeric columns
 * are reported verbatim if constant across folds, "---" otherwise.
 */
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        String name = BatchTask.class.getSimpleName() + "CrossValidation";
        // one CV batch (which internally ran numFolds times)
        if (subcontext.getLabel().startsWith(name)) {
            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();

            File eval = store.getStorageFolder(subcontext.getId(), EVAL_FILE_NAME + SUFFIX_CSV);

            Map<String, String> resultMap = new HashMap<String, String>();

            // Read the evaluation file once; the original re-read the whole
            // file on the first loop iteration just to size the matrix.
            List<String> evalLines = FileUtils.readLines(eval);
            if (evalLines.isEmpty()) {
                // Nothing to aggregate for this subcontext (the original code
                // would have thrown an NPE below on an empty file).
                continue;
            }
            String[][] evalMatrix = new String[evalLines.size()][];
            int i = 0;
            for (String line : evalLines) {
                evalMatrix[i] = StrTokenizer.getCSVInstance(line).getTokenArray();
                i++;
            }

            // columns
            for (int j = 0; j < evalMatrix[0].length; j++) {
                String header = evalMatrix[0][j];
                String[] vals = new String[evalMatrix.length - 1];
                // rows; a literal "null" cell counts as zero
                for (int k = 1; k < evalMatrix.length; k++) {
                    if (evalMatrix[k][j].equals("null")) {
                        vals[k - 1] = String.valueOf(0.);
                    } else {
                        vals[k - 1] = evalMatrix[k][j];
                    }
                }
                Mean mean = new Mean();
                Sum sum = new Sum();
                StandardDeviation std = new StandardDeviation();

                // Try to parse the whole column numerically. The original code
                // nulled out dVals/sVals on the first failure/success, which
                // caused an NPE on columns mixing numeric and non-numeric
                // values; a flag avoids that.
                double[] dVals = new double[vals.length];
                boolean isNumericColumn = true;
                for (int k = 0; k < vals.length; k++) {
                    try {
                        dVals[k] = Double.parseDouble(vals[k]);
                    } catch (NumberFormatException e) {
                        isNumericColumn = false;
                        break;
                    }
                }

                if (isNumericColumn) {
                    if (nonAveragedResultsMeasures.contains(header)) {
                        resultMap.put(header, String.valueOf(sum.evaluate(dVals)));
                    } else {
                        resultMap.put(header, String.valueOf(mean.evaluate(dVals)) + "\u00B1"
                                + String.valueOf(std.evaluate(dVals)));
                    }
                } else {
                    // Non-numeric column: report the value if it is constant
                    // across folds, "---" otherwise.
                    Set<String> sVals = new HashSet<String>();
                    for (String val : vals) {
                        sVals.add(val);
                    }
                    if (sVals.size() > 1) {
                        resultMap.put(header, "---");
                    } else {
                        resultMap.put(header, vals[0]);
                    }
                }
            }

            String key = getKey(discriminatorsMap);

            // NOTE(review): nothing is ever added to this list, so
            // key2resultValues only records which keys were seen; kept as-is
            // for backwards compatibility.
            List<Double> results;
            if (key2resultValues.get(key) == null) {
                results = new ArrayList<Double>();
            } else {
                results = key2resultValues.get(key);
            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            // Drop discriminators the report is configured to hide.
            for (String disc : discriminatorsMap.keySet()) {
                if (!ReportUtils.containsExcludePattern(disc, discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc, discriminatorsMap.get(disc));
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));

    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());

    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}

From source file:de.tudarmstadt.ukp.dkpro.tc.crfsuite.CRFSuiteBatchCrossValidationReport.java

/**
 * Aggregates the evaluation results of all cross-validation experiment
 * sub-tasks into an overview table and stores it in compact and full
 * CSV/Excel form. Numeric measure columns are reported as
 * "mean±standard deviation" (or as a sum for measures listed in
 * nonAveragedResultsMeasures); non-numeric columns are reported verbatim if
 * constant across folds, "---" otherwise.
 */
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        String name = ExperimentCrossValidation.class.getSimpleName();
        // one CV batch (which internally ran numFolds times)
        if (subcontext.getLabel().startsWith(name)) {
            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();

            File eval = store.getStorageFolder(subcontext.getId(), EVAL_FILE_NAME + SUFFIX_CSV);

            Map<String, String> resultMap = new HashMap<String, String>();

            // Read the evaluation file once; the original re-read the whole
            // file on the first loop iteration just to size the matrix.
            List<String> evalLines = FileUtils.readLines(eval);
            if (evalLines.isEmpty()) {
                // Nothing to aggregate for this subcontext (the original code
                // would have thrown an NPE below on an empty file).
                continue;
            }
            String[][] evalMatrix = new String[evalLines.size()][];
            int i = 0;
            for (String line : evalLines) {
                evalMatrix[i] = StrTokenizer.getCSVInstance(line).getTokenArray();
                i++;
            }

            // columns
            for (int j = 0; j < evalMatrix[0].length; j++) {
                String header = evalMatrix[0][j];
                String[] vals = new String[evalMatrix.length - 1];
                // rows; a literal "null" cell counts as zero
                for (int k = 1; k < evalMatrix.length; k++) {
                    if (evalMatrix[k][j].equals("null")) {
                        vals[k - 1] = String.valueOf(0.);
                    } else {
                        vals[k - 1] = evalMatrix[k][j];
                    }
                }
                Mean mean = new Mean();
                Sum sum = new Sum();
                StandardDeviation std = new StandardDeviation();

                // Try to parse the whole column numerically. The original code
                // nulled out dVals/sVals on the first failure/success, which
                // caused an NPE on columns mixing numeric and non-numeric
                // values; a flag avoids that.
                double[] dVals = new double[vals.length];
                boolean isNumericColumn = true;
                for (int k = 0; k < vals.length; k++) {
                    try {
                        dVals[k] = Double.parseDouble(vals[k]);
                    } catch (NumberFormatException e) {
                        isNumericColumn = false;
                        break;
                    }
                }

                if (isNumericColumn) {
                    if (nonAveragedResultsMeasures.contains(header)) {
                        resultMap.put(header + foldSum, String.valueOf(sum.evaluate(dVals)));
                    } else {
                        // The redundant outer String.valueOf(..) around the
                        // concatenation has been dropped; the output is
                        // byte-identical.
                        resultMap.put(header + foldAveraged, String.valueOf(mean.evaluate(dVals)) + "\u00B1"
                                + String.valueOf(std.evaluate(dVals)));
                    }
                } else {
                    // Non-numeric column: report the value if it is constant
                    // across folds, "---" otherwise.
                    Set<String> sVals = new HashSet<String>();
                    for (String val : vals) {
                        sVals.add(val);
                    }
                    if (sVals.size() > 1) {
                        resultMap.put(header, "---");
                    } else {
                        resultMap.put(header, vals[0]);
                    }
                }
            }

            String key = getKey(discriminatorsMap);

            // NOTE(review): nothing is ever added to this list, so
            // key2resultValues only records which keys were seen; kept as-is
            // for backwards compatibility.
            List<Double> results;
            if (key2resultValues.get(key) == null) {
                results = new ArrayList<Double>();
            } else {
                results = key2resultValues.get(key);
            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            // Drop discriminators the report is configured to hide.
            for (String disc : discriminatorsMap.keySet()) {
                if (!ReportUtils.containsExcludePattern(disc, discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc, discriminatorsMap.get(disc));
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());

    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}

From source file:net.sf.jdmf.util.MathCalculator.java

/**
 * Calculates the centroid of all given points in an nD space (assumes that
 * all points have n coordinates). Each coordinate of the centroid is the
 * mean of that coordinate over all points.
 *
 * @param points all points
 * @return the centroid of all given points
 */
public Vector<Double> calculateCentroid(List<Vector<Double>> points) {
    // One running mean per coordinate dimension.
    int dimensions = points.get(0).size();
    Mean[] coordinateMeans = new Mean[dimensions];
    for (int d = 0; d < dimensions; ++d) {
        coordinateMeans[d] = new Mean();
    }

    // Feed every point's coordinates into the corresponding mean.
    for (Vector<Double> point : points) {
        for (int d = 0; d < point.size(); ++d) {
            coordinateMeans[d].increment(point.get(d));
        }
    }

    Vector<Double> centroid = new Vector<Double>();
    for (Mean coordinateMean : coordinateMeans) {
        centroid.add(coordinateMean.getResult());
    }
    return centroid;
}

From source file:cerrla.Performance.java

/**
 * Records performance scores using sliding windows of results.
 * /*from  www  . jav a2  s  .  c  o  m*/
 * @param currentEpisode
 *            The current episode.
 */
public void recordPerformanceScore(int currentEpisode) {
    if (recentScores_.isEmpty())
        return;
    // Transform the queues into arrays
    double[] vals = new double[recentScores_.size()];
    int i = 0;
    for (Double val : recentScores_)
        vals[i++] = val.doubleValue();
    double[] envSDs = new double[internalSDs_.size()];
    i = 0;
    for (Double envSD : internalSDs_)
        envSDs[i++] = envSD.doubleValue();

    Mean m = new Mean();
    StandardDeviation sd = new StandardDeviation();
    double mean = m.evaluate(vals);
    double meanDeviation = sd.evaluate(envSDs) * CONVERGENCE_PERCENT_BUFFER;

    Double[] details = new Double[PerformanceDetails.values().length];
    details[PerformanceDetails.EPISODE.ordinal()] = Double.valueOf(currentEpisode);
    details[PerformanceDetails.MEAN.ordinal()] = mean;
    details[PerformanceDetails.SD.ordinal()] = sd.evaluate(vals);
    performanceDetails_.put(currentEpisode, details);

    // Output current means
    if (ProgramArgument.SYSTEM_OUTPUT.booleanValue() && !frozen_) {
        DecimalFormat formatter = new DecimalFormat("#0.00");
        String meanString = formatter.format(mean);
        String sdString = formatter.format(meanDeviation);
        System.out.println("Average performance: " + meanString + " " + SD_SYMBOL + " " + sdString);
    }
    if (frozen_) {
        System.out.println(currentEpisode + ": " + details[PerformanceDetails.MEAN.ordinal()]);
    }
}

From source file:cerrla.ElitesData.java

/**
 * Get the average value of the elite samples.
 * /*  w ww .  jav  a 2s  . com*/
 * @return The average value of the elites.
 */
public Double getMeanEliteValue() {
    if (elitesValues_.isEmpty())
        return null;
    double[] values = new double[elitesValues_.size()];
    int i = 0;
    for (Double val : elitesValues_)
        values[i++] = val;
    Mean m = new Mean();
    return m.evaluate(values);
}

From source file:com.joliciel.jochre.graphics.RowOfShapesImpl.java

/**
 * Returns the mean height of all shapes in this row, computing and caching
 * it on first access.
 */
@Override
public double getMeanHeight() {
    if (this.heightMean == null) {
        // Lazily build the cached mean from all shapes in this row.
        Mean meanOfHeights = new Mean();
        this.heightMean = meanOfHeights;
        for (Shape shape : this.getShapes()) {
            meanOfHeights.increment(shape.getHeight());
        }
    }
    return this.heightMean.getResult();
}

From source file:net.sf.katta.tool.loadtest.LoadTestMasterOperation.java

/**
 * Collects the per-node query results of the current load-test iteration,
 * appends per-query timing lines and one aggregated result line to the log
 * files, and then either triggers the next iteration or ends the test.
 *
 * @param context the master context used to queue follow-up operations
 * @param nodeResults the results reported by the node operations
 * @throws Exception declared by the interface; failures are logged and end
 *             the load test instead of propagating
 */
@Override
public void nodeOperationsComplete(MasterContext context, List<OperationResult> nodeResults) throws Exception {
    try {
        final int queryRate = calculateCurrentQueryRate();
        LOG.info("collecting results for iteration " + _currentIteration + " and query rate " + queryRate
                + " after " + (System.currentTimeMillis() - _currentIterationStartTime) + " ms ...");
        List<LoadTestQueryResult> queryResults = new ArrayList<LoadTestQueryResult>();
        for (OperationResult operationResult : nodeResults) {
            if (operationResult == null || operationResult.getUnhandledException() != null) {
                Exception rootException = null;
                if (operationResult != null) {
                    rootException = operationResult.getUnhandledException();
                }
                throw new IllegalStateException(
                        "at least one node operation did not completed properly: " + nodeResults,
                        rootException);
            }
            LoadTestNodeOperationResult nodeOperationResult = (LoadTestNodeOperationResult) operationResult;
            queryResults.addAll(nodeOperationResult.getQueryResults());
        }
        LOG.info("Received " + queryResults.size() + " queries, expected " + queryRate * _runTime / 1000);

        File statisticsFile = new File(_resultDir, "load-test-log-" + _startTime + ".log");
        File resultsFile = new File(_resultDir, "load-test-results-" + _startTime + ".log");
        Writer statisticsWriter = new OutputStreamWriter(new FileOutputStream(statisticsFile, true));
        Writer resultWriter = new OutputStreamWriter(new FileOutputStream(resultsFile, true));
        // Close the writers even if writing fails; the original leaked both
        // streams on any exception thrown before the close calls.
        try {
            if (_currentIteration == 0) {
                // print headers
                statisticsWriter.append("#queryRate \tnode \tstartTime \tendTime \telapseTime \tquery \n");
                resultWriter.append(
                        "#requestedQueryRate \tachievedQueryRate \tfiredQueries \tqueryErrors \tavarageQueryDuration \tstandardDeviation  \n");
            }
            try {
                StorelessUnivariateStatistic timeStandardDeviation = new StandardDeviation();
                StorelessUnivariateStatistic timeMean = new Mean();
                int errors = 0;

                for (LoadTestQueryResult result : queryResults) {
                    // A non-positive end time marks a query that never completed.
                    long elapsedTime = result.getEndTime() > 0 ? result.getEndTime() - result.getStartTime() : -1;
                    statisticsWriter.write(queryRate + "\t" + result.getNodeId() + "\t" + result.getStartTime()
                            + "\t" + result.getEndTime() + "\t" + elapsedTime + "\t" + result.getQuery() + "\n");
                    if (elapsedTime != -1) {
                        timeStandardDeviation.increment(elapsedTime);
                        timeMean.increment(elapsedTime);
                    } else {
                        ++errors;
                    }
                }
                resultWriter.write(queryRate + "\t" + ((double) queryResults.size() / (_runTime / 1000)) + "\t"
                        + queryResults.size() + "\t" + errors + "\t" + (int) timeMean.getResult() + "\t"
                        + (int) timeStandardDeviation.getResult() + "\n");
            } catch (IOException e) {
                throw new IllegalStateException("Failed to write statistics data.", e);
            }
            LOG.info("results written to " + resultsFile.getAbsolutePath());
            LOG.info("statistics written to " + statisticsFile.getAbsolutePath());
        } finally {
            try {
                statisticsWriter.close();
                resultWriter.close();
            } catch (IOException e) {
                LOG.warn("Failed to close statistics file.");
            }
        }
        if (queryRate + _step <= _endRate) {
            _currentIteration++;
            LOG.info("triggering next iteration " + _currentIteration);
            context.getMasterQueue().add(this);
        } else {
            LOG.info("finish load test in iteration " + _currentIteration + " after "
                    + (System.currentTimeMillis() - _startTime) + " ms");
            context.getProtocol().removeFlag(getName());
        }
    } catch (Exception e) {
        // Bug fix: the original swallowed the exception silently, making a
        // failed load test indistinguishable from a finished one in the logs.
        LOG.error("load test iteration " + _currentIteration + " failed", e);
        context.getProtocol().removeFlag(getName());
    }
}

From source file:edu.cornell.med.icb.learning.MinMaxScalingRowProcessor.java

/**
 * Records the min/max/mean scaling statistics observed for one feature
 * column during training.
 *
 * @param featureId the feature identifier, or null if none is available
 * @param featureIndex the index of the feature column
 * @param trimmedArray the observed values for this feature
 */
private void observeStatistics(final MutableString featureId, final int featureIndex,
        final double[] trimmedArray) {
    final double minimum = getMin(trimmedArray);
    final double maximum = getMax(trimmedArray);
    final double mean = new Mean().evaluate(trimmedArray);
    final double range = maximum - minimum;

    // Index-keyed statistics are always stored; id-keyed only when known.
    featureIndex2ScaleMean[featureIndex] = mean;
    featureIndex2ScaleRange[featureIndex] = range;
    if (featureId != null) {
        probesetScaleMeanMap.put(featureId, mean);
        probesetScaleRangeMap.put(featureId, range);
    }
    if (LOG.isTraceEnabled()) {
        LOG.trace(String.format("training, featureIndex/columnId %d/%s lower: %f higher %f mean %f ",
                featureIndex, featureId, minimum, maximum, mean));
    }
}

From source file:Covariance.java

/**
 * Computes the covariance between the two arrays, using the precomputed
 * means stored in {@code this.mean} at the given indices.
 *
 * <p>Array lengths must match and the common length must be at least 2.</p>
 *
 * @param xArray first data array
 * @param x_index index of the precomputed mean of xArray in this.mean
 * @param yArray second data array
 * @param y_index index of the precomputed mean of yArray in this.mean
 * @param biasCorrected if true, returned value will be bias-corrected
 * @return returns the covariance for the two arrays
 * @throws IllegalArgumentException if the arrays lengths do not match or
 * there is insufficient data
 */
public double covariance(final double[] xArray, int x_index, final double[] yArray, int y_index,
        boolean biasCorrected) throws IllegalArgumentException {
    double result = 0d;
    int length = xArray.length;
    if (length != yArray.length) {
        // Restored validation: the original silently returned 0 here despite
        // documenting an IllegalArgumentException.
        throw new IllegalArgumentException(
                "dimension mismatch: " + length + " != " + yArray.length);
    } else if (length < 2) {
        throw new IllegalArgumentException(
                "insufficient data: " + length + " values, need at least 2");
    } else {
        // Use the precomputed means instead of re-deriving them from the
        // arrays (the unused Mean instance has been removed).
        double xMean = this.mean[x_index];
        double yMean = this.mean[y_index];
        for (int i = 0; i < length; i++) {
            double xDev = xArray[i] - xMean;
            double yDev = yArray[i] - yMean;
            // Incremental update keeps the running covariance numerically stable.
            result += (xDev * yDev - result) / (i + 1);
        }
    }
    // Bug fix: the original unconditionally set biasCorrected = true here,
    // ignoring the caller's argument; the parameter is now honored.
    return biasCorrected ? result * ((double) length / (double) (length - 1)) : result;
}

From source file:fr.ens.transcriptome.corsen.calc.CorsenHistoryResults.java

/**
 * Get the mean of the median of Min Distances.
 *
 * @return the mean of the median of Min Distances
 */
public double getMeanOfMedianMinDistances() {
    final Mean meanCalculator = new Mean();
    return meanCalculator.evaluate(getDistances());
}