Example usage for org.apache.commons.math.stat.descriptive.moment Mean evaluate

Introduction

On this page you can find example usages of org.apache.commons.math.stat.descriptive.moment.Mean.evaluate.

Prototype

@Override
public double evaluate(final double[] values) 

Document

This default implementation calls #clear(), then invokes #increment(double) in a loop over the input array, and then uses #getResult() to compute the return value.
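
For orientation, the following minimal sketch (not taken from the sources listed below; the class name MeanEvaluateSketch is only illustrative) contrasts the one-shot evaluate(double[]) call with the #clear()/#increment(double)/#getResult() cycle described above. Both paths compute the arithmetic mean of the array.

import org.apache.commons.math.stat.descriptive.moment.Mean;

public class MeanEvaluateSketch {
    public static void main(String[] args) {
        double[] values = new double[] { 2.3, 5.4, 6.2, 7.3, 23.3 };

        // One-shot evaluation over the whole array
        Mean mean = new Mean();
        double direct = mean.evaluate(values);

        // Incremental use of the same statistic: reset, feed values one by
        // one, then read the result
        mean.clear();
        for (double v : values) {
            mean.increment(v);
        }
        double incremental = mean.getResult();

        System.out.println("evaluate: " + direct);          // ~8.9
        System.out.println("incremental: " + incremental);  // ~8.9
    }
}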

Usage

From source file:com.discursive.jccook.math.StatExample.java

public static void main(String[] args) {
    double[] values = new double[] { 2.3, 5.4, 6.2, 7.3, 23.3 };

    System.out.println("min: " + StatUtils.min(values));
    System.out.println("max: " + StatUtils.max(values));
    System.out.println("mean: " + StatUtils.mean(values));
    System.out.println("product: " + StatUtils.product(values));
    System.out.println("sum: " + StatUtils.sum(values));
    System.out.println("variance: " + StatUtils.variance(values));

    // Measures from previous example
    Min min = new Min();
    System.out.println("min: " + min.evaluate(values));
    Max max = new Max();
    System.out.println("max: " + max.evaluate(values));
    Mean mean = new Mean();
    System.out.println("mean: " + mean.evaluate(values));
    Product product = new Product();
    System.out.println("product: " + product.evaluate(values));
    Sum sum = new Sum();
    System.out.println("sum: " + sum.evaluate(values));
    Variance variance = new Variance();
    System.out.println("variance: " + variance.evaluate(values));

    // New measures
    Percentile percentile = new Percentile();
    System.out.println("80 percentile value: " + percentile.evaluate(values, 80.0));
    GeometricMean geoMean = new GeometricMean();
    System.out.println("geometric mean: " + geoMean.evaluate(values));
    StandardDeviation stdDev = new StandardDeviation();
    System.out.println("standard dev: " + stdDev.evaluate(values));
    Skewness skewness = new Skewness();
    System.out.println("skewness: " + skewness.evaluate(values));
    Kurtosis kurtosis = new Kurtosis();
    System.out.println("kurtosis: " + kurtosis.evaluate(values));

}

From source file:jCloisterZone.CarcassonneEnvironment.java

public static void main(String[] args) {
    int repetitions = 100;
    double[] scores = new double[repetitions];

    RRLJCloisterClient client = new LocalCarcassonneClient("config.ini");
    ServerIF server = null;
    Game game = client.getGame();
    Player firstPlayer = null;
    ArrayList<PlayerSlot> slots = new ArrayList<PlayerSlot>();
    for (int r = 0; r < repetitions; r++) {
        client.createGame();
        if (game == null) {
            server = new LocalCarcassonneServer(client.getGame());
            PlayerSlot slot = new PlayerSlot(0, PlayerSlot.SlotType.AI, "RANDOM" + 0, client.getClientId());
            slot.setAiClassName(RandomAIPlayer.class.getName());
            slots.add(slot);
            for (int j = 1; j < Integer.parseInt(args[0]); j++) {
                slot = new PlayerSlot(j, PlayerSlot.SlotType.AI, "AI" + j, client.getClientId());
                slot.setAiClassName(LegacyAiPlayer.class.getName());
                slots.add(slot);
            }
            game = client.getGame();
        } else {
            // Reset the UIs
            server.stopGame();
            game.clearUserInterface();

            // Clear the slots and re-add them.
            for (int i = 0; i < PlayerSlot.COUNT; i++) {
                server.updateSlot(new PlayerSlot(i), null);
            }
        }

        Collections.shuffle(slots);
        for (int i = 0; i < slots.size(); i++) {
            PlayerSlot slot = slots.get(i);
            PlayerSlot cloneSlot = new PlayerSlot(i, slot.getType(), slot.getNick(), slot.getOwner());
            cloneSlot.setAiClassName(slot.getAiClassName());
            server.updateSlot(cloneSlot, LegacyAiPlayer.supportedExpansions());
        }

        server.startGame();

        Phase phase = game.getPhase();

        // Cycle through (probably only once) to keep the game moving.
        while (phase != null && !phase.isEntered()) {
            // Modifying phases to proxyless versions
            if (phase.getClass().equals(CreateGamePhase.class))
                phase = game.getPhases().get(ProxylessCreateGamePhase.class);
            if (phase.getClass().equals(DrawPhase.class))
                phase = game.getPhases().get(ProxylessDrawPhase.class);

            phase.setEntered(true);
            phase.enter();
            phase = game.getPhase();

            if (game.getTurnPlayer().getNick().equals("RANDOM0"))
                firstPlayer = game.getTurnPlayer();
        }
        int score = firstPlayer.getPoints();
        scores[r] = score;
        System.out.println(score);
    }

    Mean m = new Mean();
    StandardDeviation sd = new StandardDeviation();
    System.out.println("Mean: " + m.evaluate(scores) + ", SD: " + sd.evaluate(scores));
}

From source file:com.srotya.sidewinder.core.analytics.TestMathUtils.java

@Test
public void testMean() {
    double[] a = new double[] { 2, 3, 4, 5, 6 };
    double mean = MathUtils.mean(a);
    Mean cmean = new Mean();
    assertEquals(cmean.evaluate(a), mean, 0.0001);
}

From source file:net.sf.jdmf.util.MathCalculator.java

/**
 * Calculates the mean of all attribute values.
 *
 * @param attributeValues attribute values
 * @return the mean
 */
public Double calculateMean(Comparable[] attributeValues) {
    Mean mean = new Mean();

    Double evaluatedMean = mean.evaluate(convertToPrimitives(attributeValues));

    log.debug("mean = " + evaluatedMean);

    return evaluatedMean;
}

From source file:edu.cornell.med.icb.learning.MinMaxScalingRowProcessor.java

private void observeStatistics(final MutableString featureId, final int featureIndex,
        final double[] trimmedArray) {
    final double min = getMin(trimmedArray);
    final double max = getMax(trimmedArray);
    final Mean meanCalculator = new Mean();

    final double mean = meanCalculator.evaluate(trimmedArray);
    final double range = max - min;

    featureIndex2ScaleMean[featureIndex] = mean;
    featureIndex2ScaleRange[featureIndex] = range;
    if (featureId != null) {
        probesetScaleMeanMap.put(featureId, mean);
        probesetScaleRangeMap.put(featureId, range);
    }
    if (LOG.isTraceEnabled()) {
        LOG.trace(String.format("training, featureIndex/columnId %d/%s lower: %f higher %f mean %f ",
                featureIndex, featureId, min, max, mean));
    }
}

From source file:fr.ens.transcriptome.corsen.util.StatTest.java

public void testMean() {

    Mean mean = new Mean();

    for (int i = 0; i < 1000; i++) {

        List<DataDouble> list = generate();
        assertEquals(mean.evaluate(Stats.toDouble(list)), Stats.mean(list));
    }
}

From source file:cerrla.ElitesData.java

/**
 * Get the average value of the elite samples.
 *
 * @return The average value of the elites.
 */
public Double getMeanEliteValue() {
    if (elitesValues_.isEmpty())
        return null;
    double[] values = new double[elitesValues_.size()];
    int i = 0;
    for (Double val : elitesValues_)
        values[i++] = val;
    Mean m = new Mean();
    return m.evaluate(values);
}
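
The copy loop above is the usual way to turn a Collection<Double> into the double[] that evaluate expects. As a hypothetical alternative sketch, assuming Commons Lang's org.apache.commons.lang.ArrayUtils is on the classpath and elitesValues_ is the same field as above, the conversion can be done in one call:

public Double getMeanEliteValue() {
    if (elitesValues_.isEmpty())
        return null;
    // ArrayUtils.toPrimitive unboxes the Double[] into a double[]
    double[] values = ArrayUtils.toPrimitive(
            elitesValues_.toArray(new Double[elitesValues_.size()]));
    return new Mean().evaluate(values);
}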

From source file:cerrla.Performance.java

/**
 * Records performance scores using sliding windows of results.
 *
 * @param currentEpisode
 *            The current episode.
 */
public void recordPerformanceScore(int currentEpisode) {
    if (recentScores_.isEmpty())
        return;
    // Transform the queues into arrays
    double[] vals = new double[recentScores_.size()];
    int i = 0;
    for (Double val : recentScores_)
        vals[i++] = val.doubleValue();
    double[] envSDs = new double[internalSDs_.size()];
    i = 0;
    for (Double envSD : internalSDs_)
        envSDs[i++] = envSD.doubleValue();

    Mean m = new Mean();
    StandardDeviation sd = new StandardDeviation();
    double mean = m.evaluate(vals);
    double meanDeviation = sd.evaluate(envSDs) * CONVERGENCE_PERCENT_BUFFER;

    Double[] details = new Double[PerformanceDetails.values().length];
    details[PerformanceDetails.EPISODE.ordinal()] = Double.valueOf(currentEpisode);
    details[PerformanceDetails.MEAN.ordinal()] = mean;
    details[PerformanceDetails.SD.ordinal()] = sd.evaluate(vals);
    performanceDetails_.put(currentEpisode, details);

    // Output current means
    if (ProgramArgument.SYSTEM_OUTPUT.booleanValue() && !frozen_) {
        DecimalFormat formatter = new DecimalFormat("#0.00");
        String meanString = formatter.format(mean);
        String sdString = formatter.format(meanDeviation);
        System.out.println("Average performance: " + meanString + " " + SD_SYMBOL + " " + sdString);
    }
    if (frozen_) {
        System.out.println(currentEpisode + ": " + details[PerformanceDetails.MEAN.ordinal()]);
    }
}

From source file:cerrla.Performance.java

/**
 * Saves the performance to file and outputs it. Saves to two files: one
 * with a breakdown of the generators, and another with purely episodic
 * performances.
 * 
 * @param policyGenerator
 *            The policy generator to save the distributions from.
 * @param perfFile
 *            The performance file to save to.
 * @param finalWrite
 *            If this write was the final write for the run.
 */
private void savePerformance(PolicyGenerator policyGenerator, File perfFile, boolean finalWrite)
        throws Exception {
    // TODO May be memory leak around here somewhere.
    if (performanceDetails_.isEmpty())
        return;

    FileWriter wr = null;
    BufferedWriter buf = null;
    int lastKey = performanceDetails_.lastKey();
    Double[] lastDetails = performanceDetails_.get(lastKey);

    if (Config.getInstance().getGeneratorFile() == null) {
        // If the file has just been created, add the arguments to the head
        // of the file
        boolean newFile = perfFile.createNewFile();

        wr = new FileWriter(perfFile, true);
        buf = new BufferedWriter(wr);

        // If the file is fresh, add the program args to the top
        if (newFile)
            Config.writeFileHeader(buf, policyGenerator.getGoalCondition());

        policyGenerator.saveGenerators(buf, finalWrite);
        buf.write("\n\n" + lastKey + "\t" + lastDetails[PerformanceDetails.MEAN.ordinal()] + "\n");
        buf.write("\n\n\n");

        if (finalWrite) {
            buf.write(Config.END_PERFORMANCE + "\n");
            buf.write("Total training time: "
                    + RRLExperiment.toTimeFormat(trainingEndTime_ - trainingStartTime_));
        }

        buf.close();
        wr.close();
    }

    // Writing the raw performance
    File rawNumbers = null;
    if (Config.getInstance().getGeneratorFile() == null)
        rawNumbers = new File(perfFile.getAbsoluteFile() + "raw");
    else
        rawNumbers = new File(perfFile.getAbsoluteFile() + "greedy");

    wr = new FileWriter(rawNumbers);
    buf = new BufferedWriter(wr);

    if (ProgramArgument.SYSTEM_OUTPUT.booleanValue() && policyGenerator.getGoalCondition().isMainGoal())
        System.out.println("Average episode scores:");

    if (finalWrite) {
        // Average the final elite scores
        Mean m = new Mean();
        double[] finalElites = new double[finalEliteScores_.size()];
        int i = 0;
        for (Double val : finalEliteScores_)
            finalElites[i++] = val;
        double meanBestVal = m.evaluate(finalElites);
        lastDetails[PerformanceDetails.ELITEMAX.ordinal()] = meanBestVal;
    }

    // Noting the raw numbers
    buf.write("Episode\tMean\tSD\tEliteMean\tEliteMax\tNumSlots\tNumRules\tN\tConvergence\n");
    for (Integer episode : performanceDetails_.keySet()) {
        Double[] details = performanceDetails_.get(episode);
        String performanceData = episode + "\t" + details[PerformanceDetails.MEAN.ordinal()] + "\t"
                + details[PerformanceDetails.SD.ordinal()] + "\t"
                + details[PerformanceDetails.ELITEMEAN.ordinal()] + "\t"
                + details[PerformanceDetails.ELITEMAX.ordinal()] + "\t"
                + details[PerformanceDetails.NUMSLOTS.ordinal()] + "\t"
                + details[PerformanceDetails.NUMRULES.ordinal()] + "\t"
                + details[PerformanceDetails.POPULATION.ordinal()] + "\t"
                + details[PerformanceDetails.CONVERGENCE.ordinal()] + "\t" + "\n";
        buf.write(performanceData);

        if (ProgramArgument.SYSTEM_OUTPUT.booleanValue() && policyGenerator.getGoalCondition().isMainGoal()) {
            System.out.println(episode + "\t" + details[PerformanceDetails.MEAN.ordinal()] + "\t" + SD_SYMBOL
                    + "\t" + details[PerformanceDetails.SD.ordinal()]);
        }
    }

    buf.close();
    wr.close();

    //      if (Config.getInstance().getGeneratorFile() == null) {
    //         // Writing the mutation tree
    //         File mutationTreeFile = new File(perfFile.getAbsoluteFile()
    //               + "mutation");
    //
    //         wr = new FileWriter(mutationTreeFile);
    //         buf = new BufferedWriter(wr);
    //         // policyGenerator.saveMutationTree(buf);
    //
    //         buf.close();
    //         wr.close();
    //      }
}

From source file:de.tudarmstadt.ukp.dkpro.tc.mallet.report.MalletBatchCrossValidationReport.java

@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        String name = BatchTask.class.getSimpleName() + "CrossValidation";
        // one CV batch (which internally ran numFolds times)
        if (subcontext.getLabel().startsWith(name)) {
            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();

            File eval = store.getStorageFolder(subcontext.getId(), EVAL_FILE_NAME + SUFFIX_CSV);

            Map<String, String> resultMap = new HashMap<String, String>();

            String[][] evalMatrix = null;

            int i = 0;
            for (String line : FileUtils.readLines(eval)) {
                String[] tokenizedLine = StrTokenizer.getCSVInstance(line).getTokenArray();
                if (evalMatrix == null) {
                    evalMatrix = new String[FileUtils.readLines(eval).size()][tokenizedLine.length];
                }
                evalMatrix[i] = tokenizedLine;
                i++;
            }

            // columns
            for (int j = 0; j < evalMatrix[0].length; j++) {
                String header = evalMatrix[0][j];
                String[] vals = new String[evalMatrix.length - 1];
                // rows
                for (int k = 1; k < evalMatrix.length; k++) {
                    if (evalMatrix[k][j].equals("null")) {
                        vals[k - 1] = String.valueOf(0.);
                    } else {
                        vals[k - 1] = evalMatrix[k][j];
                    }
                }
                Mean mean = new Mean();
                Sum sum = new Sum();
                StandardDeviation std = new StandardDeviation();

                double[] dVals = new double[vals.length];
                Set<String> sVals = new HashSet<String>();
                for (int k = 0; k < vals.length; k++) {
                    try {
                        dVals[k] = Double.parseDouble(vals[k]);
                        sVals = null;
                    } catch (NumberFormatException e) {
                        dVals = null;
                        sVals.add(vals[k]);
                    }
                }

                if (dVals != null) {
                    if (nonAveragedResultsMeasures.contains(header)) {
                        resultMap.put(header, String.valueOf(sum.evaluate(dVals)));
                    } else {
                        resultMap.put(header, String.valueOf(mean.evaluate(dVals)) + "\u00B1"
                                + String.valueOf(std.evaluate(dVals)));
                    }
                } else {
                    if (sVals.size() > 1) {
                        resultMap.put(header, "---");
                    } else {
                        resultMap.put(header, vals[0]);
                    }
                }
            }

            String key = getKey(discriminatorsMap);

            List<Double> results;
            if (key2resultValues.get(key) == null) {
                results = new ArrayList<Double>();
            } else {
                results = key2resultValues.get(key);

            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            for (String disc : discriminatorsMap.keySet()) {
                if (!ReportUtils.containsExcludePattern(disc, discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc, discriminatorsMap.get(disc));
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));

    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());

    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}