Example usage for org.apache.commons.math.stat.descriptive.summary.Sum: the Sum() constructor

Introduction

On this page you can find example usage for the org.apache.commons.math.stat.descriptive.summary.Sum no-argument constructor, Sum().

Prototype

public Sum() 

Document

Create a Sum instance
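
A minimal sketch of the two common ways to use a Sum instance: batch evaluation over an array, and storeless accumulation via increment() and getResult(). This sketch is not taken from any of the example files below.

import org.apache.commons.math.stat.descriptive.summary.Sum;

public class SumSketch {
    public static void main(String[] args) {
        double[] values = { 1.0, 2.0, 3.0 };

        // batch evaluation: sums the whole array in one call
        Sum batch = new Sum();
        System.out.println("sum: " + batch.evaluate(values)); // 6.0

        // storeless accumulation: feed values one at a time
        Sum running = new Sum();
        for (double v : values) {
            running.increment(v);
        }
        System.out.println("running sum: " + running.getResult()); // 6.0
        System.out.println("count: " + running.getN());            // 3
    }
}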

Usage

From source file:com.discursive.jccook.math.StatExample.java

public static void main(String[] args) {
    double[] values = new double[] { 2.3, 5.4, 6.2, 7.3, 23.3 };

    System.out.println("min: " + StatUtils.min(values));
    System.out.println("max: " + StatUtils.max(values));
    System.out.println("mean: " + StatUtils.mean(values));
    System.out.println("product: " + StatUtils.product(values));
    System.out.println("sum: " + StatUtils.sum(values));
    System.out.println("variance: " + StatUtils.variance(values));

    // Measures from previous example
    Min min = new Min();
    System.out.println("min: " + min.evaluate(values));
    Max max = new Max();
    System.out.println("max: " + max.evaluate(values));
    Mean mean = new Mean();
    System.out.println("mean: " + mean.evaluate(values));
    Product product = new Product();
    System.out.println("product: " + product.evaluate(values));
    Sum sum = new Sum();
    System.out.println("sum: " + sum.evaluate(values));
    Variance variance = new Variance();
    System.out.println("variance: " + variance.evaluate(values));

    // New measures
    Percentile percentile = new Percentile();
    System.out.println("80 percentile value: " + percentile.evaluate(values, 80.0));
    GeometricMean geoMean = new GeometricMean();
    System.out.println("geometric mean: " + geoMean.evaluate(values));
    StandardDeviation stdDev = new StandardDeviation();
    System.out.println("standard dev: " + stdDev.evaluate(values));
    Skewness skewness = new Skewness();
    System.out.println("skewness: " + skewness.evaluate(values));
    Kurtosis kurtosis = new Kurtosis();
    System.out.println("kurtosis: " + kurtosis.evaluate(values));

}
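
In addition to whole-array evaluation, Sum (like any UnivariateStatistic) can be evaluated over a sub-range of the array. A small sketch, not part of the example above:

double[] values = new double[] { 2.3, 5.4, 6.2, 7.3, 23.3 };
Sum sum = new Sum();
// sum only the three values starting at index 1: 5.4 + 6.2 + 7.3
System.out.println("partial sum: " + sum.evaluate(values, 1, 3)); // approx. 18.9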

From source file:de.tudarmstadt.ukp.dkpro.tc.mallet.report.MalletBatchCrossValidationReport.java

@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        String name = BatchTask.class.getSimpleName() + "CrossValidation";
        // one CV batch (which internally ran numFolds times)
        if (subcontext.getLabel().startsWith(name)) {
            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();

            File eval = store.getStorageFolder(subcontext.getId(), EVAL_FILE_NAME + SUFFIX_CSV);

            Map<String, String> resultMap = new HashMap<String, String>();

            String[][] evalMatrix = null;

            int i = 0;
            for (String line : FileUtils.readLines(eval)) {
                String[] tokenizedLine = StrTokenizer.getCSVInstance(line).getTokenArray();
                if (evalMatrix == null) {
                    evalMatrix = new String[FileUtils.readLines(eval).size()][tokenizedLine.length];
                }
                evalMatrix[i] = tokenizedLine;
                i++;
            }

            // columns
            for (int j = 0; j < evalMatrix[0].length; j++) {
                String header = evalMatrix[0][j];
                String[] vals = new String[evalMatrix.length - 1];
                // rows
                for (int k = 1; k < evalMatrix.length; k++) {
                    if (evalMatrix[k][j].equals("null")) {
                        vals[k - 1] = String.valueOf(0.);
                    } else {
                        vals[k - 1] = evalMatrix[k][j];
                    }
                }
                Mean mean = new Mean();
                Sum sum = new Sum();
                StandardDeviation std = new StandardDeviation();

                double[] dVals = new double[vals.length];
                Set<String> sVals = new HashSet<String>();
                for (int k = 0; k < vals.length; k++) {
                    try {
                        dVals[k] = Double.parseDouble(vals[k]);
                        sVals = null;
                    } catch (NumberFormatException e) {
                        dVals = null;
                        sVals.add(vals[k]);
                    }
                }

                if (dVals != null) {
                    if (nonAveragedResultsMeasures.contains(header)) {
                        resultMap.put(header, String.valueOf(sum.evaluate(dVals)));
                    } else {
                        resultMap.put(header, String.valueOf(mean.evaluate(dVals)) + "\u00B1"
                                + String.valueOf(std.evaluate(dVals)));
                    }
                } else {
                    if (sVals.size() > 1) {
                        resultMap.put(header, "---");
                    } else {
                        resultMap.put(header, vals[0]);
                    }
                }
            }

            String key = getKey(discriminatorsMap);

            List<Double> results;
            if (key2resultValues.get(key) == null) {
                results = new ArrayList<Double>();
            } else {
                results = key2resultValues.get(key);

            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            for (String disc : discriminatorsMap.keySet()) {
                if (!ReportUtils.containsExcludePattern(disc, discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc, discriminatorsMap.get(disc));
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));

    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());

    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}
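
The part of this report that uses Sum is the per-column aggregation: numeric per-fold results are summed for the measures listed in nonAveragedResultsMeasures and otherwise reported as mean ± standard deviation. A standalone sketch of just that aggregation step (foldValues and sumColumn are made-up names for illustration, not part of the report code):

import org.apache.commons.math.stat.descriptive.moment.Mean;
import org.apache.commons.math.stat.descriptive.moment.StandardDeviation;
import org.apache.commons.math.stat.descriptive.summary.Sum;

public class FoldAggregationSketch {
    public static void main(String[] args) {
        double[] foldValues = { 0.81, 0.79, 0.84 }; // hypothetical per-fold results
        boolean sumColumn = false;                  // true for non-averaged measures

        String cell;
        if (sumColumn) {
            cell = String.valueOf(new Sum().evaluate(foldValues));
        } else {
            cell = new Mean().evaluate(foldValues) + "\u00B1"
                    + new StandardDeviation().evaluate(foldValues);
        }
        System.out.println(cell); // e.g. 0.813...±0.025...
    }
}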

From source file:de.tudarmstadt.ukp.dkpro.tc.crfsuite.CRFSuiteBatchCrossValidationReport.java

@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        String name = ExperimentCrossValidation.class.getSimpleName();
        // one CV batch (which internally ran numFolds times)
        if (subcontext.getLabel().startsWith(name)) {
            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();

            File eval = store.getStorageFolder(subcontext.getId(), EVAL_FILE_NAME + SUFFIX_CSV);

            Map<String, String> resultMap = new HashMap<String, String>();

            String[][] evalMatrix = null;

            int i = 0;
            for (String line : FileUtils.readLines(eval)) {
                String[] tokenizedLine = StrTokenizer.getCSVInstance(line).getTokenArray();
                if (evalMatrix == null) {
                    evalMatrix = new String[FileUtils.readLines(eval).size()][tokenizedLine.length];
                }
                evalMatrix[i] = tokenizedLine;
                i++;
            }

            // columns
            for (int j = 0; j < evalMatrix[0].length; j++) {
                String header = evalMatrix[0][j];
                String[] vals = new String[evalMatrix.length - 1];
                // rows
                for (int k = 1; k < evalMatrix.length; k++) {
                    if (evalMatrix[k][j].equals("null")) {
                        vals[k - 1] = String.valueOf(0.);
                    } else {
                        vals[k - 1] = evalMatrix[k][j];
                    }

                }
                Mean mean = new Mean();
                Sum sum = new Sum();
                StandardDeviation std = new StandardDeviation();

                double[] dVals = new double[vals.length];
                Set<String> sVals = new HashSet<String>();
                for (int k = 0; k < vals.length; k++) {
                    try {
                        dVals[k] = Double.parseDouble(vals[k]);
                        sVals = null;
                    } catch (NumberFormatException e) {
                        dVals = null;
                        sVals.add(vals[k]);
                    }
                }

                if (dVals != null) {
                    if (nonAveragedResultsMeasures.contains(header)) {
                        resultMap.put(header + foldSum, String.valueOf(sum.evaluate(dVals)));
                    } else {
                        resultMap.put(header + foldAveraged, String.valueOf(
                                mean.evaluate(dVals) + "\u00B1" + String.valueOf(std.evaluate(dVals))));
                    }
                } else {
                    if (sVals.size() > 1) {
                        resultMap.put(header, "---");
                    } else {
                        resultMap.put(header, vals[0]);
                    }
                }
            }

            String key = getKey(discriminatorsMap);

            List<Double> results;
            if (key2resultValues.get(key) == null) {
                results = new ArrayList<Double>();
            } else {
                results = key2resultValues.get(key);

            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            for (String disc : discriminatorsMap.keySet()) {
                if (!ReportUtils.containsExcludePattern(disc, discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc, discriminatorsMap.get(disc));
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());

    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}

From source file:org.apache.accumulo.core.util.Stat.java

public Stat() {
    min = new Min();
    max = new Max();
    sum = new Sum();
    mean = new Mean();
    sd = new StandardDeviation();

    stats = new StorelessUnivariateStatistic[] { min, max, sum, mean, sd };
}
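
Since every statistic in the stats array is a StorelessUnivariateStatistic, a single loop can update all of them per observation, which is what the surrounding Stat class is built for. A standalone sketch of that pattern (the data values are made up; this is not Accumulo code):

import org.apache.commons.math.stat.descriptive.StorelessUnivariateStatistic;
import org.apache.commons.math.stat.descriptive.moment.Mean;
import org.apache.commons.math.stat.descriptive.moment.StandardDeviation;
import org.apache.commons.math.stat.descriptive.rank.Max;
import org.apache.commons.math.stat.descriptive.rank.Min;
import org.apache.commons.math.stat.descriptive.summary.Sum;

public class StorelessStatsSketch {
    public static void main(String[] args) {
        Sum sum = new Sum();
        Mean mean = new Mean();
        StorelessUnivariateStatistic[] stats = { new Min(), new Max(), sum, mean,
                new StandardDeviation() };

        // feed each observation to every statistic at once
        for (double value : new double[] { 3.0, 1.0, 4.0, 1.0, 5.0 }) {
            for (StorelessUnivariateStatistic statistic : stats) {
                statistic.increment(value);
            }
        }

        System.out.println("sum: " + sum.getResult());   // 14.0
        System.out.println("mean: " + mean.getResult()); // 2.8
    }
}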

From source file:org.beedraz.semantics_II.expression.number.real.double64.stat.DoubleSetSumBeed.java

/**
 * @post  getSource() == null;
 * @post  getDouble() == null;
 * @post  owner != null ? owner.registerAggregateElement(this);
 */
public DoubleSetSumBeed(AggregateBeed owner) {
    super(new Sum(), owner);
}

From source file:org.NooLab.math3.stat.inference.OneWayAnova.java

/**
 * This method actually does the calculations (except P-value).
 *
 * @param categoryData <code>Collection</code> of <code>double[]</code>
 * arrays each containing data for one category
 * @return computed AnovaStats
 * @throws NullArgumentException if <code>categoryData</code> is <code>null</code>
 * @throws DimensionMismatchException if the length of the <code>categoryData</code>
 * array is less than 2 or a contained <code>double[]</code> array does not contain
 * at least two values
 */
private AnovaStats anovaStats(final Collection<double[]> categoryData)
        throws NullArgumentException, DimensionMismatchException {

    if (categoryData == null) {
        throw new NullArgumentException();
    }

    // check if we have enough categories
    if (categoryData.size() < 2) {
        throw new DimensionMismatchException(LocalizedFormats.TWO_OR_MORE_CATEGORIES_REQUIRED,
                categoryData.size(), 2);
    }

    // check if each category has enough data and all is double[]
    for (double[] array : categoryData) {
        if (array.length <= 1) {
            throw new DimensionMismatchException(LocalizedFormats.TWO_OR_MORE_VALUES_IN_CATEGORY_REQUIRED,
                    array.length, 2);
        }
    }

    int dfwg = 0;
    double sswg = 0;
    Sum totsum = new Sum();
    SumOfSquares totsumsq = new SumOfSquares();
    int totnum = 0;

    for (double[] data : categoryData) {

        Sum sum = new Sum();
        SumOfSquares sumsq = new SumOfSquares();
        int num = 0;

        for (int i = 0; i < data.length; i++) {
            double val = data[i];

            // within category
            num++;
            sum.increment(val);
            sumsq.increment(val);

            // for all categories
            totnum++;
            totsum.increment(val);
            totsumsq.increment(val);
        }
        dfwg += num - 1;
        double ss = sumsq.getResult() - sum.getResult() * sum.getResult() / num;
        sswg += ss;
    }
    double sst = totsumsq.getResult() - totsum.getResult() * totsum.getResult() / totnum;
    double ssbg = sst - sswg;
    int dfbg = categoryData.size() - 1;
    double msbg = ssbg / dfbg;
    double mswg = sswg / dfwg;
    double F = msbg / mswg;

    return new AnovaStats(dfbg, dfwg, F);
}
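
The within- and total sum-of-squares computations above use the shortcut identity sum((x - mean)^2) = sum(x^2) - (sum(x))^2 / n. A small sketch, not part of OneWayAnova, that checks the identity with Sum and SumOfSquares:

import org.apache.commons.math.stat.descriptive.moment.Variance;
import org.apache.commons.math.stat.descriptive.summary.Sum;
import org.apache.commons.math.stat.descriptive.summary.SumOfSquares;

public class SumOfSquaresIdentitySketch {
    public static void main(String[] args) {
        double[] data = { 2.0, 4.0, 6.0 };

        Sum sum = new Sum();
        SumOfSquares sumsq = new SumOfSquares();
        for (double v : data) {
            sum.increment(v);
            sumsq.increment(v);
        }

        int n = data.length;
        // shortcut form used in anovaStats()
        double shortcut = sumsq.getResult() - sum.getResult() * sum.getResult() / n;
        // direct form: sum of squared deviations = (n - 1) * sample variance
        double direct = (n - 1) * new Variance().evaluate(data);

        System.out.println(shortcut + " == " + direct); // both print 8.0
    }
}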