Example usage for org.apache.commons.math.stat.descriptive DescriptiveStatistics getSum

Introduction

On this page you can find example usages of org.apache.commons.math.stat.descriptive DescriptiveStatistics getSum.

Prototype

public double getSum() 

Document

Returns the sum of the values that have been added.
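
Before the full examples below, here is a minimal, self-contained sketch (illustrative only, not taken from the sources below; the class name GetSumDemo is made up) of what getSum returns after a few values have been added:

import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;

public class GetSumDemo {
    public static void main(String[] args) {
        DescriptiveStatistics stats = new DescriptiveStatistics();
        stats.addValue(1.5);
        stats.addValue(2.5);
        stats.addValue(4.0);
        // getSum() returns the sum of all values added so far
        System.out.println(stats.getSum()); // prints 8.0
    }
}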

Usage

From source file:de.unidue.langtech.teaching.rp.uimatools.Stopwatch.java

@Override
public void collectionProcessComplete() throws AnalysisEngineProcessException {
    super.collectionProcessComplete();

    if (isDownstreamTimer()) {
        getLogger().info("Results from Timer '" + timerName + "' after processing all documents.");

        DescriptiveStatistics statTimes = new DescriptiveStatistics();
        for (Long timeValue : times) {
            statTimes.addValue((double) timeValue / 1000);
        }
        double sum = statTimes.getSum();
        double mean = statTimes.getMean();
        double stddev = statTimes.getStandardDeviation();

        StringBuilder sb = new StringBuilder();
        sb.append("Estimate after processing " + times.size() + " documents.");
        sb.append("\n");

        Formatter formatter = new Formatter(sb, Locale.US);

        formatter.format("Aggregated time: %,.1fs\n", sum);
        formatter.format("Time / Document: %,.3fs (%,.3fs)\n", mean, stddev);

        formatter.close();

        getLogger().info(sb.toString());

        if (outputFile != null) {
            Properties props = new Properties();
            props.setProperty(KEY_SUM, "" + sum);
            props.setProperty(KEY_MEAN, "" + mean);
            props.setProperty(KEY_STDDEV, "" + stddev);
            // try-with-resources closes the stream even if store() fails;
            // FileNotFoundException is a subclass of IOException, so one catch suffices
            try (OutputStream out = new FileOutputStream(outputFile)) {
                props.store(out, "timer " + timerName + " result file");
            } catch (IOException e) {
                throw new AnalysisEngineProcessException(e);
            }
        }
    }
}

From source file:cal.binBased.BinMSnSpectrum.java

public double[] getBin_spectrum(int shift) {
    ArrayList<Double> bin_spec_al = new ArrayList<Double>();
    double binSize = (fragment_tolerance * 2), upperLimit = max_value + 0.00001;
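    // walk the m/z range in fixed-width bins (width = 2 * fragment_tolerance);
    // each bin aggregates the intensities of the peaks that fall inside it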
    for (double lowerLimit = min_value; lowerLimit < upperLimit; lowerLimit = lowerLimit + binSize) {
        double tmp_intensity_bin = 0;
        DescriptiveStatistics obj = new DescriptiveStatistics();
        for (Peak p : peakList) {
            double mz = p.getMz() + shift;
            if (mz >= lowerLimit && mz < lowerLimit + binSize) {
                obj.addValue(p.intensity);
            }
        }
        if (obj.getN() > 0) {
            if (intensities_sum_or_mean_or_median == 0) {
                tmp_intensity_bin = obj.getSum();
            } else if (intensities_sum_or_mean_or_median == 1) {
                tmp_intensity_bin = obj.getMean();
            } else if (intensities_sum_or_mean_or_median == 2) {
                tmp_intensity_bin = obj.getPercentile(50);
            }
        }
        // record this bin's aggregated intensity (zero if the bin is empty)
        bin_spec_al.add(tmp_intensity_bin);
    }
    // remember the bin count and copy the ArrayList into a primitive array
    bin_size = bin_spec_al.size();
    double[] bin_spectrum = new double[bin_spec_al.size()];
    for (int i = 0; i < bin_spec_al.size(); i++) {
        bin_spectrum[i] = bin_spec_al.get(i);
    }
    return bin_spectrum;
}

From source file:cal.binBased.BinMSnSpectrum.java

private ArrayList<double[]> prepareBinSpectra() {
    // first prepare bin-spectrum to be filled with zero
    int size = (2 * correctionFactor) + 1;
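    // one spectrum per shift in [-correctionFactor, +correctionFactor]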

    ArrayList<double[]> shiftedSpectra = new ArrayList<double[]>(size);
    for (int i = 0; i < size; i++) {
        double[] shiftedSpectrum = new double[bin_size];
        shiftedSpectra.add(shiftedSpectrum);
    }
    // now fill each bin spectrum with correct mz values.
    double binSize = (fragment_tolerance * 2), upperLimit = max_value + 0.00001;
    int current_index = 0;
    for (double lowerLimit = min_value + correctionFactor; lowerLimit < upperLimit
            - correctionFactor; lowerLimit = lowerLimit + binSize) {
        double tmp_intensity_bin = 0;
        DescriptiveStatistics obj = new DescriptiveStatistics();
        for (Peak p : peakList) {
            double mz = p.getMz();
            if (mz >= lowerLimit && mz < lowerLimit + binSize) {
                obj.addValue(p.intensity);
            }
        }
        if (obj.getN() > 0) {
            if (intensities_sum_or_mean_or_median == 0) {
                tmp_intensity_bin = obj.getSum();
            } else if (intensities_sum_or_mean_or_median == 1) {
                tmp_intensity_bin = obj.getMean();
            } else if (intensities_sum_or_mean_or_median == 2) {
                tmp_intensity_bin = obj.getPercentile(50);
            }
        }
        // write this bin's aggregated intensity into every shifted spectrum,
        // advancing the target index by one per spectrum
        int filling_index = current_index;
        for (double[] shifted : shiftedSpectra) {
            shifted[filling_index] = tmp_intensity_bin;
            filling_index++;
        }
        current_index++;
    }
    return shiftedSpectra;
}

From source file:datafu.hourglass.jobs.StagedOutputJob.java

/**
 * Writes Hadoop counters and other task statistics to a file in the file system.
 *
 * @param fs the file system to write the counters file to
 * @throws IOException if the counters file cannot be written
 */
private void writeCounters(final FileSystem fs) throws IOException {
    final Path actualOutputPath = FileOutputFormat.getOutputPath(this);

    SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyyMMddHHmmss");

    String suffix = timestampFormat.format(new Date());

    if (_countersParentPath != null) {
        if (!fs.exists(_countersParentPath)) {
            _log.info("Creating counter parent path " + _countersParentPath);
            fs.mkdirs(_countersParentPath, FsPermission.valueOf("-rwxrwxr-x"));
        }
        // make the name as unique as possible in this case because this may be a directory
        // where other counter files will be dropped
        _countersPath = new Path(_countersParentPath, ".counters." + suffix);
    } else {
        _countersPath = new Path(actualOutputPath, ".counters." + suffix);
    }

    _log.info(String.format("Writing counters to %s", _countersPath));
    FSDataOutputStream counterStream = fs.create(_countersPath);
    BufferedOutputStream buffer = new BufferedOutputStream(counterStream, 256 * 1024);
    OutputStreamWriter writer = new OutputStreamWriter(buffer);
    for (String groupName : getCounters().getGroupNames()) {
        for (Counter counter : getCounters().getGroup(groupName)) {
            writeAndLog(writer, String.format("%s=%d", counter.getName(), counter.getValue()));
        }
    }

    JobID jobID = this.getJobID();

    org.apache.hadoop.mapred.JobID oldJobId = new org.apache.hadoop.mapred.JobID(jobID.getJtIdentifier(),
            jobID.getId());

    long minStart = Long.MAX_VALUE;
    long maxFinish = 0;
    long setupStart = Long.MAX_VALUE;
    long cleanupFinish = 0;
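    // per-task durations (finish - start) in ms; used below to report count,
    // min, max, mean, standard deviation and sum for map and reduce tasks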
    DescriptiveStatistics mapStats = new DescriptiveStatistics();
    DescriptiveStatistics reduceStats = new DescriptiveStatistics();
    boolean success = true;

    JobClient jobClient = new JobClient(this.conf);

    Map<String, String> taskIdToType = new HashMap<String, String>();

    TaskReport[] setupReports = jobClient.getSetupTaskReports(oldJobId);
    if (setupReports.length > 0) {
        _log.info("Processing setup reports");
        for (TaskReport report : setupReports) {
            taskIdToType.put(report.getTaskID().toString(), "SETUP");
            if (report.getStartTime() == 0) {
                _log.warn("Skipping report with zero start time");
                continue;
            }
            setupStart = Math.min(setupStart, report.getStartTime());
        }
    } else {
        _log.error("No setup reports");
    }

    TaskReport[] mapReports = jobClient.getMapTaskReports(oldJobId);
    if (mapReports.length > 0) {
        _log.info("Processing map reports");
        for (TaskReport report : mapReports) {
            taskIdToType.put(report.getTaskID().toString(), "MAP");
            if (report.getFinishTime() == 0 || report.getStartTime() == 0) {
                _log.warn("Skipping report with zero start or finish time");
                continue;
            }
            minStart = Math.min(minStart, report.getStartTime());
            mapStats.addValue(report.getFinishTime() - report.getStartTime());
        }
    } else {
        _log.error("No map reports");
    }

    TaskReport[] reduceReports = jobClient.getReduceTaskReports(oldJobId);
    if (reduceReports.length > 0) {
        _log.info("Processing reduce reports");
        for (TaskReport report : reduceReports) {
            taskIdToType.put(report.getTaskID().toString(), "REDUCE");
            if (report.getFinishTime() == 0 || report.getStartTime() == 0) {
                _log.warn("Skipping report with zero start or finish time");
                continue;
            }
            maxFinish = Math.max(maxFinish, report.getFinishTime());
            reduceStats.addValue(report.getFinishTime() - report.getStartTime());
        }
    } else {
        _log.error("No reduce reports");
    }

    TaskReport[] cleanupReports = jobClient.getCleanupTaskReports(oldJobId);
    if (cleanupReports.length > 0) {
        _log.info("Processing cleanup reports");
        for (TaskReport report : cleanupReports) {
            taskIdToType.put(report.getTaskID().toString(), "CLEANUP");
            if (report.getFinishTime() == 0) {
                _log.warn("Skipping report with finish time of zero");
                continue;
            }
            cleanupFinish = Math.max(cleanupFinish, report.getFinishTime());
        }
    } else {
        _log.error("No cleanup reports");
    }

    if (minStart == Long.MAX_VALUE) {
        _log.error("Could not determine map-reduce start time");
        success = false;
    }
    if (maxFinish == 0) {
        _log.error("Could not determine map-reduce finish time");
        success = false;
    }

    if (setupStart == Long.MAX_VALUE) {
        _log.error("Could not determine setup start time");
        success = false;
    }
    if (cleanupFinish == 0) {
        _log.error("Could not determine cleanup finish time");
        success = false;
    }

    // Collect statistics on successful/failed/killed task attempts, categorized by setup/map/reduce/cleanup.
    // Unfortunately the job client doesn't have an easier way to get these statistics.
    Map<String, Integer> attemptStats = new HashMap<String, Integer>();
    _log.info("Processing task attempts");
    for (TaskCompletionEvent event : getTaskCompletionEvents(jobClient, oldJobId)) {
        String type = taskIdToType.get(event.getTaskAttemptId().getTaskID().toString());
        String status = event.getTaskStatus().toString();

        String key = String.format("%s_%s_ATTEMPTS", status, type);
        if (!attemptStats.containsKey(key)) {
            attemptStats.put(key, 0);
        }
        attemptStats.put(key, attemptStats.get(key) + 1);
    }

    if (success) {
        writeAndLog(writer, String.format("SETUP_START_TIME_MS=%d", setupStart));
        writeAndLog(writer, String.format("CLEANUP_FINISH_TIME_MS=%d", cleanupFinish));
        writeAndLog(writer, String.format("COMPLETE_WALL_CLOCK_TIME_MS=%d", cleanupFinish - setupStart));

        writeAndLog(writer, String.format("MAP_REDUCE_START_TIME_MS=%d", minStart));
        writeAndLog(writer, String.format("MAP_REDUCE_FINISH_TIME_MS=%d", maxFinish));
        writeAndLog(writer, String.format("MAP_REDUCE_WALL_CLOCK_TIME_MS=%d", maxFinish - minStart));

        writeAndLog(writer, String.format("MAP_TOTAL_TASKS=%d", (long) mapStats.getN()));
        writeAndLog(writer, String.format("MAP_MAX_TIME_MS=%d", (long) mapStats.getMax()));
        writeAndLog(writer, String.format("MAP_MIN_TIME_MS=%d", (long) mapStats.getMin()));
        writeAndLog(writer, String.format("MAP_AVG_TIME_MS=%d", (long) mapStats.getMean()));
        writeAndLog(writer, String.format("MAP_STD_TIME_MS=%d", (long) mapStats.getStandardDeviation()));
        writeAndLog(writer, String.format("MAP_SUM_TIME_MS=%d", (long) mapStats.getSum()));

        writeAndLog(writer, String.format("REDUCE_TOTAL_TASKS=%d", (long) reduceStats.getN()));
        writeAndLog(writer, String.format("REDUCE_MAX_TIME_MS=%d", (long) reduceStats.getMax()));
        writeAndLog(writer, String.format("REDUCE_MIN_TIME_MS=%d", (long) reduceStats.getMin()));
        writeAndLog(writer, String.format("REDUCE_AVG_TIME_MS=%d", (long) reduceStats.getMean()));
        writeAndLog(writer, String.format("REDUCE_STD_TIME_MS=%d", (long) reduceStats.getStandardDeviation()));
        writeAndLog(writer, String.format("REDUCE_SUM_TIME_MS=%d", (long) reduceStats.getSum()));

        writeAndLog(writer, String.format("MAP_REDUCE_SUM_TIME_MS=%d",
                (long) mapStats.getSum() + (long) reduceStats.getSum()));

        for (Map.Entry<String, Integer> attemptStat : attemptStats.entrySet()) {
            writeAndLog(writer, String.format("%s=%d", attemptStat.getKey(), attemptStat.getValue()));
        }
    }

    writer.close();
    buffer.close();
    counterStream.close();
}

From source file:org.bresearch.websec.test.CommonsMathTest.java

public void test3() {
    final String data2 = (new WordProcessor()).filterOnlyAlphaNumeric(
            " !!!   Hello my name is a person.   Hello how are you doing.  hello, this is great.  What do you think?   ");

    final BotlistStringUtils utils = new BotlistStringUtils();
    final List<String> a = utils.buildWordList(data2);

    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (double wordSize : utils.mapReduceWordSize(a, -1)) {
        stats.addValue(wordSize);
    }

    // Compute some statistics        
    assertEquals("3.6", "" + stats.getMean());
    assertEquals("54.0", "" + stats.getSum());
}

From source file:org.bresearch.websec.test.CommonsMathTest.java

public void test5() {
    final DocumentWordStats docStats = new DocumentWordStats(ConstDoc.CONST_SM);
    final DescriptiveStatistics stats = docStats.mapReduceStats();

    System.out.println("" + stats.getSum());
    System.out.println("" + stats.getMean());
    System.out.println("" + stats.getN());
    System.out.println("" + stats.getGeometricMean());
    System.out.println("" + stats.getMax());

}

From source file:org.openehealth.ipf.commons.test.performance.processingtime.ProcessingTimeDescriptiveStatistics.java

/**
 * Returns a statistical summary of the data collected under the given
 * measurement name.
 *
 * @param name the measurement name
 * @return a <code>StatisticalSummary</code> object
 */
@Override
public StatisticalSummary getStatisticalSummaryByName(String name) {
    DescriptiveStatistics stats = statisticsByMeasurementName.get(name);

    return new StatisticalSummaryValues(stats.getMean(), stats.getVariance(), stats.getN(), stats.getMax(),
            stats.getMin(), stats.getSum());
}

From source file:org.processmining.analysis.performance.fsmanalysis.FSMPerformanceAnalysisUI.java

protected double getData(DescriptiveStatistics ds) {
    String sort = (String) measureSort.getValue();
    if (sort.equals("Minimum")) {
        return ds.getMin();
    } else if (sort.equals("Average")) {
        return (ds.getMean());
    } else if (sort.equals("Median")) {
        return (ds.getPercentile(50));
    } else if (sort.equals("Maximum")) {
        return (ds.getMax());
    } else if (sort.equals("Sum")) {
        return (ds.getSum());
    } else if (sort.equals("StandDev")) {
        return (ds.getStandardDeviation());
    } else if (sort.equals("Variance")) {
        return (ds.getStandardDeviation() * ds.getStandardDeviation());
    } else if (sort.equals("Frequency")) {
        return (ds.getN());
    }
    return 0.0;
}

From source file:org.processmining.analysis.performance.fsmevaluator.FSMEvaluationAnalysisUI.java

public void initGraphMenu() {
    splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
    splitPane.setDividerSize(0);

    initColorBySort();
    menuPanel.add(colorBySort.getPropertyPanel());
    menuPanel.add(min.getPropertyPanel());
    menuPanel.add(max.getPropertyPanel());

    JButton updateButton = new AutoFocusButton("Update");
    updateButton.setOpaque(false);
    updateButton.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent arg0) {
            updateGUI();
        }
    });
    menuPanel.add(updateButton);

    mainPanel.setBackground(colorBg);
    menuPanel.setBackground(colorBg);
    chartPanel.setBackground(colorBg);
    splitPane.setBackground(colorBg);

    chartPanel.setLayout(new BorderLayout());

    overallMax = 0.0;
    for (DescriptiveStatistics ds : getTimeMap().values()) {
        if (overallMax < ds.getSum())
            overallMax = ds.getSum();
    }
    adjustTimeScale();
    chartPanel.add(acceptFSM.getGrappaVisualization(), BorderLayout.CENTER);

    splitPane.setLeftComponent(menuPanel);
    splitPane.setRightComponent(chartPanel);
}

From source file:org.processmining.analysis.performance.fsmevaluator.FSMEvaluationAnalysisUI.java

public void updateGUI() {
    overallMax = 0.0;
    for (DescriptiveStatistics ds : getTimeMap().values()) {
        if (overallMax < ds.getSum())
            overallMax = ds.getSum();
    }
    adjustTimeScale();
    splitPane.remove(chartPanel);
    chartPanel = new JPanel();
    chartPanel.setLayout(new BorderLayout());
    chartPanel.add(acceptFSM.getGrappaVisualization(), BorderLayout.CENTER);
    chartPanel.setBackground(colorBg);
    splitPane.setRightComponent(chartPanel);
}