Example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile

List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile

Introduction

In this page you can find the example usage for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getPercentile.

Prototype

public double getPercentile(double p) throws MathIllegalStateException, MathIllegalArgumentException 

Source Link

Document

Returns an estimate of the p-th percentile of the stored values.

Usage

From source file:org.wildfly.swarm.proc.CSVCollector.java

/**
 * Emits one CSV record for the finished run: the id, its file name, and — for
 * every {@link Measure} — the sample count, min, max, mean, standard deviation,
 * median (p50) and 75th percentile of the collected statistics.
 *
 * @param id identifier of the finished run; also interpreted as a file path
 * @throws RuntimeException if a measure has no recorded results, or the CSV write fails
 */
public void onFinish(String id) {
    List<Object> record = new ArrayList<>();
    record.add(id);
    record.add(Paths.get(id).getFileName());

    for (Measure measure : Measure.values()) {
        if (!results.containsKey(measure)) {
            throw new RuntimeException("Measurement is missing " + measure);
        }
        DescriptiveStatistics stats = results.get(measure);
        // Column order per measure: n, min, max, mean, stddev, p50, p75.
        record.add(stats.getN());
        record.add(stats.getMin());
        record.add(stats.getMax());
        record.add(stats.getMean());
        record.add(stats.getStandardDeviation());
        record.add(stats.getPercentile(50));
        record.add(stats.getPercentile(75));
    }

    try {
        csvOutput.printRecord(record);
        csvOutput.flush();
    } catch (IOException e) {
        throw new RuntimeException("Failed to write data", e);
    }
}

From source file:org.wildfly.swarm.proc.SystemOutCollector.java

/**
 * Prints the collected statistics (sample count, min, max, 75th percentile)
 * for every recorded measure of the given run to standard out.
 *
 * @param id identifier of the finished run
 */
public void onFinish(String id) {
    System.out.println("Results for " + id);
    for (Measure measure : results.keySet()) {
        DescriptiveStatistics stats = results.get(measure);
        String prefix = measure.name();
        System.out.println(prefix + " Samples: " + stats.getN());
        System.out.println(prefix + " min: " + stats.getMin());
        System.out.println(prefix + " max: " + stats.getMax());
        System.out.println(prefix + " 75p: " + stats.getPercentile(75));
    }
}

From source file:org.wso2.carbon.ml.core.spark.models.ext.AnomalyDetectionModel.java

/**
 * Builds a map from cluster index to the requested percentile of that cluster's
 * recorded distance values, i.e. the per-cluster boundary distance.
 *
 * NOTE(review): the previous Javadoc claimed the map key was the percentile
 * value; the code below clearly keys the result by cluster index.
 *
 * key : cluster index
 * value : percentile distance of that cluster
 */
private Map<Integer, Double> getPercentileDistancesMap(double percentileValue) {

    // Get a DescriptiveStatistics instance (reused across clusters, cleared each pass)
    DescriptiveStatistics stats = new DescriptiveStatistics();
    /*
     * key : cluster index
     * value : percentile distance value
     */
    Map<Integer, Double> percentilesMap = new HashMap<Integer, Double>();

    // calculating percentile distance of each cluster
    // NOTE(review): indexed iteration assumes the map keys are contiguous
    // 0..size()-1 — confirm against how clusterIndexToDistancesListMap is populated.
    for (int clusterIndex = 0; clusterIndex < clusterIndexToDistancesListMap.size(); clusterIndex++) {

        for (double distance : clusterIndexToDistancesListMap.get(clusterIndex)) {
            stats.addValue(distance);
        }

        double percentileDistance = stats.getPercentile(percentileValue);
        percentilesMap.put(clusterIndex, percentileDistance);
        stats.clear(); // reset the accumulator before the next cluster
    }

    return percentilesMap;
}

From source file:org.wso2.carbon.ml.core.spark.models.ext.AnomalyDetectionModel.java

/**
 * Computes the requested percentile over the distance values recorded for a
 * single cluster.
 *
 * @param percentileValue the percentile to estimate (0 &lt; p &le; 100)
 * @param clusterIndex    index of the cluster whose distances are evaluated
 * @return the estimated percentile distance of that cluster
 */
private double getPercentileDistance(double percentileValue, int clusterIndex) {

    // Accumulate every recorded distance of this cluster.
    DescriptiveStatistics distanceStats = new DescriptiveStatistics();
    for (double d : clusterIndexToDistancesListMap.get(clusterIndex)) {
        distanceStats.addValue(d);
    }

    double percentileDistance = distanceStats.getPercentile(percentileValue);
    // clear() is redundant for a method-local instance; kept to mirror the original flow.
    distanceStats.clear();

    return percentileDistance;
}

From source file:org.wso2.carbon.ml.database.internal.MLDatabaseService.java

/**
 * Create the JSON string with summary statistics for a column.
 *
 * @param type Data-type of the column
 * @param graphFrequencies Bin frequencies of the column
 * @param missing Number of missing values in the column
 * @param unique Number of unique values in the column
 * @param descriptiveStats DescriptiveStats object of the column
 * @return JSON representation of the summary statistics of the column
 */
private JSONArray createJson(String type, SortedMap<?, Integer> graphFrequencies, int missing, int unique,
        DescriptiveStatistics descriptiveStats) throws JSONException {

    JSONObject json = new JSONObject();
    // One [category, frequency] pair per interval/category, in the map's sorted key order.
    JSONArray freqs = new JSONArray();
    for (Object category : graphFrequencies.keySet()) {
        JSONArray pair = new JSONArray();
        pair.put(category.toString());
        pair.put(graphFrequencies.get(category));
        freqs.put(pair);
    }

    // Put the statistics to a json object
    json.put("unique", unique);
    json.put("missing", missing);

    DecimalFormat decimalFormat = new DecimalFormat("#.###");
    // Numeric summaries only make sense when at least one value was observed.
    if (descriptiveStats.getN() != 0) {
        json.put("mean", decimalFormat.format(descriptiveStats.getMean()));
        json.put("min", decimalFormat.format(descriptiveStats.getMin()));
        json.put("max", decimalFormat.format(descriptiveStats.getMax()));
        json.put("median", decimalFormat.format(descriptiveStats.getPercentile(50)));
        json.put("std", decimalFormat.format(descriptiveStats.getStandardDeviation()));
        if (type.equalsIgnoreCase(FeatureType.NUMERICAL)) {
            json.put("skewness", decimalFormat.format(descriptiveStats.getSkewness()));
        }
    }
    json.put("values", freqs);
    json.put("bar", true);
    json.put("key", "Frequency");

    JSONArray summaryStatArray = new JSONArray();
    summaryStatArray.put(json);
    return summaryStatArray;
}

From source file:org.wso2.carbon.ml.dataset.internal.DatabaseHandler.java

/**
 * Create the JSON string with summary statistics for a column.
 *
 * @param type              Data-type of the column
 * @param graphFrequencies  Bin frequencies of the column
 * @param missing           Number of missing values in the column
 * @param unique            Number of unique values in the column
 * @param descriptiveStats  DescriptiveStats object of the column
 * @return                  JSON representation of the summary statistics of the column
 */
private JSONArray createJson(String type, SortedMap<?, Integer> graphFrequencies, int missing, int unique,
        DescriptiveStatistics descriptiveStats) {
    JSONObject json = new JSONObject();
    JSONArray freqs = new JSONArray();
    Object[] categoryNames = graphFrequencies.keySet().toArray();
    // Create an array with intervals/categories and their frequencies.
    for (int i = 0; i < graphFrequencies.size(); i++) {
        JSONArray temp = new JSONArray();
        temp.put(categoryNames[i].toString());
        temp.put(graphFrequencies.get(categoryNames[i]));
        freqs.put(temp);
    }
    // Put the statistics to a json object
    json.put("unique", unique);
    json.put("missing", missing);

    DecimalFormat decimalFormat = new DecimalFormat("#.###");
    // Numeric summaries only when at least one value was observed.
    if (descriptiveStats.getN() != 0) {
        json.put("mean", decimalFormat.format(descriptiveStats.getMean()));
        // min/max added for consistency with MLDatabaseService.createJson,
        // which emits the same summary with min and max included.
        json.put("min", decimalFormat.format(descriptiveStats.getMin()));
        json.put("max", decimalFormat.format(descriptiveStats.getMax()));
        json.put("median", decimalFormat.format(descriptiveStats.getPercentile(50)));
        json.put("std", decimalFormat.format(descriptiveStats.getStandardDeviation()));
        if (type.equalsIgnoreCase(FeatureType.NUMERICAL)) {
            json.put("skewness", decimalFormat.format(descriptiveStats.getSkewness()));
        }
    }
    json.put("values", freqs);
    json.put("bar", true);
    json.put("key", "Frequency");
    JSONArray summaryStatArray = new JSONArray();
    summaryStatArray.put(json);
    return summaryStatArray;
}

From source file:org.wso2.extension.siddhi.execution.var.backtest.BacktestIncremental.java

/**
 * Runs the incremental VaR backtest over the configured date window and writes
 * one CSV row of summary statistics per trading day: close/avg/max/median/mode
 * of both the daily VaR and the daily loss, plus the loss (resp. VaR) that
 * corresponds to the worst VaR (resp. loss) observation.
 *
 * Fixes relative to the previous version:
 * - the {@link Formatter} is now closed via try-with-resources (it leaked when
 *   any day's processing threw);
 * - varMap/lossMap no longer use raw {@code HashMap} types;
 * - the min-index is recomputed from scratch for the loss scan, so a stale
 *   index from the VaR scan can no longer leak into the "corresponding VaR"
 *   lookup when lossMap has no entry matching its minimum.
 *
 * @throws FileNotFoundException if the results CSV cannot be created
 */
public void runTest() throws FileNotFoundException {

    String[] dates = { "jan-23", "jan-24", "jan-25", "jan-26", "jan-27", "jan-30", "jan-31", "feb-1", "feb-2",
            "feb-3" };
    String write = "%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s";
    //        VaRCalculator varCalculator = new HistoricalVaRCalculator(BATCH_SIZE, VAR_CI);
    VaRCalculator varCalculator = new ParametricVaRCalculator(BATCH_SIZE, VAR_CI);
    //        VaRCalculator varCalculator = new MonteCarloVarCalculator(BATCH_SIZE, VAR_CI, 2500, 100, 0.01);

    Map<String, Integer> assets = initPortfolio();
    Portfolio portfolio = varCalculator.createPortfolio("1", assets);
    varCalculator.addPortfolio("1", portfolio);

    try (Formatter formatter = new Formatter(new File("MonteCarloBacktestResults.csv"))) {
        formatter.format("%s%n", "date,varclose,varavg,varmax,corrloss,varmedian,varmode,lossclose,lossavg,lossmax,"
                + "corrvar,lossmedian,lossmode");

        for (int d = START_DATE; d <= END_DATE; d++) {
            System.out.println("\nDAY : " + dates[d - START_DATE] + "\n");
            ArrayList<Event> list = readBacktestData(d);
            // event counter -> negative VaR / negative loss observed at that event
            HashMap<Integer, Double> varMap = new HashMap<>();
            HashMap<Integer, Double> lossMap = new HashMap<>();
            double var = 0;
            double loss = 0;

            int counter = 0;
            for (int i = 0; i < list.size(); i++) {
                String jsonString = (String) varCalculator.calculateValueAtRisk(list.get(i));
                currentPortfolioValue = portfolio.getTotalPortfolioValue();

                if (jsonString != null) {
                    JSONObject jsonObject = new JSONObject(jsonString);
                    double tempVar = (Double) jsonObject.get(PORTFOLIO_KEY); // hardcoded for portfolio ID 1
                    // Only negative values (actual risk / actual losses) are recorded.
                    if (tempVar < 0) {
                        var = tempVar;
                        varMap.put(counter, var);
                    }

                    double tempLoss = currentPortfolioValue - previousPortfolioValue;
                    if (tempLoss < 0) {
                        loss = tempLoss;
                        lossMap.put(counter, loss);
                    }
                    counter++;
                }

                previousPortfolioValue = currentPortfolioValue;
            }

            DescriptiveStatistics statVar = new DescriptiveStatistics(
                    varMap.values().stream().mapToDouble(Double::doubleValue).toArray());
            DescriptiveStatistics statLoss = new DescriptiveStatistics(
                    lossMap.values().stream().mapToDouble(Double::doubleValue).toArray());

            System.out.println("Daily VaR CLOSE  : " + var);
            System.out.println("Daily VaR AVG    : " + statVar.getMean());

            // VaR values are negative, so the minimum is the worst ("MAX") VaR.
            Double min = statVar.getMin();
            System.out.println("Daily VaR MAX    : " + min);

            Integer minIndex = keyOfValue(varMap, min);
            String corrLoss = lossMap.get(minIndex) == null ? "NO LOSS" : lossMap.get(minIndex).toString();

            System.out.println("Crspng Loss      : " + corrLoss);
            System.out.println("Daily VaR MEDIAN : " + statVar.getPercentile(50));
            System.out.println("Daily VaR MODE   : " + mode(statVar.getValues()));

            System.out.println();

            System.out.println("Daily Loss CLOSE   : " + loss);
            System.out.println("Daily Loss AVG     : " + statLoss.getMean());

            min = statLoss.getMin();
            System.out.println("Daily Loss MAX     : " + min);

            // Recomputed from scratch: previously the stale VaR index could leak
            // through when lossMap had no entry equal to its minimum.
            minIndex = keyOfValue(lossMap, min);
            String corrVar = varMap.get(minIndex) == null ? "NO VAR" : varMap.get(minIndex).toString();

            System.out.println("Crspng VaR         : " + corrVar);
            System.out.println("Daily Loss MEDIAN  : " + statLoss.getPercentile(50));
            System.out.println("Daily Loss MODE    : " + mode(statLoss.getValues()));

            formatter.format("%s%n",
                    String.format(write, dates[d - START_DATE], var, statVar.getMean(), statVar.getMin(), corrLoss,
                            statVar.getPercentile(50), mode(statVar.getValues()), loss, statLoss.getMean(),
                            statLoss.getMin(), corrVar, statLoss.getPercentile(50), mode(statLoss.getValues())));
        }
    }
}

/** Returns the key of the last entry whose value equals {@code value}, or {@code null} if none matches. */
private static Integer keyOfValue(Map<Integer, Double> map, Double value) {
    Integer index = null;
    for (Map.Entry<Integer, Double> e : map.entrySet()) {
        if (e.getValue().equals(value)) {
            index = e.getKey();
        }
    }
    return index;
}

From source file:org.wso2.extension.siddhi.execution.var.models.historical.HistoricalVaRCalculator.java

/**
 * @return the var of the portfolio. Calculate the contribution of the changed asset to the portfolio loss and then
 * adjust the previous loss value using it. A distribution is constructed using those loss values and the ultimate
 * VaR value is obtained, or {@code null} when the asset has no return values yet.
 */
@Override
public Double processData(Portfolio portfolio, Event event) {
    HistoricalPortfolio historicalPortfolio = (HistoricalPortfolio) portfolio;
    String symbol = event.getSymbol();
    HistoricalAsset asset = (HistoricalAsset) getAssetPool().get(symbol);
    double[] currentSimulatedPriceList = asset.getCurrentSimulatedPriceList();
    double[] cumulativeLossValues = historicalPortfolio.getCumulativeLossValues();

    //there should be at least one return value
    if (asset.getNumberOfReturnValues() > 0) {
        // Window size is the number of current simulated prices; values beyond
        // that window are discarded by DescriptiveStatistics.
        DescriptiveStatistics descriptiveStatistics = new DescriptiveStatistics(
                currentSimulatedPriceList.length);

        //first time for the portfolio
        // NOTE(review): sized getBatchSize() - 1; assumes currentSimulatedPriceList
        // never exceeds that length — confirm against how the simulated list is built.
        if (cumulativeLossValues == null) {
            cumulativeLossValues = new double[getBatchSize() - 1];
        }

        double previousSimulatedPriceList[] = asset.getPreviousSimulatedPriceList();
        int previousQty = portfolio.getPreviousAssetQuantities(symbol);
        int currentQty = portfolio.getCurrentAssetQuantities(symbol);

        //incrementally calculate the cumulative loss value
        //new cumulative loss value = previous loss value + adjustment
        for (int i = 0; i < currentSimulatedPriceList.length; i++) {
            //2nd time for the portfolio: back out this asset's previous contribution
            if (i < previousSimulatedPriceList.length) {
                cumulativeLossValues[i] = cumulativeLossValues[i]
                        - (previousSimulatedPriceList[i] * previousQty);
            }

            //incrementally calculate the cumulative loss value (add the current contribution)
            cumulativeLossValues[i] += (currentSimulatedPriceList[i] * currentQty);
            descriptiveStatistics.addValue(cumulativeLossValues[i]);
        }

        historicalPortfolio.setCumulativeLossValues(cumulativeLossValues);
        // VaR is the (1 - CI) percentile of the loss distribution, e.g. the 5th
        // percentile for a 95% confidence interval.
        return descriptiveStatistics.getPercentile((1 - getConfidenceInterval()) * 100);
    }
    return null;
}

From source file:reflex.module.ReflexStatistics.java

/**
 * Computes summary statistics (mean, standard deviation, median) over the
 * numeric values of a single list argument.
 *
 * @param params exactly one ReflexValue that wraps a list of numeric values
 * @return a ReflexValue map with keys "mean", "std" and "median"
 * @throws ReflexException if params is not exactly one list-valued argument
 */
public ReflexValue statistics(List<ReflexValue> params) {
    // Short-circuit keeps params.get(0) safe when the list is not a singleton.
    if (params.size() != 1 || !params.get(0).isList()) {
        throw new ReflexException(-1, "statistics needs one list parameter");
    }

    DescriptiveStatistics stats = new DescriptiveStatistics();
    for (ReflexValue value : params.get(0).asList()) {
        stats.addValue(value.asDouble());
    }

    Map<String, ReflexValue> summary = new HashMap<String, ReflexValue>();
    summary.put("mean", new ReflexValue(stats.getMean()));
    summary.put("std", new ReflexValue(stats.getStandardDeviation()));
    summary.put("median", new ReflexValue(stats.getPercentile(50)));
    return new ReflexValue(summary);
}

From source file:sandbox.sfwatergit.peerinfluence.io.TXTWriter.java

/**
 * Writes a tab-separated table of summary statistics (mean, median, min, max, n)
 * to {@code file}, one row per key of {@code statsMap} in ascending key order.
 *
 * Fix: the writer is now closed via try-with-resources; previously it leaked
 * (and lost buffered output) whenever a write threw.
 *
 * @param statsMap map from x-value to the statistics accumulated for that value
 * @param xLab     header label for the key column
 * @param file     destination file path
 * @throws IOException if the file cannot be created or written
 */
public static void writeStatistics(TDoubleObjectHashMap<DescriptiveStatistics> statsMap, String xLab,
        String file) throws IOException {
    double[] keys = statsMap.keys();
    Arrays.sort(keys);

    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
        // Header row.
        writer.write(xLab);
        writer.write(TAB);
        writer.write("mean");
        writer.write(TAB);
        writer.write("median");
        writer.write(TAB);
        writer.write("min");
        writer.write(TAB);
        writer.write("max");
        writer.write(TAB);
        writer.write("n");
        writer.newLine();

        // One row per key, in ascending key order.
        for (double key : keys) {
            DescriptiveStatistics stats = statsMap.get(key);

            writer.write(String.valueOf(key));
            writer.write(TAB);
            writer.write(String.valueOf(stats.getMean()));
            writer.write(TAB);
            writer.write(String.valueOf(stats.getPercentile(50)));
            writer.write(TAB);
            writer.write(String.valueOf(stats.getMin()));
            writer.write(TAB);
            writer.write(String.valueOf(stats.getMax()));
            writer.write(TAB);
            writer.write(String.valueOf(stats.getN()));
            writer.newLine();
        }
    }
}