Example usage for org.apache.commons.math3.stat.descriptive.rank Percentile setData

Introduction

On this page you can find example usages of Percentile.setData from org.apache.commons.math3.stat.descriptive.rank. The method loads a data set into the Percentile instance; subsequent calls to evaluate(p) then compute the p-th percentile of that data.

Prototype

@Override
public void setData(final double[] values) 
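
Before the real-world examples below, here is a minimal, self-contained sketch of the setData/evaluate pattern; the sample values are made up for illustration:

import org.apache.commons.math3.stat.descriptive.rank.Percentile;

public class PercentileSetDataDemo {
    public static void main(String[] args) {
        // hypothetical sample: response times in milliseconds
        double[] responseTimes = { 120.0, 85.5, 340.2, 97.1, 410.8, 150.3 };

        Percentile percentile = new Percentile();
        percentile.setData(responseTimes); // load the data set once

        // evaluate(p) computes the p-th percentile of the loaded data
        System.out.println("median = " + percentile.evaluate(50d));
        System.out.println("p90    = " + percentile.evaluate(90d));
    }
}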

Usage

From source file:com.mgmtp.perfload.perfalyzer.binning.MeasuringResponseTimesBinningStrategy.java
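
In this example, each URI's recorded response times are loaded into a Percentile, and the 10th, 50th and 90th percentiles are written to a "quantiles" channel along with the request count, error count, minimum and maximum; per-URI response time distributions go to separate channels.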

@Override
public void aggregateData(final ChannelManager channelManager) throws IOException {
    WritableByteChannel quantilesChannel = channelManager.getChannel("quantiles");
    writeQuantilesHeader(quantilesChannel);

    int i = 0;
    for (Entry<String, UriMeasurings> entry : measuringsMap.entrySet()) {
        UriMeasurings measurings = entry.getValue();
        String uri = measurings.uriAlias;
        if (measurings.responseTimes.isEmpty()) {
            continue;
        }

        Percentile percentile = new Percentile();
        double[] responseTimes = Doubles.toArray(measurings.responseTimes);
        percentile.setData(responseTimes);

        // each URI is mapped to a key, which is simply a number left-padded for better sorting
        String mappingKey = leftPad(String.valueOf(i++), 3, '0');

        StrBuilder sb = new StrBuilder(150);
        appendEscapedAndQuoted(sb, DELIMITER, mappingKey);
        appendEscapedAndQuoted(sb, DELIMITER, measurings.type);
        appendEscapedAndQuoted(sb, DELIMITER, uri);
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(responseTimes.length));
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(measurings.errorCount));
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(Doubles.min(responseTimes)));
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(percentile.evaluate(10d)));
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(percentile.evaluate(50d)));
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(percentile.evaluate(90d)));
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(Doubles.max(responseTimes)));
        writeLineToChannel(quantilesChannel, sb.toString(), Charsets.UTF_8);

        // write response time distributions
        WritableByteChannel distributionChannel = channelManager.getChannel("distribution_" + mappingKey);
        writeDistributionHeader(distributionChannel);

        for (Entry<Long, MutableInt> e : measurings.responseDistributions.entrySet()) {
            sb = new StrBuilder();
            appendEscapedAndQuoted(sb, DELIMITER, e.getKey());
            appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(e.getValue()));
            writeLineToChannel(distributionChannel, sb.toString(), Charsets.UTF_8);
        }
    }

    writeExecutionAggregatedResponseTimesHeader(channelManager.getChannel("aggregatedResponseTimes"));
    if (!perExecutionResponseTimes.isEmpty()) {
        BinManager executionsPerMinuteBinManager = new BinManager(startOfFirstBin,
                PerfAlyzerConstants.BIN_SIZE_MILLIS_1_MINUTE);
        BinManager executionsPerTenMinutesBinManager = new BinManager(startOfFirstBin,
                PerfAlyzerConstants.BIN_SIZE_MILLIS_10_MINUTES);
        BinManager medianExecutionBinManager = new BinManager(startOfFirstBin,
                PerfAlyzerConstants.BIN_SIZE_MILLIS_30_SECONDS);

        List<ExecutionMeasurings> values = newArrayList(perExecutionResponseTimes.values());

        for (ExecutionMeasurings execMeasurings : values) {
            long timestampMillis = execMeasurings.timestampMillis;
            executionsPerMinuteBinManager.addValue(timestampMillis);
            executionsPerTenMinutesBinManager.addValue(timestampMillis);
            medianExecutionBinManager.addValue(timestampMillis,
                    execMeasurings.sumResponseTimes.doubleValue() / 1000);
        }

        executionsPerMinuteBinManager.toCsv(channelManager.getChannel("execMin"), "time", "count",
                intNumberFormat);
        executionsPerTenMinutesBinManager.toCsv(channelManager.getChannel("exec10Min"), "time", "count",
                intNumberFormat);
        medianExecutionBinManager.toCsv(channelManager.getChannel("executions"), "time", "median",
                intNumberFormat, AggregationType.MEDIAN);

        double[] sumResponseTimes = values.stream().mapToDouble(input -> input.sumResponseTimes.doubleValue())
                .toArray();

        StrBuilder sb = new StrBuilder(150);
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(Doubles.min(sumResponseTimes) / 1000));
        appendEscapedAndQuoted(sb, DELIMITER,
                intNumberFormat.format(StatUtils.percentile(sumResponseTimes, 50d) / 1000));
        appendEscapedAndQuoted(sb, DELIMITER, intNumberFormat.format(Doubles.max(sumResponseTimes) / 1000));
        writeLineToChannel(channelManager.getChannel("aggregatedResponseTimes"), sb.toString(), Charsets.UTF_8);
    }
}

From source file:dbseer.comp.process.transaction.TransactionLogWriter.java
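
This transaction log writer buckets latencies by transaction type (cluster); for each configured percentile it constructs a Percentile(p), loads the type's latency list via setData, and appends the evaluated value to a per-type prctile_latency_ file.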

public void writeLog(long timestamp, Collection<Transaction> transactions) throws Exception {
    if (!this.isInitialized) {
        throw new Exception("TransactionLogWriter not initialized.");
    }

    double totalCount = 0;
    double[][] count = new double[numServer][DBSeerConstants.MAX_NUM_TABLE];
    double[][] latencySum = new double[numServer][DBSeerConstants.MAX_NUM_TABLE];
    String gap = "   ";

    if (!dbscan.isInitialized() && !dbscan.isInitializing()) {
        initialTransactions.addAll(transactions);

        if (initialTransactions.size() > dbscan.getInitPts() && !dbscan.isInitializing()) {
            dbscanInitializer = Executors.newSingleThreadExecutor();
            dbscanInitializer.submit(new Runnable() {
                @Override
                public void run() {
                    dbscan.initialDBSCAN(initialTransactions);
                }
            });
        }
    }

    for (Transaction t : transactions) {
        if (dbscan != null && dbscan.isInitialized()) {
            if (liveLogProcessor.getTxStartTime() == 0) {
                liveLogProcessor.setTxStartTime(timestamp);
            }
            dbscan.train(t);
        }

        int type;
        if (t.getCluster() == null) {
            type = 0;
        } else {
            type = t.getCluster().getId();
        }

        if (type > maxType) {
            maxType = type;
        }

        // if not an outlier
        if (type >= 0) {
            String server = t.getServerName();
            int index = serverIndex.get(server);
            latencySum[index][type] += t.getLatency();
            count[index][type]++;
            totalCount++;

            ArrayList<Double> latencyList = writers.get(server).getLatencyMap().get(type);
            if (latencyList == null) {
                latencyList = new ArrayList<Double>();
                writers.get(server).getLatencyMap().put(type, latencyList);
            }
            latencyList.add((double) t.getLatency());

            // write sample
            HashMap<Integer, Integer> countMap = writers.get(server).getTransactionSampleCountMap();
            Integer sampleCount = countMap.get(type);
            if (sampleCount == null) {
                countMap.put(type, 1);
            } else {
                int countVal = sampleCount.intValue();
                if (countVal < DBSeerConstants.MAX_TRANSACTION_SAMPLE) {
                    HashMap<Integer, PrintWriter> sampleWriters = writers.get(server)
                            .getTransactionSampleWriter();
                    PrintWriter sampleWriter = sampleWriters.get(type);
                    if (sampleWriter == null) {
                        sampleWriter = new PrintWriter(new FileOutputStream(String.format("%s%d",
                                this.dir + File.separator + server + File.separator + "tx_sample_", type),
                                false));
                        sampleWriters.put(type, sampleWriter);
                    }
                    sampleWriter.print(t.getEntireStatement());
                    sampleWriter.println("---");
                    sampleWriter.flush();
                    countVal++;
                    countMap.put(type, countVal);
                }
            }
        }
    }

    // update live monitor
    if (monitor != null) {
        monitor.setCurrentTimestamp(timestamp);
        monitor.setNumTransactionTypes(maxType + 1);
        monitor.setGlobalTransactionCount(totalCount);

        for (int i = 0; i <= maxType; ++i) {
            double countSum = 0;
            double latencySumSum = 0;
            for (int j = 0; j < numServer; ++j) {
                countSum += count[j][i];
                latencySumSum += latencySum[j][i];
            }
            monitor.setCurrentTPS(i, countSum);
            if (countSum == 0) {
                monitor.setCurrentAverageLatency(i, 0.0);
            } else {
                monitor.setCurrentAverageLatency(i, latencySumSum / countSum);
            }
        }
    }

    if (timestamp < liveLogProcessor.getSysStartTime() || liveLogProcessor.getSysStartTime() == 0) {
        return;
    }

    for (String server : servers) {
        TransactionWriter writer = writers.get(server);
        PrintWriter tpsWriter = writer.getTpsWriter();
        PrintWriter latencyWriter = writer.getLatencyWriter();

        HashMap<Integer, PrintWriter> prctileLatencyWriter = writer.getPrctileLatencyWriter();
        HashMap<Integer, ArrayList<Double>> latencyMap = writer.getLatencyMap();

        tpsWriter.print(gap);
        latencyWriter.print(gap);

        tpsWriter.printf("%.16e", (double) timestamp);
        latencyWriter.printf("%.16e", (double) timestamp);

        int index = serverIndex.get(server);

        for (int i = 0; i <= maxType; ++i) {
            tpsWriter.print(gap);
            tpsWriter.printf("%.16e", count[index][i]);

            latencyWriter.print(gap);
            if (count[index][i] == 0.0) {
                latencyWriter.printf("%.16e", 0.0);
            } else {
                latencyWriter.printf("%.16e", (latencySum[index][i] / count[index][i]) / 1000.0);
            }

            // write percentile
            PrintWriter prctileWriter = prctileLatencyWriter.get(i);
            ArrayList<Double> latencyList = latencyMap.get(i);
            if (latencyList == null) {
                latencyList = new ArrayList<Double>();
                latencyMap.put(i, latencyList);
            }
            if (prctileWriter == null) {
                prctileWriter = new PrintWriter(new FileOutputStream(
                        String.format("%s%03d",
                                this.dir + File.separator + server + File.separator + "prctile_latency_", i),
                        false));
                prctileLatencyWriter.put(i, prctileWriter);
            }
            double[] latencies = Doubles.toArray(latencyList);
            prctileWriter.printf("%d,", timestamp);
            for (double p : percentiles) {
                Percentile percentile = new Percentile(p);
                percentile.setData(latencies);
                double val = percentile.evaluate();
                if (Double.isNaN(val))
                    val = 0.0;
                prctileWriter.printf("%f,", val / 1000.0);
            }
            prctileWriter.println();
            prctileWriter.flush();
            latencyList.clear();
        }

        tpsWriter.println();
        latencyWriter.println();
        tpsWriter.flush();
        latencyWriter.flush();
        isWritingStarted = true;
    }
}

From source file:dbseer.comp.live.LiveTransactionProcessor.java
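
A live variant of the previous writer: transactions are polled per timestamp, per-cluster latency lists are accumulated, and each configured percentile is evaluated via setData/evaluate and appended to a prctile_latency_ file, alongside TPS and average-latency outputs.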

@Override
public void run() {
    try {
        this.transactionCountWriter = new PrintWriter(new FileWriter(this.transactionCountFile, true));
        this.avgLatencyWriter = new PrintWriter(new FileWriter(this.avgLatencyFile, true));
    } catch (IOException e) {
        e.printStackTrace();
    }

    long time;
    // wait for transactions to come in
    while (true) {
        time = map.getMinEndTime();
        if (time != Long.MAX_VALUE) {
            break;
        } else {
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                if (!terminate) {
                    e.printStackTrace();
                } else {
                    return;
                }
            }
        }
        if (terminate) {
            break;
        }
    }

    String gap = "   ";
    double totalCount = 0;
    double currentCount = 0;
    double[] count = new double[DBSeerConstants.MAX_NUM_TABLE];
    double[] latencySum = new double[DBSeerConstants.MAX_NUM_TABLE];
    int maxClusterId = 0;
    long transCount = 0;

    // start processing transactions
    while (true) {
        long maxTime, maxClusterEndTime;
        maxTime = map.getMaxEndTime();
        if (!StreamClustering.getDBSCAN().isInitialized() && transCount < DBSeerConstants.DBSCAN_INIT_PTS) {
            transCount = map.getCount();
            monitor.setGlobalTransactionCount(transCount);
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        //         synchronized (StreamClustering.LOCK)
        try {
            StreamClustering.LOCK.lockInterruptibly();
            {
                maxClusterEndTime = StreamClustering.getDBSCAN().getMaxEndTime();
            }
            StreamClustering.LOCK.unlock();
            while (time < maxTime && time < maxClusterEndTime) {
                currentCount = 0;
                Set<Transaction> transactions = map.pollTransactions(time);

                // if no transactions for the time, skip to the next timestamp.
                if (transactions.isEmpty()) {
                    ++time;
                    continue;
                }

                // if the sys log is not available for the time, also skip to the next timestamp
                if (map.getMinSysLogTime() != Long.MAX_VALUE && map.getMinSysLogTime() > time) {
                    ++time;
                    continue;
                }

                boolean monitorLogFound = true;
                String monitorLog;
                while ((monitorLog = map.getSysLog(time)) == null) {
                    if (time < map.getLastSysLogTime()) {
                        monitorLogFound = false;
                        break;
                    }
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        if (!terminate) {
                            e.printStackTrace();
                        } else {
                            return;
                        }
                    }
                }

                if (!monitorLogFound) {
                    ++time;
                    continue;
                }

                monitorWriter.println(monitorLog);
                monitorWriter.flush();

                for (Transaction t : transactions) {
                    Cluster c = t.getCluster();
                    // if cluster is null, skip
                    if (c == null) {
                        continue;
                    }

                    int cId = c.getId();
                    long latency = t.getLatency();

                    // ignore outliers
                    if (cId >= 0) {
                        latencySum[cId] += latency;
                        ++count[cId];
                        ++totalCount;
                        ++currentCount;

                        ArrayList<Double> latencyList = latencyMap.get(cId);
                        if (latencyList == null) {
                            latencyList = new ArrayList<Double>();
                            latencyMap.put(cId, latencyList);
                        }
                        latencyList.add((double) latency / 1000.0);
                    }
                    if (cId > maxClusterId) {
                        maxClusterId = cId;
                    }
                }

                // update live monitor
                //               int numTrans = maxClusterId + 1;
                int numTrans = StreamClustering.getDBSCAN().getAllClusters().size();
                synchronized (LiveMonitorInfo.LOCK) {
                    monitor.setCurrentTimestamp(time);
                    monitor.setNumTransactionTypes(numTrans);
                    monitor.setGlobalTransactionCount(totalCount);
                    for (int i = 0; i < numTrans; ++i) {
                        monitor.setCurrentTPS(i, count[i]);
                        if (count[i] == 0) {
                            monitor.setCurrentAverageLatency(i, 0.0);
                        } else {
                            monitor.setCurrentAverageLatency(i, latencySum[i] / count[i]);
                        }
                    }
                }

                transactionCountWriter.print(gap);
                avgLatencyWriter.print(gap);

                transactionCountWriter.printf("%.16e", (double) time);
                avgLatencyWriter.printf("%.16e", (double) time);

                for (int i = 0; i < numTrans; ++i) {
                    transactionCountWriter.print(gap);
                    transactionCountWriter.printf("%.16e", count[i]);
                    avgLatencyWriter.print(gap);
                    if (count[i] == 0.0) {
                        avgLatencyWriter.printf("%.16e", 0.0);
                    } else {
                        avgLatencyWriter.printf("%.16e", (latencySum[i] / (double) count[i] / 1000.0));
                    }
                    count[i] = 0;
                    latencySum[i] = 0;

                    // write percentile
                    PrintWriter writer = percentileLatencyWriter.get(i);
                    ArrayList<Double> latencyList = latencyMap.get(i);
                    if (latencyList == null) {
                        latencyList = new ArrayList<Double>();
                        latencyMap.put(i, latencyList);
                    }
                    if (writer == null) {
                        try {
                            writer = new PrintWriter(new FileOutputStream(String.format("%s%03d",
                                    DBSeerGUI.userSettings.getDBSeerRootPath() + File.separator
                                            + DBSeerConstants.LIVE_DATASET_PATH + File.separator
                                            + "prctile_latency_",
                                    i), true));
                        } catch (FileNotFoundException e) {
                            e.printStackTrace();
                        }
                        percentileLatencyWriter.put(i, writer);
                    }

                    double[] latencies = Doubles.toArray(latencyList);
                    writer.printf("%d,", time);
                    for (double p : percentiles) {
                        Percentile percentile = new Percentile(p);
                        percentile.setData(latencies);
                        double val = percentile.evaluate();
                        if (Double.isNaN(val))
                            val = 0.0;
                        writer.printf("%f,", val);
                    }
                    writer.println();
                    writer.flush();
                }

                transactionCountWriter.println();
                avgLatencyWriter.println();
                transactionCountWriter.flush();
                avgLatencyWriter.flush();

                //            System.out.print((maxClusterId + 1) + ": ");
                //            for (int i = 0; i <= maxClusterId; ++i)
                //            {
                //               System.out.print(count[i] + ", ");
                //               count[i] = 0;
                //            }
                //            System.out.println();
                //            ArrayList<Cluster> clusters = (ArrayList<Cluster>)StreamClustering.getDBSCAN().getCurrentClusters();
                //            for (int i = 0; i < clusters.size(); ++i)
                //            {
                //               Cluster c1 = clusters.get(i);
                //               for (int j = 0; j < clusters.size(); ++j)
                //               {
                //                  Cluster c2 = clusters.get(j);
                //                  System.out.print(c1.getClusterDistance(c2) + " ");
                //               }
                //               System.out.println();
                //            }
                //            System.out.println("----");
                // is it correct to set it here?
                DBSeerGUI.isLiveDataReady = true;

                ++time;
            }

            if (terminate) {
                break;
            }

            Thread.sleep(100);
        } catch (InterruptedException e) {
            if (!terminate) {
                e.printStackTrace();
            } else {
                return;
            }
        }
    }
}

From source file:ch.unil.genescore.pathway.GeneSetLibrary.java
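
Here setData loads 10,000 simulated minimum values, and evaluate reads off the 1st, 5th, 10th and 15th percentiles of that empirical distribution.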

public void computeApproxChi2Values() {

    Percentile perc = new Percentile();
    EffTestCalculator effCalc = new EffTestCalculator();
    effCalc.addGeneIds(genes_);
    effCalc.addMetaGeneIds(metaGenes_);
    effCalc.setGeneSets(geneSets_);
    effCalc.initializeGeneVals();

    int nrOfLoops = 10000;
    double[] mins = new double[nrOfLoops];
    for (int i = 0; i < nrOfLoops; i++) {
        mins[i] = effCalc.calcMinVal();
    }
    perc.setData(mins);
    double p1 = perc.evaluate(1);
    double p5 = perc.evaluate(5);
    double p10 = perc.evaluate(10);
    double p15 = perc.evaluate(15);
    //double w1 = perc.evaluate(1.0/50);
    System.out.println(p1);
    System.out.println(p5);
    System.out.println(p10);
    System.out.println(p15);
    System.out.println("asdf");

}

From source file:org.apache.mahout.classifier.df.split.OptIgSplit.java
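
Mahout's decision-forest split finder loads a numeric feature's values into a Percentile and picks MAX_NUMERIC_SPLITS evenly spaced percentiles as candidate split points when there are too many distinct values to enumerate.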

/**
 * @return an array of values to split the numeric feature's values on when
 *  building candidate splits. When the input size is <= MAX_NUMERIC_SPLITS + 1, it
 *  returns the averages between successive values as split points. When larger, it
 *  returns MAX_NUMERIC_SPLITS approximate percentiles through the data.
 */
private static double[] chooseNumericSplitPoints(double[] values) {
    if (values.length <= 1) {
        return values;
    }
    if (values.length <= MAX_NUMERIC_SPLITS + 1) {
        double[] splitPoints = new double[values.length - 1];
        for (int i = 1; i < values.length; i++) {
            splitPoints[i - 1] = (values[i] + values[i - 1]) / 2.0;
        }
        return splitPoints;
    }
    Percentile distribution = new Percentile();
    distribution.setData(values);
    double[] percentiles = new double[MAX_NUMERIC_SPLITS];
    for (int i = 0; i < percentiles.length; i++) {
        double p = 100.0 * ((i + 1.0) / (MAX_NUMERIC_SPLITS + 1.0));
        percentiles[i] = distribution.evaluate(p);
    }
    return percentiles;
}
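
To make the percentile spacing concrete, here is a minimal sketch of the probabilities the loop above produces; the value 3 for MAX_NUMERIC_SPLITS is hypothetical, chosen only for illustration:

public class SplitPointSpacingDemo {
    public static void main(String[] args) {
        int maxSplits = 3; // hypothetical stand-in for MAX_NUMERIC_SPLITS
        for (int i = 0; i < maxSplits; i++) {
            // same formula as in chooseNumericSplitPoints above
            double p = 100.0 * ((i + 1.0) / (maxSplits + 1.0));
            System.out.println(p); // prints 25.0, 50.0, 75.0 -- the quartiles
        }
    }
}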

From source file:org.apache.solr.client.solrj.io.eval.PercentileEvaluator.java
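
Solr's percentile stream evaluator validates its two operands, copies the numeric list into a double array for setData, and evaluates at the requested percentile.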

@Override
public Object doWork(Object first, Object second) throws IOException {
    if (null == first) {
        throw new IOException(String.format(Locale.ROOT,
                "Invalid expression %s - null found for the first value", toExpression(constructingFactory)));
    }
    if (null == second) {
        throw new IOException(String.format(Locale.ROOT,
                "Invalid expression %s - null found for the second value", toExpression(constructingFactory)));
    }
    if (!(first instanceof List<?>)) {
        throw new IOException(String.format(Locale.ROOT,
                "Invalid expression %s - found type %s for the first value, expecting a List",
                toExpression(constructingFactory), first.getClass().getSimpleName()));
    }
    if (!(second instanceof Number)) {
        throw new IOException(String.format(Locale.ROOT,
                "Invalid expression %s - found type %s for the second value, expecting a Number",
                toExpression(constructingFactory), second.getClass().getSimpleName()));
    }

    Percentile percentile = new Percentile();
    percentile
            .setData(((List<?>) first).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray());
    return percentile.evaluate(((Number) second).doubleValue());
}

From source file:org.drugis.mtc.summary.QuantileSummary.java
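
This summary class copies the last half of each chain's samples for a parameter into an array and returns a Percentile preloaded via setData, ready for later quantile queries.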

private Percentile getSamples() {
    List<Double> list = SummaryUtil.getAllChainsLastHalfSamples(d_results, d_parameter);
    double[] arr = new double[list.size()];
    for (int i = 0; i < list.size(); ++i) {
        arr[i] = list.get(i);
    }

    Percentile percentile = new Percentile();
    percentile.setData(arr);
    return percentile;
}
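
Returning the preloaded Percentile lets callers evaluate any number of quantiles against the same cached sample array. A sketch of how the result might be consumed; the probabilities 2.5, 50 and 97.5 are illustrative, not taken from the original class:

// hypothetical caller code, not part of QuantileSummary
Percentile samples = getSamples();
double lower  = samples.evaluate(2.5);  // e.g. lower bound of a 95% interval
double median = samples.evaluate(50.0);
double upper  = samples.evaluate(97.5); // e.g. upper bound of a 95% interval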

From source file:org.hawkular.metrics.core.impl.cassandra.GaugeBucketedOutputMapper.java
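
Each time bucket's gauge values are loaded into a single Percentile so that the median and the 95th percentile can be computed alongside min, mean and max.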

@Override
protected GaugeBucketDataPoint newPointInstance(long from, long to, List<GaugeData> gaugeDatas) {
    double[] values = new double[gaugeDatas.size()];
    for (ListIterator<GaugeData> iterator = gaugeDatas.listIterator(); iterator.hasNext();) {
        GaugeData gaugeData = iterator.next();
        values[iterator.previousIndex()] = gaugeData.getValue();
    }

    Percentile percentile = new Percentile();
    percentile.setData(values);

    return new GaugeBucketDataPoint.Builder(from, to).setMin(new Min().evaluate(values))
            .setAvg(new Mean().evaluate(values)).setMedian(percentile.evaluate(50.0))
            .setMax(new Max().evaluate(values)).setPercentile95th(percentile.evaluate(95.0)).build();
}

From source file:org.hawkular.metrics.core.impl.cassandra.NumericBucketedOutputMapper.java
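
The numeric-data variant of the previous mapper: the same pattern, with the median and 95th percentile derived from one setData call.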

@Override
protected NumericBucketDataPoint newPointInstance(long from, long to, List<NumericData> numericDatas) {
    double[] values = new double[numericDatas.size()];
    for (ListIterator<NumericData> iterator = numericDatas.listIterator(); iterator.hasNext();) {
        NumericData numericData = iterator.next();
        values[iterator.previousIndex()] = numericData.getValue();
    }

    Percentile percentile = new Percentile();
    percentile.setData(values);

    return new NumericBucketDataPoint.Builder(from, to).setMin(new Min().evaluate(values))
            .setAvg(new Mean().evaluate(values)).setMedian(percentile.evaluate(50.0))
            .setMax(new Max().evaluate(values)).setPercentile95th(percentile.evaluate(95.0)).build();
}

From source file:org.hawkular.metrics.core.impl.GaugeBucketedOutputMapper.java
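
The same pattern once more, for the newer DataPoint<Double> API: setData once, then evaluate at 50.0 and 95.0.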

@Override
protected GaugeBucketDataPoint newPointInstance(long from, long to, List<DataPoint<Double>> dataPoints) {
    double[] values = new double[dataPoints.size()];
    for (ListIterator<DataPoint<Double>> iterator = dataPoints.listIterator(); iterator.hasNext();) {
        DataPoint<Double> gaugeData = iterator.next();
        values[iterator.previousIndex()] = gaugeData.getValue();
    }

    Percentile percentile = new Percentile();
    percentile.setData(values);

    return new GaugeBucketDataPoint.Builder(from, to).setMin(new Min().evaluate(values))
            .setAvg(new Mean().evaluate(values)).setMedian(percentile.evaluate(50.0))
            .setMax(new Max().evaluate(values)).setPercentile95th(percentile.evaluate(95.0)).build();
}