List of usage examples for org.apache.commons.math3.stat.descriptive.rank.Percentile
public Percentile(Percentile original) throws NullArgumentException
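This is the copy constructor: it builds a new Percentile with the same quantile setting and configuration as the original, and throws NullArgumentException when original is null. None of the examples below use it directly, so here is a minimal sketch of what it looks like in isolation (the data values are my own illustration, not from any of the source files below):

import org.apache.commons.math3.stat.descriptive.rank.Percentile;

public class PercentileCopyDemo {
    public static void main(String[] args) {
        // configure a 95th-percentile estimator and bind it to a data set
        Percentile p95 = new Percentile(95.0);
        p95.setData(new double[] { 12.0, 7.5, 3.2, 41.0, 18.6 });

        // the copy duplicates the original's configuration, so it should
        // evaluate to the same value as p95.evaluate()
        Percentile copy = new Percentile(p95);
        System.out.println(copy.evaluate());
    }
}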
From source file: aos.creditassignment.setimprovement.MedianIndicatorImprovement.java

/**
 * Constructor for indicator-based set improvement credit assignment.
 */
public MedianIndicatorImprovement() {
    operatesOn = CreditDefinedOn.POPULATION;
    inputType = CreditFunctionInputType.SI;
    fitType = CreditFitnessFunctionType.I;
    medianCompute = new Percentile(50.0);
}
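The constructor only stores the estimator; a Percentile built with 50.0 computes the median once data is supplied. A hand-checkable illustration (my own values, not from the source):

Percentile medianCompute = new Percentile(50.0);
double median = medianCompute.evaluate(new double[] { 3.0, 1.0, 2.0 }); // 2.0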
From source file: gedi.util.datastructure.array.functions.NumericArrayFunction.java

public static NumericArrayFunction quantile(double q) {
    return new UnivariateStatisticAdapter(new Percentile(q * 100));
}
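The factor of 100 is the point of this adapter: it accepts a unit-interval quantile q, while Percentile expects a percentage in (0, 100]. Passing the unscaled value by mistake would silently request a far lower percentile. For example:

Percentile q25 = new Percentile(0.25 * 100); // the 0.25 quantile, i.e. the 25th percentile
double v = q25.evaluate(new double[] { 1.0, 2.0, 3.0, 4.0 });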
From source file: dbseer.comp.live.LiveTransactionProcessor.java

@Override
public void run() {
    try {
        this.transactionCountWriter = new PrintWriter(new FileWriter(this.transactionCountFile, true));
        this.avgLatencyWriter = new PrintWriter(new FileWriter(this.avgLatencyFile, true));
    } catch (IOException e) {
        e.printStackTrace();
    }

    long time;
    // wait for transactions to come in
    while (true) {
        time = map.getMinEndTime();
        if (time != Long.MAX_VALUE) {
            break;
        } else {
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                if (!terminate) {
                    e.printStackTrace();
                } else {
                    return;
                }
            }
        }
        if (terminate) {
            break;
        }
    }

    String gap = " ";
    double totalCount = 0;
    double currentCount = 0;
    double[] count = new double[DBSeerConstants.MAX_NUM_TABLE];
    double[] latencySum = new double[DBSeerConstants.MAX_NUM_TABLE];
    int maxClusterId = 0;
    long transCount = 0;

    // start processing transactions
    while (true) {
        long maxTime, maxClusterEndTime;
        maxTime = map.getMaxEndTime();

        if (!StreamClustering.getDBSCAN().isInitialized() && transCount < DBSeerConstants.DBSCAN_INIT_PTS) {
            transCount = map.getCount();
            monitor.setGlobalTransactionCount(transCount);
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }

        // synchronized (StreamClustering.LOCK)
        try {
            StreamClustering.LOCK.lockInterruptibly();
            {
                maxClusterEndTime = StreamClustering.getDBSCAN().getMaxEndTime();
            }
            StreamClustering.LOCK.unlock();

            while (time < maxTime && time < maxClusterEndTime) {
                currentCount = 0;
                Set<Transaction> transactions = map.pollTransactions(time);

                // if no transactions for the time, skip to the next timestamp.
                if (transactions.isEmpty()) {
                    ++time;
                    continue;
                }

                // if sys log is not available for the time, also skip to the next timestamp.
                if (map.getMinSysLogTime() != Long.MAX_VALUE && map.getMinSysLogTime() > time) {
                    ++time;
                    continue;
                }

                boolean monitorLogFound = true;
                String monitorLog;
                while ((monitorLog = map.getSysLog(time)) == null) {
                    if (time < map.getLastSysLogTime()) {
                        monitorLogFound = false;
                        break;
                    }
                    try {
                        Thread.sleep(100);
                    } catch (InterruptedException e) {
                        if (!terminate) {
                            e.printStackTrace();
                        } else {
                            return;
                        }
                    }
                }

                if (!monitorLogFound) {
                    ++time;
                    continue;
                }

                monitorWriter.println(monitorLog);
                monitorWriter.flush();

                for (Transaction t : transactions) {
                    Cluster c = t.getCluster();
                    // if cluster is null, skip
                    if (c == null) {
                        continue;
                    }
                    int cId = c.getId();
                    long latency = t.getLatency();
                    // ignore outliers
                    if (cId >= 0) {
                        latencySum[cId] += latency;
                        ++count[cId];
                        ++totalCount;
                        ++currentCount;

                        ArrayList<Double> latencyList = latencyMap.get(cId);
                        if (latencyList == null) {
                            latencyList = new ArrayList<Double>();
                            latencyMap.put(cId, latencyList);
                        }
                        latencyList.add((double) latency / 1000.0);
                    }
                    if (cId > maxClusterId) {
                        maxClusterId = cId;
                    }
                }

                // update live monitor
                // int numTrans = maxClusterId + 1;
                int numTrans = StreamClustering.getDBSCAN().getAllClusters().size();
                synchronized (LiveMonitorInfo.LOCK) {
                    monitor.setCurrentTimestamp(time);
                    monitor.setNumTransactionTypes(numTrans);
                    monitor.setGlobalTransactionCount(totalCount);
                    for (int i = 0; i < numTrans; ++i) {
                        monitor.setCurrentTPS(i, count[i]);
                        if (count[i] == 0) {
                            monitor.setCurrentAverageLatency(i, 0.0);
                        } else {
                            monitor.setCurrentAverageLatency(i, latencySum[i] / count[i]);
                        }
                    }
                }

                transactionCountWriter.print(gap);
                avgLatencyWriter.print(gap);
                transactionCountWriter.printf("%.16e", (double) time);
                avgLatencyWriter.printf("%.16e", (double) time);

                for (int i = 0; i < numTrans; ++i) {
                    transactionCountWriter.print(gap);
                    transactionCountWriter.printf("%.16e", count[i]);
                    avgLatencyWriter.print(gap);
                    if (count[i] == 0.0) {
                        avgLatencyWriter.printf("%.16e", 0.0);
                    } else {
                        avgLatencyWriter.printf("%.16e", (latencySum[i] / (double) count[i] / 1000.0));
                    }
                    count[i] = 0;
                    latencySum[i] = 0;

                    // write percentile
                    PrintWriter writer = percentileLatencyWriter.get(i);
                    ArrayList<Double> latencyList = latencyMap.get(i);
                    if (latencyList == null) {
                        latencyList = new ArrayList<Double>();
                        latencyMap.put(i, latencyList);
                    }
                    if (writer == null) {
                        try {
                            writer = new PrintWriter(new FileOutputStream(
                                    String.format("%s%03d",
                                            DBSeerGUI.userSettings.getDBSeerRootPath() + File.separator
                                                    + DBSeerConstants.LIVE_DATASET_PATH + File.separator
                                                    + "prctile_latency_", i),
                                    true));
                        } catch (FileNotFoundException e) {
                            e.printStackTrace();
                        }
                        percentileLatencyWriter.put(i, writer);
                    }

                    double[] latencies = Doubles.toArray(latencyList);
                    writer.printf("%d,", time);
                    for (double p : percentiles) {
                        Percentile percentile = new Percentile(p);
                        percentile.setData(latencies);
                        double val = percentile.evaluate();
                        if (Double.isNaN(val))
                            val = 0.0;
                        writer.printf("%f,", val);
                    }
                    writer.println();
                    writer.flush();
                }

                transactionCountWriter.println();
                avgLatencyWriter.println();
                transactionCountWriter.flush();
                avgLatencyWriter.flush();

                // is it correct to set it here?
                DBSeerGUI.isLiveDataReady = true;

                ++time;
            }

            if (terminate) {
                break;
            }
            Thread.sleep(100);
        } catch (InterruptedException e) {
            if (!terminate) {
                e.printStackTrace();
            } else {
                return;
            }
        }
    }
}
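Allocating a fresh Percentile for every p inside the loop works, but it is not required: a single Percentile can evaluate several quantiles against the same bound data via evaluate(double p). A sketch of the equivalent reuse pattern, keeping the variable names from the snippet above (my own restructuring, not the project's code):

Percentile percentile = new Percentile();
percentile.setData(latencies);
for (double p : percentiles) {
    // evaluates the already-bound data at quantile p
    double val = percentile.evaluate(p);
    if (Double.isNaN(val)) val = 0.0;
    writer.printf("%f,", val);
}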
From source file: dbseer.comp.process.transaction.TransactionLogWriter.java

public void writeLog(long timestamp, Collection<Transaction> transactions) throws Exception {
    if (!this.isInitialized) {
        throw new Exception("TransactionLogWriter not initialized.");
    }

    double totalCount = 0;
    double[][] count = new double[numServer][DBSeerConstants.MAX_NUM_TABLE];
    double[][] latencySum = new double[numServer][DBSeerConstants.MAX_NUM_TABLE];
    String gap = " ";

    if (!dbscan.isInitialized() && !dbscan.isInitializing()) {
        initialTransactions.addAll(transactions);
        if (initialTransactions.size() > dbscan.getInitPts() && !dbscan.isInitializing()) {
            dbscanInitializer = Executors.newSingleThreadExecutor();
            dbscanInitializer.submit(new Runnable() {
                @Override
                public void run() {
                    dbscan.initialDBSCAN(initialTransactions);
                }
            });
        }
    }

    for (Transaction t : transactions) {
        if (dbscan != null && dbscan.isInitialized()) {
            if (liveLogProcessor.getTxStartTime() == 0) {
                liveLogProcessor.setTxStartTime(timestamp);
            }
            dbscan.train(t);
        }

        int type;
        if (t.getCluster() == null) {
            type = 0;
        } else {
            type = t.getCluster().getId();
        }
        if (type > maxType) {
            maxType = type;
        }

        // if not an outlier
        if (type >= 0) {
            String server = t.getServerName();
            int index = serverIndex.get(server);

            latencySum[index][type] += t.getLatency();
            count[index][type]++;
            totalCount++;

            ArrayList<Double> latencyList = writers.get(server).getLatencyMap().get(type);
            if (latencyList == null) {
                latencyList = new ArrayList<Double>();
                writers.get(server).getLatencyMap().put(type, latencyList);
            }
            latencyList.add((double) t.getLatency());

            // write sample
            HashMap<Integer, Integer> countMap = writers.get(server).getTransactionSampleCountMap();
            Integer sampleCount = countMap.get(type);
            if (sampleCount == null) {
                countMap.put(type, 1);
            } else {
                int countVal = sampleCount.intValue();
                if (countVal < DBSeerConstants.MAX_TRANSACTION_SAMPLE) {
                    HashMap<Integer, PrintWriter> sampleWriters = writers.get(server)
                            .getTransactionSampleWriter();
                    PrintWriter sampleWriter = sampleWriters.get(type);
                    if (sampleWriter == null) {
                        sampleWriter = new PrintWriter(new FileOutputStream(String.format("%s%d",
                                this.dir + File.separator + server + File.separator + "tx_sample_", type),
                                false));
                        sampleWriters.put(type, sampleWriter);
                    }
                    sampleWriter.print(t.getEntireStatement());
                    sampleWriter.println("---");
                    sampleWriter.flush();
                    countVal++;
                    countMap.put(type, countVal);
                }
            }
        }
    }

    // update live monitor
    if (monitor != null) {
        monitor.setCurrentTimestamp(timestamp);
        monitor.setNumTransactionTypes(maxType + 1);
        monitor.setGlobalTransactionCount(totalCount);
        for (int i = 0; i <= maxType; ++i) {
            double countSum = 0;
            double latencySumSum = 0;
            for (int j = 0; j < numServer; ++j) {
                countSum += count[j][i];
                latencySumSum += latencySum[j][i];
            }
            monitor.setCurrentTPS(i, countSum);
            if (countSum == 0) {
                monitor.setCurrentAverageLatency(i, 0.0);
            } else {
                monitor.setCurrentAverageLatency(i, latencySumSum / countSum);
            }
        }
    }

    if (timestamp < liveLogProcessor.getSysStartTime() || liveLogProcessor.getSysStartTime() == 0) {
        return;
    }

    for (String server : servers) {
        TransactionWriter writer = writers.get(server);
        PrintWriter tpsWriter = writer.getTpsWriter();
        PrintWriter latencyWriter = writer.getLatencyWriter();
        HashMap<Integer, PrintWriter> prctileLatencyWriter = writer.getPrctileLatencyWriter();
        HashMap<Integer, ArrayList<Double>> latencyMap = writer.getLatencyMap();

        tpsWriter.print(gap);
        latencyWriter.print(gap);
        tpsWriter.printf("%.16e", (double) timestamp);
        latencyWriter.printf("%.16e", (double) timestamp);

        int index = serverIndex.get(server);
        for (int i = 0; i <= maxType; ++i) {
            tpsWriter.print(gap);
            tpsWriter.printf("%.16e", count[index][i]);
            latencyWriter.print(gap);
            if (count[index][i] == 0.0) {
                latencyWriter.printf("%.16e", 0.0);
            } else {
                latencyWriter.printf("%.16e", (latencySum[index][i] / count[index][i]) / 1000.0);
            }

            // write percentile
            PrintWriter prctileWriter = prctileLatencyWriter.get(i);
            ArrayList<Double> latencyList = latencyMap.get(i);
            if (latencyList == null) {
                latencyList = new ArrayList<Double>();
                latencyMap.put(i, latencyList);
            }
            if (prctileWriter == null) {
                prctileWriter = new PrintWriter(new FileOutputStream(String.format("%s%03d",
                        this.dir + File.separator + server + File.separator + "prctile_latency_", i), false));
                prctileLatencyWriter.put(i, prctileWriter);
            }

            double[] latencies = Doubles.toArray(latencyList);
            prctileWriter.printf("%d,", timestamp);
            for (double p : percentiles) {
                Percentile percentile = new Percentile(p);
                percentile.setData(latencies);
                double val = percentile.evaluate();
                if (Double.isNaN(val))
                    val = 0.0;
                prctileWriter.printf("%f,", val / 1000.0);
            }
            prctileWriter.println();
            prctileWriter.flush();

            latencyList.clear();
        }
        tpsWriter.println();
        latencyWriter.println();
        tpsWriter.flush();
        latencyWriter.flush();

        isWritingStarted = true;
    }
}
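Both this writer and the LiveTransactionProcessor above guard the result with Double.isNaN(val) because Percentile.evaluate returns NaN, rather than throwing, when the bound data set is empty, which happens here whenever a transaction type saw no traffic in the interval. A one-line illustration (my own example):

double val = new Percentile(99.0).evaluate(new double[0]); // Double.NaN, no exception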
From source file: cz.cuni.mff.peckam.ais.detection.SummingDetector.java

/**
 * Detect repetition in the given row/column sums.
 *
 * @param sums The row/column sums.
 * @return <code>null</code> if no pattern has been found. Otherwise, the first entry of the tuple is the
 *         offset and the second entry is the period of the repetition.
 */
private Tuple<Integer, Double> detectRepetition(float[] sums) {
    final int n0 = sums.length, t = n0;
    final float[] peaks = new float[t];
    final double quantile = new Percentile(60).evaluate(asDouble(sums));
    for (int i = 0; i < peaks.length; i++) {
        if (sums[i] >= quantile)
            peaks[i] = sums[i];
    }

    // only local maxima should remain in peaks
    filterPeaks(peaks);

    // make the peaks uniform and normalize their weights
    float[] weights = new float[peaks.length];
    System.arraycopy(peaks, 0, weights, 0, peaks.length);
    weights = normalize(weights);
    for (int i = 0; i < peaks.length; i++) {
        if (peaks[i] > 0) {
            peaks[i] = 1;
        }
    }

    final Tuple<Integer, Double[]> strategyResult = strategy.computePeriod(peaks, weights);
    if (strategyResult != null)
        return pickBestResult(strategyResult, sums);
    return null;
}
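Here the percentile serves as a threshold rather than a summary statistic: everything below the 60th percentile of the sums is discarded as a non-peak. A hand-checkable illustration with my own data; the 3.6 follows from the default estimation type, which interpolates at position p(n+1)/100 = 0.6 * 6 = 3.6:

double[] sums = { 1, 2, 3, 4, 5 };
double threshold = new Percentile(60).evaluate(sums); // 3.6
// only 4 and 5 survive as peak candidates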
From source file: org.hawkular.metrics.api.jaxrs.influx.InfluxSeriesHandler.java

private double quantil(List<DataPoint<Double>> in, double quantil) {
    double[] values = new double[in.size()];
    for (int i = 0; i < in.size(); i++) {
        values[i] = in.get(i).getValue();
    }
    return new Percentile(quantil).evaluate(values);
}
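Callers must keep the quantil argument within (0, 100]; the Percentile constructor rejects anything outside that range. For example:

new Percentile(50.0); // fine: the median
new Percentile(0.0);  // throws OutOfRangeException (a MathIllegalArgumentException)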