List of usage examples for org.apache.commons.math3.stat.descriptive DescriptiveStatistics getMax
public double getMax()
From source file:com.fpuna.preproceso.PreprocesoTS.java
private static TrainingSetFeature calculoFeaturesMagnitud(List<Registro> muestras, String activity) { TrainingSetFeature Feature = new TrainingSetFeature(); DescriptiveStatistics stats_m = new DescriptiveStatistics(); double[] fft_m; double[] AR_4; muestras = Util.calcMagnitud(muestras); for (int i = 0; i < muestras.size(); i++) { stats_m.addValue(muestras.get(i).getM_1()); }/*from ww w .j a va 2 s. c o m*/ //********* FFT ********* //fft_m = Util.transform(stats_m.getValues()); fft_m = FFTMixedRadix.fftPowerSpectrum(stats_m.getValues()); //******************* Calculos Magnitud *******************// //mean(s) - Arithmetic mean System.out.print(stats_m.getMean() + ","); Feature.setMeanX((float) stats_m.getMean()); //std(s) - Standard deviation System.out.print(stats_m.getStandardDeviation() + ","); Feature.setStdX((float) stats_m.getStandardDeviation()); //mad(s) - Median absolute deviation // //max(s) - Largest values in array System.out.print(stats_m.getMax() + ","); Feature.setMaxX((float) stats_m.getMax()); //min(s) - Smallest value in array System.out.print(stats_m.getMin() + ","); Feature.setMinX((float) stats_m.getMin()); //skewness(s) - Frequency signal Skewness System.out.print(stats_m.getSkewness() + ","); Feature.setSkewnessX((float) stats_m.getSkewness()); //kurtosis(s) - Frequency signal Kurtosis System.out.print(stats_m.getKurtosis() + ","); Feature.setKurtosisX((float) stats_m.getKurtosis()); //energy(s) - Average sum of the squares System.out.print(stats_m.getSumsq() / stats_m.getN() + ","); Feature.setEnergyX((float) (stats_m.getSumsq() / stats_m.getN())); //entropy(s) - Signal Entropy System.out.print(Util.calculateShannonEntropy(fft_m) + ","); Feature.setEntropyX(Util.calculateShannonEntropy(fft_m).floatValue()); //iqr (s) Interquartile range System.out.print(stats_m.getPercentile(75) - stats_m.getPercentile(25) + ","); Feature.setIqrX((float) (stats_m.getPercentile(75) - stats_m.getPercentile(25))); try { //autoregression (s) -4th order Burg 
Autoregression coefficients AR_4 = AutoRegression.calculateARCoefficients(stats_m.getValues(), 4, true); System.out.print(AR_4[0] + ","); System.out.print(AR_4[1] + ","); System.out.print(AR_4[2] + ","); System.out.print(AR_4[3] + ","); Feature.setArX1((float) AR_4[0]); Feature.setArX2((float) AR_4[1]); Feature.setArX3((float) AR_4[2]); Feature.setArX4((float) AR_4[3]); } catch (Exception ex) { Logger.getLogger(PreprocesoTS.class.getName()).log(Level.SEVERE, null, ex); } //meanFreq(s) - Frequency signal weighted average System.out.print(Util.meanFreq(fft_m, stats_m.getValues()) + ","); Feature.setMeanFreqx((float) Util.meanFreq(fft_m, stats_m.getValues())); //******************* Actividad *******************/ System.out.print(activity); System.out.print("\n"); Feature.setEtiqueta(activity); return Feature; }
From source file:com.linuxbox.enkive.statistics.consolidation.AbstractConsolidator.java
/**
 * Pulls the statistic matching the given consolidation method from a
 * pre-populated {@link DescriptiveStatistics} object and stores it in the
 * target map under the method's name.
 *
 * Unrecognized methods are silently ignored (no entry is added).
 *
 * @param method the consolidation method (sum, max, min, or avg constant)
 * @param statsMaker the pre-populated DescriptiveStatistics object to pull stats from
 * @param statData the map to populate with the consolidated value
 */
public void methodMapBuilder(String method, DescriptiveStatistics statsMaker, Map<String, Object> statData) {
    final Object consolidated;
    if (method.equals(CONSOLIDATION_SUM)) {
        consolidated = statsMaker.getSum();
    } else if (method.equals(CONSOLIDATION_MAX)) {
        consolidated = statsMaker.getMax();
    } else if (method.equals(CONSOLIDATION_MIN)) {
        consolidated = statsMaker.getMin();
    } else if (method.equals(CONSOLIDATION_AVG)) {
        consolidated = statsMaker.getMean();
    } else {
        return; // unknown method: leave the map untouched, as before
    }
    statData.put(method, consolidated);
}
From source file:com.caseystella.analytics.util.DistributionUtil.java
/**
 * Writes a multi-line distribution summary (min, selected percentiles, max,
 * mean, standard deviation) of the given statistics to the print stream.
 *
 * @param title heading printed before the statistics
 * @param statistics the populated statistics to report on
 * @param pw the destination stream
 */
public void summary(String title, DescriptiveStatistics statistics, PrintStream pw) {
    // Percentiles reported, in display order; all labels use a plain "th" suffix.
    final int[] reportedPercentiles = { 1, 5, 10, 25, 50, 90, 95, 99 };
    final StringBuilder report = new StringBuilder(title).append(": ");
    report.append("\n\tMin: ").append(statistics.getMin());
    for (final int p : reportedPercentiles) {
        report.append("\n\t").append(p).append("th: ").append(statistics.getPercentile(p));
    }
    report.append("\n\tMax: ").append(statistics.getMax());
    report.append("\n\tMean: ").append(statistics.getMean());
    report.append("\n\tStdDev: ").append(statistics.getStandardDeviation());
    pw.println(report.toString());
}
From source file:com.duy.pascal.interperter.libraries.math.MathLib.java
/** Returns the largest value among the given numbers. */
@PascalMethod(description = "")
public double MaxValue(double... arr) {
    final DescriptiveStatistics stats = new DescriptiveStatistics(arr);
    return stats.getMax();
}
From source file:com.duy.pascal.interperter.libraries.math.MathLib.java
/** Returns the largest value among the given integers. */
@PascalMethod(description = "")
public int MaxIntValue(int... arr) {
    // DescriptiveStatistics only accepts doubles, so widen the ints first.
    final double[] widened = new double[arr.length];
    int idx = 0;
    for (final int value : arr) {
        widened[idx++] = value;
    }
    final DescriptiveStatistics stats = new DescriptiveStatistics(widened);
    return (int) stats.getMax();
}
From source file:com.loadtesting.core.data.TimeSerieData.java
public TimeSerieData(String name, List<TimeSample> samples, CapturerConfig config) { this.name = name; this.unit = config.getUnit(); this.volume = samples.size(); if (volume > 0) { TimeSample first = samples.get(0); this.unit = first.getTimeUnit(); this.opening = first.getTime(unit); TimeSample last = samples.get(volume - 1); this.closing = last.getTime(unit); this.samples = config.getFilter().filter(samples); DescriptiveStatistics stats = new DescriptiveStatistics(volume); for (TimeSample timeSample : samples) { stats.addValue(timeSample.getTime(unit)); }/* w ww. j a v a2 s . c o m*/ this.high = stats.getMax(); this.low = stats.getMin(); this.median = (high + low) / 2; this.typical = (high + low + closing) / 3; this.weightedClose = (high + low + closing + closing) / 4; this.sma = stats.getMean(); this.variance = stats.getVariance(); this.sd = stats.getStandardDeviation(); this.sum = stats.getSum(); this.sumsq = stats.getSumsq(); this.skewness = stats.getSkewness(); this.kurtosis = stats.getKurtosis(); this.geometricMean = stats.getGeometricMean(); this.populationVariance = stats.getPopulationVariance(); } else { this.samples = samples; } }
From source file:org.sakaiproject.gradebookng.tool.panels.SettingsGradingSchemaPanel.java
/** * Calculates the max grade for the course * /* w w w . j av a 2 s . c o m*/ * @return String max grade */ private String getMax(DescriptiveStatistics stats) { return this.total > 0 ? String.format("%.2f", stats.getMax()) : "-"; }
From source file:com.caseystella.analytics.outlier.batch.rpca.RPCAOutlierAlgorithm.java
public double outlierScore(List<DataPoint> dataPoints, DataPoint value) { double[] inputData = new double[dataPoints.size() + 1]; int numNonZero = 0; if (scaling != ScalingFunctions.NONE) { int i = 0; final DescriptiveStatistics stats = new DescriptiveStatistics(); for (DataPoint dp : dataPoints) { inputData[i++] = dp.getValue(); stats.addValue(dp.getValue()); numNonZero += dp.getValue() > EPSILON ? 1 : 0; }/*from w w w .j a v a 2 s . co m*/ inputData[i] = value.getValue(); GlobalStatistics globalStats = new GlobalStatistics() { { setMax(stats.getMax()); setMin(stats.getMin()); setMax(stats.getMean()); setStddev(stats.getStandardDeviation()); } }; for (i = 0; i < inputData.length; ++i) { inputData[i] = scaling.scale(inputData[i], globalStats); } } else { int i = 0; for (DataPoint dp : dataPoints) { inputData[i++] = dp.getValue(); numNonZero += dp.getValue() > EPSILON ? 1 : 0; } inputData[i] = value.getValue(); } int nCols = 1; int nRows = inputData.length; if (numNonZero > minRecords) { AugmentedDickeyFuller dickeyFullerTest = new AugmentedDickeyFuller(inputData); double[] inputArrayTransformed = inputData; if (!this.isForceDiff && dickeyFullerTest.isNeedsDiff()) { // Auto Diff inputArrayTransformed = dickeyFullerTest.getZeroPaddedDiff(); } else if (this.isForceDiff) { // Force Diff inputArrayTransformed = dickeyFullerTest.getZeroPaddedDiff(); } if (this.spenalty == null) { this.lpenalty = this.LPENALTY_DEFAULT; this.spenalty = this.SPENALTY_DEFAULT / Math.sqrt(Math.max(nCols, nRows)); } // Calc Mean double mean = 0; for (int n = 0; n < inputArrayTransformed.length; n++) { mean += inputArrayTransformed[n]; } mean /= inputArrayTransformed.length; // Calc STDEV double stdev = 0; for (int n = 0; n < inputArrayTransformed.length; n++) { stdev += Math.pow(inputArrayTransformed[n] - mean, 2); } stdev = Math.sqrt(stdev / (inputArrayTransformed.length - 1)); // Transformation: Zero Mean, Unit Variance for (int n = 0; n < inputArrayTransformed.length; n++) { 
inputArrayTransformed[n] = (inputArrayTransformed[n] - mean) / stdev; } // Read Input Data into Array // Read Input Data into Array double[][] input2DArray = new double[nRows][nCols]; input2DArray = VectorToMatrix(inputArrayTransformed, nRows, nCols); RPCA rSVD = new RPCA(input2DArray, this.lpenalty, this.spenalty); double[][] outputE = rSVD.getE().getData(); double[][] outputS = rSVD.getS().getData(); double[][] outputL = rSVD.getL().getData(); return outputS[nRows - 1][0]; } else { return Double.NaN; } }
From source file:com.iorga.webappwatcher.analyzer.model.session.RequestsGraph.java
/**
 * Lazily (re)builds the dispersion graph: the cached graph is reused unless
 * the requested mode or the number of dispersion slices has changed.
 *
 * In STATIC mode the Y thresholds are the fixed static dispersion table plus
 * the overall median and max (sorted); otherwise they are evenly spaced
 * percentiles. One series is built per threshold band, then per time slice
 * the request durations falling into each band are counted, alongside
 * CPU/memory/user-count/median series.
 *
 * @param graphMode STATIC (fixed thresholds) or dynamic (percentile-based)
 * @param nbItemsForDispersionTables number of dispersion bands (ignored in
 *        STATIC mode, where the static table size + 2 is used)
 * @return the cached or freshly computed graph
 */
public synchronized Graph compute(final GraphMode graphMode, int nbItemsForDispersionTables)
        throws ClassNotFoundException, IOException {
    // Rebuild only when the cache is stale for the requested parameters.
    if (graph == null || this.nbItemsForDispersionTables != nbItemsForDispersionTables
            || this.graphMode != graphMode) {
        final Graph graph = new Graph();
        /// now let's build the json series ///
        // first, we must create the list of different Y values
        final boolean isStaticMode = graphMode == GraphMode.STATIC;
        if (isStaticMode) {
            nbItemsForDispersionTables = staticDispersionTable.size() + 2; // +2 because we will add the median, and the max
        } else {
            nbItemsForDispersionTables = this.nbItemsForDispersionTables;
        }
        final double[] yValues = new double[nbItemsForDispersionTables];
        graph.durationsFor1clickDispersionSeries = new ArrayList<Serie>(nbItemsForDispersionTables);
        final DescriptiveStatistics totalDurationsFor1click = durationPerPrincipalStats
                .computeTotalDurationsFor1click();
        if (isStaticMode) {
            // static mode: median / 1s / 2s / 3s / 5s / 10s / 20s / max (must be sorted)
            final List<Double> yValuesList = Lists.newArrayList(staticDispersionTable);
            yValuesList.add(totalDurationsFor1click.getPercentile(50)); // add the median
            yValuesList.add(totalDurationsFor1click.getMax()); // add the max
            final List<Double> sortedYValuesList = Ordering.natural().sortedCopy(yValuesList);
            int i = 0;
            for (final Double yValue : sortedYValuesList) {
                yValues[i++] = yValue;
            }
        } else {
            // Dynamic mode: evenly spaced percentiles up to the 100th.
            for (int i = 0; i < yValues.length; i++) {
                yValues[i] = totalDurationsFor1click
                        .getPercentile((i + 1d) / nbItemsForDispersionTables * 100d);
            }
        }
        // Compute the band labels: each series covers (previous threshold, threshold].
        for (int i = 0; i < yValues.length; i++) {
            graph.durationsFor1clickDispersionSeries
                    .add(new Serie(i == 0 ? 0 : (int) yValues[i - 1], (int) yValues[i]));
        }
        // Now compute the data for each Y band, slice by slice.
        TimeSlice previousTimeSlice = null;
        for (final TimeSlice timeSlice : durationPerPrincipalStats.computeTimeSliceList()) {
            final long endDateTime = timeSlice.getEndDate().getTime();
            final long startDateTime = timeSlice.getStartDate().getTime();
            // TODO: improve this algorithm by iterating over each value of
            // totalDurationsFor1click and binary-searching for the counter to
            // increment that matches the right yValues band.
            final Date middleTimeSliceDate = new Date((endDateTime + startDateTime) / 2); // data is displayed at the middle of the slice
            // Insert a null marker when slices are not contiguous, so the chart shows a gap.
            final boolean mustAppendNullForPrevious = previousTimeSlice != null
                    && previousTimeSlice.getEndDate().getTime() != startDateTime;
            for (int i = 0; i < yValues.length; i++) {
                final Serie serie = graph.durationsFor1clickDispersionSeries.get(i);
                final double maxInclude = serie.max;
                final double minExclude = serie.min;
                final double[] values = timeSlice.getDurationsFor1click().getValues();
                // Count the durations of this slice that fall into (minExclude, maxInclude].
                int n = 0;
                for (final double value : values) {
                    if (minExclude < value && value <= maxInclude) {
                        n++;
                    }
                }
                addNewDateDoubleValueAndNullForPreviousIfNecessary(serie.data, middleTimeSliceDate, n,
                        mustAppendNullForPrevious, previousTimeSlice);
            }
            // Add the CPU, memory, user-count and median-duration series points.
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.cpuUsageMeans, middleTimeSliceDate,
                    timeSlice.getCpuUsage().getMean(), mustAppendNullForPrevious, previousTimeSlice);
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.memoryUsageMeans, middleTimeSliceDate,
                    timeSlice.getMemoryUsage().getMean(), mustAppendNullForPrevious, previousTimeSlice);
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.nbUsersMax, middleTimeSliceDate,
                    timeSlice.getStatsPerPrincipal().size(), mustAppendNullForPrevious, previousTimeSlice);
            addNewDateDoubleValueAndNullForPreviousIfNecessary(graph.durationsFor1clickMedians, middleTimeSliceDate,
                    timeSlice.getDurationsFor1click().getPercentile(50), mustAppendNullForPrevious,
                    previousTimeSlice);
            previousTimeSlice = timeSlice;
        }
        this.nbItemsForDispersionTables = nbItemsForDispersionTables;
        this.graphMode = graphMode;
        this.graph = graph; // a change of the underlying statistics resets the cached graph here
    }
    return graph;
}
From source file:de.iisys.schub.processMining.similarity.AlgoController.java
/**
 * Summarizes a list of cosine similarity scores as a single line of
 * percentage-formatted metadata: min, max, arithmetic mean, and the
 * configured percentile.
 *
 * @param cosineSimValues the cosine similarity scores to summarize
 * @return the formatted summary line
 */
private String showDocMetaData(List<Double> cosineSimValues) {
    final DescriptiveStatistics stats = new DescriptiveStatistics();
    for (final Double similarity : cosineSimValues) {
        stats.addValue(similarity);
    }
    // Round to 3 decimal places (4 for the mean) before converting to percent.
    final double min = Math.round(stats.getMin() * 1000) / 1000.0;
    final double max = Math.round(stats.getMax() * 1000) / 1000.0;
    final double arithMean = Math.round(stats.getMean() * 10000) / 10000.0;
    final double percentile = Math.round(stats.getPercentile(PERCENTILE) * 1000) / 1000.0;
    final DecimalFormat df = new DecimalFormat("#00.00");
    final StringBuilder meta = new StringBuilder();
    meta.append("Min: ").append(df.format(min * 100)).append(" %");
    meta.append(", Max: ").append(df.format(max * 100)).append(" %");
    meta.append(", Arith. Mean: ").append(df.format(arithMean * 100)).append(" %");
    meta.append(", Percentile (").append(PERCENTILE).append(" %): ").append(df.format(percentile * 100)).append(" %");
    return meta.toString();
}