List of usage examples for org.apache.commons.math3.stat.descriptive.moment.Mean
public Mean()
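Before the collected examples, here is a minimal, self-contained sketch of the two ways Mean is typically used: streaming increments versus one-shot evaluation over an array. The class name and data values are illustrative, not from any of the sources below.

import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class MeanUsageSketch {
    public static void main(String[] args) {
        double[] values = {1.0, 2.0, 3.0, 4.0};

        // Streaming (storeless) usage: feed values one at a time.
        Mean streaming = new Mean();
        for (double v : values) {
            streaming.increment(v);
        }
        System.out.println(streaming.getResult()); // 2.5
        System.out.println(streaming.getN());      // 4

        // Batch usage: evaluate directly over an array.
        System.out.println(new Mean().evaluate(values)); // 2.5
    }
}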
From source file:com.biomeris.i2b2.export.engine.misc.ObservationAggregator.java
public ObservationAggregator() {
    super();
    mean = new Mean();
    median = new Median();
    standardDeviation = new StandardDeviation();
    numericValues = new ArrayList<>();
    stringValues = new ArrayList<>();
}
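This aggregator pairs Mean with Median and StandardDeviation from the same library. A minimal sketch of the three statistics side by side (the class name and data are illustrative; note that Median, unlike Mean, is not a storeless statistic, so it is evaluated over a complete array rather than incremented):

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;
import org.apache.commons.math3.stat.descriptive.rank.Median;

public class AggregateSketch {
    public static void main(String[] args) {
        double[] values = {2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0};
        System.out.println(new Mean().evaluate(values));              // 5.0
        System.out.println(new Median().evaluate(values));            // 4.5
        System.out.println(new StandardDeviation().evaluate(values)); // ~2.14 (bias-corrected)
    }
}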
From source file:com.cloudera.oryx.app.traffic.Endpoint.java
protected Endpoint(String path, double relativeProb) {
    Preconditions.checkArgument(relativeProb > 0.0);
    this.path = path;
    this.relativeProb = relativeProb;
    meanTimeMS = new Mean();
    stdevTimeMS = new StandardDeviation();
}
From source file:com.cloudera.oryx.rdf.common.rule.NumericPrediction.java
static NumericPrediction buildNumericPrediction(Iterable<Example> examples) {
    StorelessUnivariateStatistic mean = new Mean();
    for (Example example : examples) {
        mean.increment(((NumericFeature) example.getTarget()).getValue());
    }
    Preconditions.checkState(mean.getN() > 0);
    return new NumericPrediction((float) mean.getResult(), (int) mean.getN());
}
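The snippet above holds the Mean behind its StorelessUnivariateStatistic interface, which also provides incrementAll for bulk updates. A short sketch of that interface view (class name and data are illustrative):

import org.apache.commons.math3.stat.descriptive.StorelessUnivariateStatistic;
import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class StorelessSketch {
    public static void main(String[] args) {
        StorelessUnivariateStatistic mean = new Mean();
        mean.incrementAll(new double[] {3.0, 5.0, 7.0}); // bulk update
        mean.increment(9.0);                             // single update
        System.out.println(mean.getResult()); // 6.0
        System.out.println(mean.getN());      // 4
    }
}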
From source file:com.itemanalysis.psychometrics.scaling.KelleyRegressedScoreTest.java
/**
 * Test of rho method, of class KelleyRegressedScore.
 */
//@Test
public void testValue() {
    System.out.println("Kelley score test");
    double[][] x = getData();
    double[] sum = new double[1000];
    Mean mean = new Mean();
    CovarianceMatrix S = new CovarianceMatrix(50);
    for (int i = 0; i < x.length; i++) {
        sum[i] = 0.0;
        for (int j = 0; j < 50; j++) {
            for (int k = 0; k < 50; k++) {
                S.increment(j, k, x[i][j], x[i][k]);
            }
            sum[i] += x[i][j];
        }
    }
    CoefficientAlpha alpha = new CoefficientAlpha(S, false);
    KelleyRegressedScore kscore = new KelleyRegressedScore(mean.evaluate(sum), alpha);
    double[] kscores = this.getKelleyScores();
    double kelley = 0.0;
    for (int i = 0; i < kscores.length; i++) {
        kelley = kscore.value(sum[i]);
        assertEquals(kscores[i], kelley, 1e-5);
    }
}
From source file:com.itemanalysis.psychometrics.polycor.AbstractPolyserialCorrelation.java
public void summarize(double[] x, int[] y) {
    if (x.length != y.length)
        throw new IllegalArgumentException("X and Y are of different lengths.");
    N = (double) x.length;
    Mean meanX = new Mean();
    StandardDeviation sdX = new StandardDeviation();
    PearsonCorrelation rxy = new PearsonCorrelation();
    Frequency table = new Frequency();
    for (int i = 0; i < N; i++) {
        meanX.increment(x[i]);
        sdX.increment(x[i]);
        rxy.increment(x[i], (double) y[i]);
        table.addValue(y[i]);
    }
    //compute thresholds
    int nrow = table.getUniqueCount();
    double[] freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    for (int i = 0; i < (nrow - 1); i++) {
        freqDataY[i] = table.getCumFreq(i + 1);
        thresholds[i] = norm.inverseCumulativeProbability(freqDataY[i] / ntotal);
    }
    thresholds[nrow - 1] = 10; //set last threshold to a large number less than infinity
}
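The threshold step above converts cumulative category proportions into standard normal quantiles. A self-contained sketch of just that step, assuming ordinal responses coded 1..k as in the source (the class name and data are illustrative):

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.stat.Frequency;

public class ThresholdSketch {
    public static void main(String[] args) {
        int[] y = {1, 1, 2, 2, 2, 3}; // illustrative ordinal responses
        Frequency table = new Frequency();
        for (int v : y) {
            table.addValue(v);
        }
        NormalDistribution normal = new NormalDistribution();
        int ncat = table.getUniqueCount();
        double ntotal = table.getSumFreq();
        double[] thresholds = new double[ncat];
        // Each interior threshold is the standard normal quantile of the
        // cumulative proportion at or below that category.
        for (int i = 0; i < ncat - 1; i++) {
            thresholds[i] = normal.inverseCumulativeProbability(table.getCumFreq(i + 1) / ntotal);
        }
        thresholds[ncat - 1] = 10; // stand-in for +infinity, as in the source
        for (double t : thresholds) {
            System.out.println(t);
        }
    }
}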
From source file:com.itemanalysis.psychometrics.factoranalysis.VarimaxCriteria.java
/**
 * Computes the function value for varimax rotation.
 *
 * @param L matrix of factor loadings.
 */
public void computeValues(RealMatrix L) {
    //initialize dimensions and column mean array
    int nrow = L.getRowDimension();
    int ncol = L.getColumnDimension();
    Mean[] colMean = new Mean[ncol];
    for (int i = 0; i < ncol; i++) {
        colMean[i] = new Mean();
    }

    //square each element in matrix
    RealMatrix L2 = L.copy();
    double value = 0.0;
    for (int i = 0; i < nrow; i++) {
        for (int j = 0; j < ncol; j++) {
            value = L.getEntry(i, j);
            value *= value;
            L2.setEntry(i, j, value);
            colMean[j].increment(value);
        }
    }

    double dif = 0.0;
    RealMatrix QL = new Array2DRowRealMatrix(nrow, ncol);
    for (int i = 0; i < nrow; i++) {
        for (int j = 0; j < ncol; j++) {
            dif = L2.getEntry(i, j) - colMean[j].getResult();
            QL.setEntry(i, j, dif);
        }
    }

    //compute gradientAt
    gradient = new Array2DRowRealMatrix(nrow, ncol);
    for (int i = 0; i < nrow; i++) {
        for (int j = 0; j < ncol; j++) {
            value = -L.getEntry(i, j) * QL.getEntry(i, j);
            gradient.setEntry(i, j, value);
        }
    }

    //compute function value
    RealMatrix B = QL.transpose().multiply(QL);
    double sum = B.getTrace();
    functionValue = -sum / 4.0;
}
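In matrix terms, this method squares the loadings elementwise (L2), centers each column of L2 on its column mean to form QL (each Mean instance accumulates one column of squared loadings), and reports the varimax objective as -trace(QL' * QL) / 4, with gradient entries -L[i][j] * QL[i][j].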
From source file:com.cloudera.oryx.rdf.common.eval.Evaluation.java
/**
 * @param testSet test set to evaluate on
 * @return average absolute value of numeric target value in the test set
 */
private static double meanAbs(Iterable<Example> testSet) {
    StorelessUnivariateStatistic mean = new Mean();
    for (Example test : testSet) {
        NumericFeature actual = (NumericFeature) test.getTarget();
        mean.increment(FastMath.abs(actual.getValue()));
    }
    return mean.getResult();
}
From source file:fuzzy.df.MeanOfMaximaDefuzzificationFunction.java
public Double evaluate(NumericRange<T> x, MembershipFunction<T> mf) {
    Collection<Double> maximumValues = MaxMF.of(x.toCollection(), mf).keySet();
    double mean = new Mean().evaluate(Doubles.toArray(maximumValues));
    return mean;
}
From source file:com.itemanalysis.psychometrics.polycor.PolyserialLogLikelihoodTwoStep.java
public void summarize() throws DimensionMismatchException {
    if (dataX.length != dataY.length)
        throw new DimensionMismatchException(dataX.length, dataY.length);
    Frequency table = new Frequency();
    meanX = new Mean();
    sdX = new StandardDeviation();
    rxy = new PearsonCorrelation();
    for (int i = 0; i < nrow; i++) {
        meanX.increment(dataX[i]);
        sdX.increment(dataX[i]);
        rxy.increment(dataX[i], (double) dataY[i]);
        table.addValue(dataY[i]);
    }
    //compute thresholds
    nrow = table.getUniqueCount();
    freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    for (int i = 0; i < (nrow - 1); i++) {
        freqDataY[i] = table.getCumFreq(i + 1);
        alpha[i] = normal.inverseCumulativeProbability(freqDataY[i] / ntotal);
    }
    alpha[nrow - 1] = 10; //set last threshold to a large number less than infinity
}
From source file:net.myrrix.online.candidate.LocationSensitiveHashTest.java
@Test
public void testLSH() {
    System.setProperty("model.lsh.sampleRatio", "0.1");
    System.setProperty("model.lsh.numHashes", "20");
    RandomGenerator random = RandomManager.getRandom();
    Mean avgPercentTopRecsConsidered = new Mean();
    Mean avgNDCG = new Mean();
    Mean avgPercentAllItemsConsidered = new Mean();
    for (int iteration = 0; iteration < ITERATIONS; iteration++) {
        FastByIDMap<float[]> Y = new FastByIDMap<float[]>();
        for (int i = 0; i < NUM_ITEMS; i++) {
            Y.put(i, RandomUtils.randomUnitVector(NUM_FEATURES, random));
        }
        float[] userVec = RandomUtils.randomUnitVector(NUM_FEATURES, random);
        double[] results = doTestRandomVecs(Y, userVec);
        double percentTopRecsConsidered = results[0];
        double ndcg = results[1];
        double percentAllItemsConsidered = results[2];
        log.info("Considered {}% of all candidates, {} nDCG, got {}% recommendations correct",
                100 * percentAllItemsConsidered, ndcg, 100 * percentTopRecsConsidered);
        avgPercentTopRecsConsidered.increment(percentTopRecsConsidered);
        avgNDCG.increment(ndcg);
        avgPercentAllItemsConsidered.increment(percentAllItemsConsidered);
    }
    log.info("{}", avgPercentTopRecsConsidered.getResult());
    log.info("{}", avgNDCG.getResult());
    log.info("{}", avgPercentAllItemsConsidered.getResult());
    assertTrue(avgPercentTopRecsConsidered.getResult() > 0.55);
    assertTrue(avgNDCG.getResult() > 0.55);
    assertTrue(avgPercentAllItemsConsidered.getResult() < 0.075);
}