List of usage examples for org.apache.commons.math3.stat.descriptive.moment.StandardDeviation#increment
@Override public void increment(final double d)
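StandardDeviation is a storeless statistic: each call to increment(d) folds one value into the running moments, getResult() returns the bias-corrected sample standard deviation of everything seen so far, and clear() resets the accumulator. A minimal sketch of the pattern every example below follows (the data values are illustrative):

StandardDeviation sd = new StandardDeviation();
double[] data = { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 }; // illustrative values
for (double d : data) {
    sd.increment(d);
}
double result = sd.getResult(); // sample standard deviation (n-1 denominator)
long n = sd.getN();             // number of values incremented so far
sd.clear();                     // reset the accumulator for reuse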
From source file:com.itemanalysis.psychometrics.kernel.SimplePluginBandwidthTest.java
@Test
public void testValue() throws Exception {
    System.out.print("SimplePluginBandwidthTest: ");
    StandardDeviation sd = new StandardDeviation();
    for (int i = 0; i < x.length; i++) {
        sd.increment(x[i]);
    }
    SimplePluginBandwidth bw = new SimplePluginBandwidth(sd, 1.0);
    System.out.println(bw.value());
    assertEquals("Reference bandwidth test", 6.09603513572289, bw.value(), 1.0e-10);
}
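SimplePluginBandwidth receives the populated StandardDeviation object rather than a raw value, which also gives it the sample size via getN(). It presumably implements a normal-reference plug-in rule along the lines of Silverman's h = 1.06 * sigma * n^(-1/5); that formula is an assumption about the class's internals, not a quote of it. Written out directly:

double adjustment = 1.0; // the second constructor argument above
// Assumed rule of thumb; verify against SimplePluginBandwidth before relying on it.
double h = adjustment * 1.06 * sd.getResult() * Math.pow(sd.getN(), -0.2);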
From source file:com.itemanalysis.psychometrics.polycor.AbstractPolyserialCorrelation.java
public void summarize(double[] x, int[] y) {
    if (x.length != y.length)
        throw new IllegalArgumentException("X and Y are of different lengths.");
    N = (double) x.length;
    Mean meanX = new Mean();
    StandardDeviation sdX = new StandardDeviation();
    PearsonCorrelation rxy = new PearsonCorrelation();
    Frequency table = new Frequency();
    for (int i = 0; i < N; i++) {
        meanX.increment(x[i]);
        sdX.increment(x[i]);
        rxy.increment(x[i], (double) y[i]);
        table.addValue(y[i]);
    }

    //compute thresholds
    int nrow = table.getUniqueCount();
    double[] freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    for (int i = 0; i < (nrow - 1); i++) {
        freqDataY[i] = table.getCumFreq(i + 1);
        thresholds[i] = norm.inverseCumulativeProbability(freqDataY[i] / ntotal);
    }
    thresholds[nrow - 1] = 10; //set last threshold to a large number less than infinity
}
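The threshold loop converts the ordinal variable's cumulative category proportions into standard normal quantiles. A self-contained sketch of just that step, using Commons Math's NormalDistribution and Frequency (the ordinal data here are made up):

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.stat.Frequency;

public class ThresholdSketch {
    public static void main(String[] args) {
        Frequency table = new Frequency();
        for (int y : new int[] { 0, 0, 1, 1, 1, 2, 2, 3 }) { // made-up categories 0..3
            table.addValue(y);
        }
        NormalDistribution norm = new NormalDistribution();
        int nrow = table.getUniqueCount();
        double ntotal = table.getSumFreq();
        double[] thresholds = new double[nrow];
        for (int i = 0; i < nrow - 1; i++) {
            // normal quantile of the cumulative proportion at and below category i
            thresholds[i] = norm.inverseCumulativeProbability(table.getCumFreq(i) / ntotal);
        }
        thresholds[nrow - 1] = 10; // finite stand-in for +infinity, as in the source
        System.out.println(java.util.Arrays.toString(thresholds));
    }
}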
From source file:com.itemanalysis.psychometrics.irt.model.IrmGRM.java
public void incrementMeanSigma(Mean mean, StandardDeviation sd) {
    for (int i = 0; i < maxCategory; i++) {
        mean.increment(step[i]);
        sd.increment(step[i]);
    }
}
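Because the Mean and StandardDeviation accumulators are passed in rather than created here, one pair can be shared across all items on a form: each item contributes its own parameters and the caller reads the pooled summary once. A sketch of that calling pattern (itemList and its element type are assumptions about the surrounding code):

Mean mean = new Mean();
StandardDeviation sd = new StandardDeviation();
for (ItemResponseModel item : itemList) { // hypothetical collection of items
    item.incrementMeanSigma(mean, sd);    // each item adds its parameters
}
double pooledMean = mean.getResult();
double pooledSd = sd.getResult();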
From source file:de.biomedical_imaging.ij.nanotrackj.Track.java
/**
 * @param driftx Drift in x direction (in pixels)
 * @param drifty Drift in y direction (in pixels)
 * @param tau Timelag
 * @return [0] = The standard deviation of the squared displacements, [1] = Number of data points
 */
public double[] getMeanSquareDisplacementSD(double driftx, double drifty, int tau) {
    double msd = 0;
    StandardDeviation sd = new StandardDeviation();
    int N = 0;
    for (int i = tau; i < this.size(); ++i) {
        msd = Math.pow(this.get(i - tau).getX() - this.get(i).getX() - tau * driftx, 2)
                + Math.pow(this.get(i - tau).getY() - this.get(i).getY() - tau * drifty, 2);
        sd.increment(msd);
        ++N;
    }
    double[] result = new double[2];
    result[0] = sd.getResult();
    result[1] = N;
    return result;
}
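If the mean squared displacement itself is also needed, a second storeless accumulator can run in the same loop; a minimal sketch over the per-timelag squared displacements (the squaredDisplacements array stands in for the values computed above):

Mean msdMean = new Mean();
StandardDeviation msdSd = new StandardDeviation();
for (double sq : squaredDisplacements) { // one value per timelag step, as in the loop above
    msdMean.increment(sq);
    msdSd.increment(sq);
}
// msdMean.getResult() is the MSD estimate; msdSd.getResult() is its spread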
From source file:com.itemanalysis.psychometrics.histogram.Histogram.java
private void createHistogram(double[] x) {
    n = x.length;
    Min min = new Min();
    Max max = new Max();
    Mean mean = new Mean();
    StandardDeviation sd = new StandardDeviation();

    for (int i = 0; i < x.length; i++) {
        min.increment(x[i]);
        max.increment(x[i]);
        mean.increment(x[i]);
        sd.increment(x[i]);
    }

    double range = max.getResult() - min.getResult();
    double lowestBoundary = min.getResult() - range / 1000;
    double largestBoundary = max.getResult() + range / 1000;

    if (binCalculationType == BinCalculationType.SCOTT) {
        binCalc = new ScottBinCalculation(n, min.getResult(), max.getResult(), sd.getResult());
    } else if (binCalculationType == BinCalculationType.FREEDMAN_DIACONIS) {
        Percentile percentile = new Percentile();
        double q1 = percentile.evaluate(x, 25);
        double q3 = percentile.evaluate(x, 75);
        binCalc = new FreedmanDiaconisBinCalculation(n, min.getResult(), max.getResult(), q1, q3);
    } else if (binCalculationType == BinCalculationType.STURGES) {
        binCalc = new SturgesBinCalculation(n, min.getResult(), max.getResult());
    }

    numberOfBins = binCalc.numberOfBins();
    binWidth = binCalc.binWidth();

    //create bins
    createBins(lowestBoundary, largestBoundary);

    //count observations in each bin
    for (int i = 0; i < n; i++) {
        for (Bin b : bins) {
            b.increment(x[i]);
        }
    }
}
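The SCOTT branch hands n, the extremes, and the storeless SD result to ScottBinCalculation, which presumably applies Scott's normal reference rule; spelled out directly (an assumption about that class's internals, not a quote of it):

// Scott's rule: h = 3.49 * sigma * n^(-1/3); number of bins = ceil(range / h)
double h = 3.49 * sd.getResult() * Math.pow(n, -1.0 / 3.0);
int bins = (int) Math.ceil((max.getResult() - min.getResult()) / h);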
From source file:com.itemanalysis.jmetrik.stats.transformation.LinearTransformationAnalysis.java
public String transformScore() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;
    Double constrainedScore = null;

    try {
        //add variable to db
        dao.addColumnToDb(conn, tableName, addedVariableInfo);

        conn.setAutoCommit(false); //begin transaction

        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        select.addColumn(sqlTable, selectedVariable.getName().nameForDatabase());
        select.addColumn(sqlTable, addedVariableInfo.getName().nameForDatabase());
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
        rs = stmt.executeQuery(select.toString());

        this.firePropertyChange("message", "", "Transforming scores...");

        double origValue = 0.0;
        double transValue = 0.0;
        double z = 0.0;

        StandardDeviation sd = new StandardDeviation();
        Mean mean = new Mean();
        Min min = new Min();
        Max max = new Max();

        while (rs.next()) {
            origValue = rs.getDouble(selectedVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                sd.increment(origValue);
                mean.increment(origValue);
                min.increment(origValue);
                max.increment(origValue);
            }
            updateProgress();
        }

        double meanValue = mean.getResult();
        double sdValue = sd.getResult();
        double minValue = min.getResult();
        double maxValue = max.getResult();
        double A = 1.0;
        double B = 0.0;

        rs.beforeFirst();
        while (rs.next()) {
            origValue = rs.getDouble(selectedVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                if (type1) {
                    z = (origValue - meanValue) / sdValue;
                    transValue = scaleSd * z + scaleMean;
                    transValue = checkConstraints(transValue);
                } else {
                    A = (maxPossibleScore - minPossibleScore) / (maxValue - minValue);
                    B = minPossibleScore - minValue * A;
                    transValue = origValue * A + B;
                    transValue = checkConstraints(transValue);
                }
                descriptiveStatistics.increment(transValue);
                rs.updateDouble(addedVariableInfo.getName().nameForDatabase(), transValue);
                rs.updateRow();
            }
            updateProgress();
        }

        conn.commit();
        conn.setAutoCommit(true);

        //create output
        DefaultLinearTransformation linearTransformation = new DefaultLinearTransformation();
        linearTransformation.setScale(A);
        linearTransformation.setIntercept(B);

        StringBuilder sb = new StringBuilder();
        Formatter f = new Formatter(sb);
        f.format(publishHeader());
        f.format(descriptiveStatistics.toString());
        f.format(linearTransformation.toString());
        f.format("%n");
        f.format("%n");
        return f.toString();
    } catch (SQLException ex) {
        conn.rollback();
        conn.setAutoCommit(true);
        throw ex;
    } finally {
        if (rs != null) rs.close();
        if (stmt != null) stmt.close();
    }
}
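Stripped of the JDBC plumbing, the type-1 branch is a two-pass linear transformation: the first pass accumulates mean and SD with increment, the second maps each score onto the target scale via z-scores. A self-contained sketch of just that math (class, method, and parameter names are illustrative):

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class LinearTransformSketch {
    // scaleMean and scaleSd are the target scale's mean and standard deviation
    public static double[] transform(double[] scores, double scaleMean, double scaleSd) {
        Mean mean = new Mean();
        StandardDeviation sd = new StandardDeviation();
        for (double s : scores) { // pass 1: summary statistics
            mean.increment(s);
            sd.increment(s);
        }
        double[] out = new double[scores.length];
        for (int i = 0; i < scores.length; i++) { // pass 2: z-score, then rescale
            double z = (scores[i] - mean.getResult()) / sd.getResult();
            out[i] = scaleSd * z + scaleMean;
        }
        return out;
    }
}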
From source file:com.itemanalysis.psychometrics.irt.model.IrmGPCM2.java
public void incrementMeanSigma(Mean mean, StandardDeviation sd) {
    for (int i = 0; i < ncatM1; i++) {
        mean.increment(difficulty - threshold[i]);
        sd.increment(difficulty - threshold[i]);
    }
}
From source file:com.github.rinde.rinsim.scenario.generator.NHPoissonProcessTest.java
@Ignore
@Test
public void test() throws IOException {
    final int numSamples = 100;
    final long lengthOfScenario = 4 * 60 * 60 * 1000;
    final double period = 30 * 60 * 1000;
    final int[] orders = new int[] { 10, 20, 30, 40, 50, 75, 100, 150, 200, 500 };
    final List<Point> dataPoints = newArrayList();
    final RandomGenerator rng = new MersenneTwister(123);
    final List<Double> relHeights = newArrayList();
    for (int i = 0; i < 10; i++) {
        relHeights.add(-.999 + i * .001);
    }
    for (int i = 0; i < 100; i++) {
        relHeights.add(-.99 + i * .05);
    }
    // for (int i = 0; i < 50; i++) {
    //   relHeights.add(3.99 + (i * .5));
    // }
    Files.createParentDirs(new File("files/test/times/relheight-dynamism.txt"));
    final BufferedWriter writer = Files.newWriter(new File("files/test/times/relheight-dynamism.txt"),
            Charsets.UTF_8);
    for (int k = 0; k < orders.length; k++) {
        for (int i = 0; i < relHeights.size(); i++) {
            final double d = relHeights.get(i);
            final double relHeight = d; // -.99 + (j * .05);
            // final double period = 3600d;
            final double ordersPerPeriod = orders[k] / (lengthOfScenario / period);

            final IntensityFunction intensity = IntensityFunctions.sineIntensity().height(d).period(period)
                    .area(ordersPerPeriod).build();

            System.out.printf("%1d relative height: %1.3f%n", i, relHeight);

            // final List<Double> sineTimes = FluentIterable
            //     .from(
            //         ContiguousSet.create(Range.closedOpen(0L, lengthOfScenario),
            //             DiscreteDomain.longs()))
            //     .transform(Conversion.LONG_TO_DOUBLE)
            //     .transform(intensity)
            //     .toList();
            // Analysis
            //     .writeLoads(
            //         sineTimes,
            //         new File(
            //             "files/test/times/sine/sine-"
            //                 + Strings.padStart(Integer.toString(i), 2, '0')
            //                 + ".intens"));

            final TimeSeriesGenerator generator = TimeSeries.nonHomogenousPoisson(lengthOfScenario, intensity);

            double max = 0;
            double sum = 0;
            final StandardDeviation sd = new StandardDeviation();
            final List<Double> dynamismValues = newArrayList();
            for (int j = 0; j < numSamples; j++) {
                List<Double> times = generator.generate(rng.nextLong());
                while (times.size() < 2) {
                    times = generator.generate(rng.nextLong());
                }
                final double dyn = Metrics.measureDynamism(times, lengthOfScenario);
                dynamismValues.add(dyn);
                sd.increment(dyn);
                sum += dyn;
                max = Math.max(max, dyn);
                // if (j < 3) {
                //   // System.out.printf("%1.3f%% %d%n", dyn * 100, times.size());
                //   Analysis.writeTimes(
                //       lengthOfScenario,
                //       times,
                //       new File(
                //           "files/test/times/orders"
                //               + Strings.padStart(Integer.toString(i), 2, '0') + "_"
                //               + j
                //               + "-" + (dyn * 100)
                //               + ".times"));
                // }
            }
            try {
                writer.append(Double.toString(relHeight));
                writer.append(" ");
                writer.append(Integer.toString(orders[k]));
                writer.append(" ");
                writer.append(Joiner.on(" ").join(dynamismValues).toString());
                writer.append("\n");
            } catch (final IOException e) {
                checkState(false);
            }
            System.out.printf(" > dyn %1.3f+-%1.3f%n", +(sum / numSamples), sd.getResult());
            dataPoints.add(new Point(relHeight, sum / numSamples));
        }
    }
    writer.close();

    // Analysis.writeLocationList(dataPoints, new File(
    //   "files/test/times/intensity-analysis.txt"));
}
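By default StandardDeviation applies Bessel's correction (the n-1 denominator), which is what these sampled dynamism values call for. The population formula is available through a constructor flag or a setter:

StandardDeviation sampleSd = new StandardDeviation();          // n-1 denominator (default)
StandardDeviation populationSd = new StandardDeviation(false); // n denominator
sampleSd.setBiasCorrected(false); // or switch an existing instance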
From source file:com.itemanalysis.psychometrics.irt.model.Irm4PL.java
/**
 * Mean/sigma linking coefficients are computed from the mean and standard deviation of item difficulty.
 * The summary statistics are computed in a storeless manner. This method allows for the incremental
 * update to item difficulty summary statistics by combining them with other summary statistics.
 *
 * @param mean item difficulty mean.
 * @param sd item difficulty standard deviation.
 */
public void incrementMeanSigma(Mean mean, StandardDeviation sd) { //TODO check for correctness
    mean.increment(difficulty);
    sd.increment(difficulty);
}
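Once two forms have each accumulated a difficulty mean and SD this way, the mean/sigma linking coefficients follow directly: the slope is the ratio of the standard deviations and the intercept aligns the means. A sketch using the usual mean/sigma formulas (the form X and form Y accumulators are assumed to be already populated):

double slopeA = sdY.getResult() / sdX.getResult();
double interceptB = meanY.getResult() - slopeA * meanX.getResult();
// a value t on form X's scale maps to slopeA * t + interceptB on form Y's scale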
From source file:com.itemanalysis.psychometrics.irt.model.IrmGPCM.java
public void incrementMeanSigma(Mean mean, StandardDeviation sd) {
    for (int i = 1; i < ncat; i++) { //Start at 1 because first step is fixed to zero. Do not count it here.
        mean.increment(step[i]);
        sd.increment(step[i]);
    }
}
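Taken together, the three polytomous variants feed the same pooled accumulators with whatever plays the role of item difficulty: IrmGRM contributes its step parameters, IrmGPCM2 contributes difficulty minus each threshold, and IrmGPCM skips the first step because it is fixed to zero for identification and so carries no information for the summary.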