List of usage examples for org.apache.commons.math3.stat.descriptive.moment.StandardDeviation.increment
@Override public void increment(final double d)
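Every example below follows the same storeless pattern: construct a StandardDeviation, feed it one value at a time with increment(double), and read the running result with getResult(). A minimal, self-contained sketch of that pattern (the data values are made up for illustration):

import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class IncrementDemo {
    public static void main(String[] args) {
        //Bias-corrected (n - 1 denominator) by default.
        StandardDeviation sd = new StandardDeviation();
        double[] data = {2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0}; //illustrative values
        for (double d : data) {
            sd.increment(d); //update the running statistic; no data is stored
        }
        System.out.println("n = " + sd.getN() + ", sd = " + sd.getResult()); //n = 8, sd ~= 2.138
    }
}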
From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java
private void initializeGridPoints() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;

    //connect to db
    try {
        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        select.addColumn(sqlTable, regressorVariable.getName().nameForDatabase());
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());

        Min min = new Min();
        Max max = new Max();
        Mean mean = new Mean();
        StandardDeviation sd = new StandardDeviation();
        double value = 0.0;
        while (rs.next()) {
            value = rs.getDouble(regressorVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                min.increment(value);
                max.increment(value);
                mean.increment(value);
                sd.increment(value);
            }
            updateProgress();
        }
        rs.close();
        stmt.close();

        //evaluation points
        double sdv = sd.getResult();
        double mn = mean.getResult();
        double lower = mn - 2.5 * sdv;
        double upper = mn + 2.5 * sdv;
        bwAdjustment *= sdv;
        bandwidth = new NonparametricIccBandwidth(sampleSize, bwAdjustment);
        gridPoints = command.getFreeOption("gridpoints").getInteger();
        //uniformDistributionApproximation = new UniformDistributionApproximation(
        //        min.getResult(), max.getResult(), gridPoints);
        uniformDistributionApproximation = new UniformDistributionApproximation(lower, upper, gridPoints);
    } catch (SQLException ex) {
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }
}
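When several statistics are accumulated in one pass like this, Commons Math also offers SummaryStatistics, which maintains min, max, mean, and standard deviation behind a single addValue call. A hedged sketch of that alternative (this is the library facility, not how jMetrik itself does it; values are illustrative):

import org.apache.commons.math3.stat.descriptive.SummaryStatistics;

public class OnePassSummary {
    public static void main(String[] args) {
        SummaryStatistics stats = new SummaryStatistics();
        for (double value : new double[] {1.0, 2.0, 3.0, 4.0}) { //illustrative values
            stats.addValue(value); //updates min, max, mean, and sd in one call
        }
        System.out.println(stats.getMin() + " .. " + stats.getMax()
                + ", mean = " + stats.getMean()
                + ", sd = " + stats.getStandardDeviation());
    }
}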
From source file:com.itemanalysis.psychometrics.irt.model.Irm3PL.java
/**
 * Mean/sigma linking coefficients are computed from the mean and standard deviation of item difficulty.
 * The summary statistics are computed in a storeless manner. This method allows for the incremental
 * update to item difficulty summary statistics by combining them with other summary statistics.
 *
 * @param mean item difficulty mean.
 * @param sd item difficulty standard deviation.
 */
public void incrementMeanSigma(Mean mean, StandardDeviation sd) {
    mean.increment(difficulty);
    sd.increment(difficulty);
}
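A caller would pass the same Mean and StandardDeviation instances to every item so the difficulty summary accumulates across the whole test. A hedged sketch of such a caller (the summarizeDifficulty method and the items list are assumptions for illustration; only incrementMeanSigma comes from the class above):

import java.util.List;
import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

//Hypothetical caller: accumulate the difficulty summary across all items.
static void summarizeDifficulty(List<Irm3PL> items) {
    Mean difficultyMean = new Mean();
    StandardDeviation difficultySd = new StandardDeviation();
    for (Irm3PL item : items) {
        //Each call increments both statistics with that item's difficulty.
        item.incrementMeanSigma(difficultyMean, difficultySd);
    }
    System.out.println("difficulty mean = " + difficultyMean.getResult()
            + ", difficulty sd = " + difficultySd.getResult());
}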
From source file:com.itemanalysis.psychometrics.rasch.JMLE.java
public void linearTransformation(DefaultLinearTransformation lt, int precision) {
    Mean pMean = new Mean();
    StandardDeviation pSd = new StandardDeviation();

    //set transformation and rescale persons
    double newScale = lt.getScale();
    double newMean = lt.getIntercept();
    double oldPersonMean = pMean.evaluate(theta);
    double oldPersonSd = pSd.evaluate(theta);

    lt.setScaleAndIntercept(oldPersonMean, newMean, oldPersonSd, newScale);

    for (int i = 0; i < theta.length; i++) {
        theta[i] = lt.transform(theta[i]);
    }

    //set transformation and rescale items
    Mean iMean = new Mean();
    StandardDeviation iSd = new StandardDeviation();
    double tempDifficulty = 0.0;

    for (VariableName v : items.keySet()) {
        tempDifficulty = items.get(v).getDifficulty();
        iMean.increment(tempDifficulty);
        iSd.increment(tempDifficulty);
    }

    lt.setScaleAndIntercept(iMean.getResult(), newMean, iSd.getResult(), newScale);

    for (VariableName v : items.keySet()) {
        items.get(v).linearTransformation(lt, precision);
    }

    //set transformation and rescale thresholds
    RatingScaleThresholds tempThresholds = null;

    for (String s : thresholds.keySet()) {
        tempThresholds = thresholds.get(s);
        lt.setScaleAndIntercept(tempThresholds.getThresholdMean(), newMean,
                tempThresholds.getThresholdStandardDeviation(), newScale);
        thresholds.get(s).linearTransformation(lt, precision);
    }
}
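Note the two routes mixed in this method: the person statistics come from the batch evaluate(double[]) call over the stored theta array, while the item statistics are accumulated with increment because the difficulties live inside a map. Both routes agree to floating-point precision; a minimal sketch (values are illustrative):

import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class BatchVsStoreless {
    public static void main(String[] args) {
        double[] theta = {-1.2, -0.4, 0.0, 0.7, 1.5}; //illustrative logit values

        //Batch: one call over an existing array.
        double batchSd = new StandardDeviation().evaluate(theta);

        //Storeless: one value at a time, no array required.
        StandardDeviation sd = new StandardDeviation();
        for (double t : theta) {
            sd.increment(t);
        }

        //Both compute the same bias-corrected standard deviation.
        System.out.println(batchSd + " == " + sd.getResult());
    }
}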
From source file:com.itemanalysis.psychometrics.irt.estimation.StartingValues.java
/**
 * Computes normal approximation estimates (PROX) of item difficulty and person ability
 * in a way that allows for missing data (Linacre, 1994). It is an iterative procedure.
 *
 * Linacre, J. M., (1994). PROX with missing data, or known item or person measures.
 * Rasch Measurement Transactions, 8:3, 378, http://www.rasch.org/rmt/rmt83g.htm.
 *
 * @param converge convergence criterion as the maximum change in person logits.
 * @param maxIter maximum number of iterations. About 10 iterations works well.
 */
private void prox(double converge, int maxIter) {
    double delta = 1.0 + converge;
    int iter = 0;
    double pProx = 0;
    double pScore = 0;
    double maxTestScore = 0;
    double maxChange = 0;
    double logit = 0;

    Mean personGrandMean = new Mean();
    StandardDeviation personGrandSd = new StandardDeviation();
    double iProx = 0.0;
    double iMean = 0;
    theta = new double[nResponseVectors];

    Mean[] mPerson = new Mean[nItems];//Item difficulty mean for those examinees completing item j
    StandardDeviation[] sdPerson = new StandardDeviation[nItems];//Item difficulty standard deviation for those examinees completing item j
    double[] Si = null;
    double[] Ni = null;

    Mean[] mItem = new Mean[nResponseVectors];
    StandardDeviation[] sdItem = new StandardDeviation[nResponseVectors];

    while (delta > converge && iter < maxIter) {
        Si = new double[nItems];
        Ni = new double[nItems];

        //Compute descriptive statistics for persons and items
        double resp = 0;
        double freq = 0;
        for (int l = 0; l < nResponseVectors; l++) {
            freq = responseVector[l].getFrequency();

            for (int j = 0; j < nItems; j++) {

                //initialize arrays
                if (l == 0) {
                    mPerson[j] = new Mean();
                    sdPerson[j] = new StandardDeviation();
                }

                if (j == 0) {
                    mItem[l] = new Mean();
                    sdItem[l] = new StandardDeviation();
                }

                if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {

                    resp = responseVector[l].getResponseAt(j);

                    //increment item and person summary statistics
                    if (resp != -1) {
                        //incorporate weights - crude workaround
                        for (int w = 0; w < freq; w++) {
                            mItem[l].increment(irm[j].getDifficulty());
                            sdItem[l].increment(irm[j].getDifficulty());

                            mPerson[j].increment(theta[l]);
                            sdPerson[j].increment(theta[l]);
                            Si[j] += resp;
                            Ni[j]++;
                        }
                    }
                }
            }//end item loop
        }//end summary loop

        //Compute item PROX for binary items only
        iMean = 0;
        double pSd = 1e-8;
        double ni = 0;
        for (int j = 0; j < nItems; j++) {
            if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {
                pSd = sdPerson[j].getResult();

                //adjust extreme item scores
                if (Si[j] == 0)
                    Si[j] += 0.3;
                if (Si[j] == Ni[j])
                    Si[j] -= 0.3;

                logit = Math.log(Si[j] / (Ni[j] - Si[j]));
                iProx = mPerson[j].getResult() - Math.sqrt(1.0 + pSd / 2.9) * logit;
                irm[j].setDifficulty(iProx);
                iMean += iProx;
                ni++;
            }
        }
        iMean /= ni;

        //center difficulties about the mean item difficulty
        for (int j = 0; j < nItems; j++) {
            if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {
                iProx = irm[j].getDifficulty();
                irm[j].setDifficulty(iProx - iMean);
            }
        }

        //Compute person PROX
        maxChange = 0;
        personGrandMean.clear();
        personGrandSd.clear();
        Pair<Double, Double> personScores = null;
        for (int l = 0; l < nResponseVectors; l++) {
            personScores = computePersonScores(responseVector[l]);
            pScore = personScores.getFirst();
            maxTestScore = personScores.getSecond();

            //adjust extreme person scores
            if (pScore == 0)
                pScore += 0.3;
            if (pScore == maxTestScore)
                pScore -= 0.3;

            logit = Math.log(pScore / (maxTestScore - pScore));
            pProx = mItem[l].getResult() + Math.sqrt(1.0 + sdItem[l].getResult() / 2.9) * logit;
            maxChange = Math.max(maxChange, Math.abs(theta[l] - pProx));
            theta[l] = pProx;
            personGrandMean.increment(pProx);
            personGrandSd.increment(pProx);
        }

        delta = maxChange;
        iter++;

        fireEMStatusEvent(iter, delta, Double.NaN);
    }//end while

    //Linearly transform theta estimate to have a mean of 0 and a standard deviation of 1.
    //Apply the same transformation to item difficulty values.
    double A = 1.0 / personGrandSd.getResult();
    double B = -A * personGrandMean.getResult();

    for (int l = 0; l < nResponseVectors; l++) {
        theta[l] = theta[l] * A + B;
    }

    double a = 1;
    double b = 0;
    for (int j = 0; j < nItems; j++) {
        if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {
            b = irm[j].getDifficulty();
            irm[j].setDifficulty(b * A + B);

            //Adjust discrimination parameter for scaling constant.
            //PROX assumes a logit scale. This conversion is to convert to the normal metric.
            a = irm[j].getDiscrimination();
            irm[j].setDiscrimination(a / irm[j].getScalingConstant());
        }
    }

    //For debugging
    //System.out.println("ITER: " + iter);
    //for(int j=0;j<nItems;j++){
    //    System.out.println("PROX: " + irm[j].toString());
    //}
}
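The closing standardization step above is a pattern worth isolating: accumulate the grand mean and standard deviation storelessly while updating estimates, then apply the linear map that rescales everything to mean 0, sd 1. (The 2.9 divisor in the two PROX updates is presumably 1.7 squared, about 2.89, the usual logistic-to-normal scaling constant.) A minimal sketch of just the standardization step (the theta values are made up):

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class StandardizeDemo {
    public static void main(String[] args) {
        double[] theta = {-1.3, -0.2, 0.5, 1.8}; //illustrative ability estimates
        Mean grandMean = new Mean();
        StandardDeviation grandSd = new StandardDeviation();
        for (double t : theta) {
            grandMean.increment(t);
            grandSd.increment(t);
        }
        //Same linear map as the end of prox(): A rescales, B recenters.
        double A = 1.0 / grandSd.getResult();
        double B = -A * grandMean.getResult();
        for (int l = 0; l < theta.length; l++) {
            theta[l] = theta[l] * A + B; //now mean 0, sd 1
        }
    }
}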
From source file:org.jpmml.evaluator.functions.StandardDeviationFunction.java
static private Double evaluate(Collection<?> values, boolean biasCorrected) {
    StandardDeviation statistic = new StandardDeviation();
    statistic.setBiasCorrected(biasCorrected);

    for (Object value : values) {
        Double doubleValue = (Double) TypeUtil.parseOrCast(DataType.DOUBLE, value);

        statistic.increment(doubleValue);
    }

    return statistic.getResult();
}
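The biasCorrected flag selects the denominator: true (the default) divides by n - 1 for the sample standard deviation, false divides by n for the population form, and the flag applies to incrementally accumulated results as above. A minimal illustration of the difference (values made up):

import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class BiasCorrectionDemo {
    public static void main(String[] args) {
        StandardDeviation sample = new StandardDeviation(); //n - 1 denominator (default)
        StandardDeviation population = new StandardDeviation();
        population.setBiasCorrected(false);                 //n denominator

        for (double v : new double[] {1.0, 2.0, 3.0, 4.0}) {
            sample.increment(v);
            population.increment(v);
        }
        //sample ~= 1.2910, population ~= 1.1180 for these four values
        System.out.println(sample.getResult() + " vs " + population.getResult());
    }
}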
From source file:org.terracotta.statistics.derived.histogram.HistogramFittingTest.java
@Test
@Ignore
public void evaluateFlatHistogramErrors() {
    StandardDeviation parameter = new StandardDeviation();
    for (int i = 0; i < 1000; i++) {
        parameter.increment(flatHistogramFit(System.nanoTime())[1]);
        System.out.println("Flat Histogram Slope: Iteration: " + i + " S.D:" + parameter.getResult());
    }
}
From source file:org.terracotta.statistics.derived.histogram.HistogramFittingTest.java
@Test
@Ignore
public void evaluateGaussianHistogramErrors() {
    StandardDeviation centroid = new StandardDeviation();
    StandardDeviation width = new StandardDeviation();
    for (int i = 0; i < 1000; i++) {
        double[] parameters = gaussianHistogramFit(System.nanoTime());
        centroid.increment(parameters[1]);
        width.increment(parameters[2]);
        System.out.println("Gaussian Histogram Centroid: Iteration: " + i + " S.D:" + centroid.getResult());
        System.out.println("Gaussian Histogram Width: Iteration: " + i + " S.D:" + width.getResult());
    }
}