List of usage examples for org.apache.commons.math3.stat Frequency getUniqueCount
public int getUniqueCount()
From source file:com.github.rinde.rinsim.scenario.generator.PoissonProcessTest.java
/** * Checks whether the observations conform to a Poisson process with the * specified intensity. Uses a chi square test with the specified confidence. * The null hypothesis is that the observations are the result of a poisson * process./*from w ww. j a v a 2s . c o m*/ * @param observations * @param intensity * @param confidence * @return <code>true</code> if the observations */ static boolean isPoissonProcess(Frequency observations, double intensity, double length, double confidence) { final PoissonDistribution pd = new PoissonDistribution(length * intensity); final Iterator<?> it = observations.valuesIterator(); final long[] observed = new long[observations.getUniqueCount()]; final double[] expected = new double[observations.getUniqueCount()]; int index = 0; while (it.hasNext()) { final Long l = (Long) it.next(); observed[index] = observations.getCount(l); expected[index] = pd.probability(l.intValue()) * observations.getSumFreq(); if (expected[index] == 0) { return false; } index++; } final double chi = TestUtils.chiSquareTest(expected, observed); return !(chi < confidence); }
From source file:com.itemanalysis.psychometrics.polycor.AbstractPolyserialCorrelation.java
public void summarize(double[] x, int[] y) { if (x.length != y.length) throw new IllegalArgumentException("X and Y are of different lengths."); N = (double) x.length; Mean meanX = new Mean(); StandardDeviation sdX = new StandardDeviation(); PearsonCorrelation rxy = new PearsonCorrelation(); Frequency table = new Frequency(); for (int i = 0; i < N; i++) { meanX.increment(x[i]);/* ww w . j a v a 2 s .c o m*/ sdX.increment(x[i]); rxy.increment(x[i], (double) y[i]); table.addValue(y[i]); } //compute thresholds int nrow = table.getUniqueCount(); double[] freqDataY = new double[nrow]; double ntotal = table.getSumFreq(); for (int i = 0; i < (nrow - 1); i++) { freqDataY[i] = table.getCumFreq(i + 1); thresholds[i] = norm.inverseCumulativeProbability(freqDataY[i] / ntotal); } thresholds[nrow - 1] = 10;//set last threshold to a large number less than infinity }
From source file:com.itemanalysis.psychometrics.statistics.TwoWayTable.java
/**
 * Returns the number of distinct column values observed at the given row.
 *
 * @param rowValue the row key to look up
 * @return the count of distinct column values for that row, or 0 if the
 *         row has never been observed
 */
public int getColUniqueCountAtRow(Comparable<?> rowValue) {
    Frequency f = tableRows.get(rowValue);
    // Previously this threw a NullPointerException for an unseen row;
    // an unseen row has no column values, so report zero.
    if (f == null) {
        return 0;
    }
    return f.getUniqueCount();
}
From source file:com.itemanalysis.psychometrics.irt.estimation.ItemResponseFileSummary.java
private ItemResponseVector[] readTapData() { byte[][] tap = new byte[35][18]; try {/*from w w w .jav a 2 s. c o m*/ File f = FileUtils.toFile(this.getClass().getResource("/testdata/tap-data.txt")); BufferedReader br = new BufferedReader(new FileReader(f)); String line = ""; String[] s = null; int row = 0; while ((line = br.readLine()) != null) { s = line.split(","); for (int j = 0; j < s.length; j++) { tap[row][j] = Byte.parseByte(s[j]); } row++; } br.close(); } catch (IOException ex) { ex.printStackTrace(); } Frequency freq = new Frequency(); for (int i = 0; i < tap.length; i++) { freq.addValue(Arrays.toString(tap[i])); } ItemResponseVector[] responseData = new ItemResponseVector[freq.getUniqueCount()]; ItemResponseVector irv = null; Iterator<Comparable<?>> iter = freq.valuesIterator(); int index = 0; //create array of ItemResponseVector objects while (iter.hasNext()) { //get response string from frequency summary and convert to byte array Comparable<?> value = iter.next(); String s = value.toString(); s = s.substring(1, s.lastIndexOf("]")); String[] sa = s.split(","); byte[] rv = new byte[sa.length]; for (int i = 0; i < sa.length; i++) { rv[i] = Byte.parseByte(sa[i].trim()); } //create response vector objects irv = new ItemResponseVector(rv, Long.valueOf(freq.getCount(value)).doubleValue()); responseData[index] = irv; index++; } // //display results of summary // for(int i=0;i<responseData.length;i++){ // System.out.println(responseData[i].toString() + ": " + responseData[i].getFrequency()); // } return responseData; }
From source file:com.itemanalysis.psychometrics.polycor.PolyserialLogLikelihoodTwoStep.java
/**
 * Accumulates the two-step polyserial summary statistics: mean and SD of X,
 * Pearson correlation of X and Y, and the ordinal thresholds (alpha) of Y
 * from the inverse normal CDF of cumulative category proportions.
 *
 * NOTE(review): the field {@code nrow} is REUSED here — it enters as the
 * number of observations (first loop bound) and is then overwritten with
 * the number of unique Y categories. Do not reorder these statements.
 *
 * @throws DimensionMismatchException if dataX and dataY differ in length
 */
public void summarize() throws DimensionMismatchException {
    if (dataX.length != dataY.length)
        throw new DimensionMismatchException(dataX.length, dataY.length);
    Frequency table = new Frequency();
    meanX = new Mean();
    sdX = new StandardDeviation();
    rxy = new PearsonCorrelation();
    // First pass over all observations: accumulate moments and tabulate Y.
    for (int i = 0; i < nrow; i++) {
        meanX.increment(dataX[i]);
        sdX.increment(dataX[i]);
        rxy.increment(dataX[i], (double) dataY[i]);
        table.addValue(dataY[i]);
    }
    //compute thresholds
    // nrow is now repurposed as the number of unique Y categories.
    nrow = table.getUniqueCount();
    freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    // presumably Y categories are consecutive integers starting at 1 — TODO confirm
    for (int i = 0; i < (nrow - 1); i++) {
        freqDataY[i] = table.getCumFreq(i + 1);
        alpha[i] = normal.inverseCumulativeProbability(freqDataY[i] / ntotal);
    }
    alpha[nrow - 1] = 10;//set last threshold to a large number less than infinity
}
From source file:com.itemanalysis.jmetrik.stats.itemanalysis.ItemAnalysis.java
public int numberOfSubscales() { Frequency table = new Frequency(); for (VariableAttributes v : variables) { table.addValue(v.getItemGroup()); }//w ww.jav a 2s. c o m return table.getUniqueCount(); }
From source file:com.itemanalysis.psychometrics.irt.estimation.ItemResponseFileSummary.java
/** * Summarize comma delimited file. It will extract the data beginning in the column indicated by start * and it will continue for nItems columns. * * @param f file to summarize/*from w w w. j a v a2s . c om*/ * @param start the column index of the first item. It is zero based. If teh data start in the first column, then start=0. * @param nItems number of items to read from the file. It will begin at the column indicated by start. * @param headerIncluded true if header is included. False otherwise. The header will be omitted. * @return an array of item resposne vectors */ public ItemResponseVector[] getCondensedResponseVectors(File f, int start, int nItems, boolean headerIncluded) { Frequency freq = new Frequency(); String responseString = ""; try { BufferedReader br = new BufferedReader(new FileReader(f)); String line = ""; String[] s = null; if (headerIncluded) br.readLine();//skip header while ((line = br.readLine()) != null) { s = line.split(","); line = ""; for (int j = 0; j < nItems; j++) { line += s[j + start]; } freq.addValue(line); } br.close(); } catch (IOException ex) { ex.printStackTrace(); } ItemResponseVector[] responseData = new ItemResponseVector[freq.getUniqueCount()]; ItemResponseVector irv = null; Iterator<Comparable<?>> iter = freq.valuesIterator(); int index = 0; byte[] rv = null; //create array of ItemResponseVector objects while (iter.hasNext()) { Comparable<?> value = iter.next(); responseString = value.toString(); int n = responseString.length(); rv = new byte[n]; String response = ""; for (int i = 0; i < n; i++) { response = String.valueOf(responseString.charAt(i)).toString(); rv[i] = Byte.parseByte(response); } //create response vector objects irv = new ItemResponseVector(rv, Long.valueOf(freq.getCount(value)).doubleValue()); responseData[index] = irv; index++; } return responseData; }