Example usage for org.apache.commons.math3.stat.descriptive.moment StandardDeviation StandardDeviation

List of usage examples for org.apache.commons.math3.stat.descriptive.moment StandardDeviation StandardDeviation

Introduction

On this page you can find example usage of org.apache.commons.math3.stat.descriptive.moment StandardDeviation StandardDeviation.

Prototype

public StandardDeviation() 

Source Link

Document

Constructs a StandardDeviation.

Usage

From source file:com.itemanalysis.psychometrics.polycor.PolyserialPlugin.java

/**
 * Constructs a polyserial correlation estimator with fresh, empty
 * accumulators for the continuous variable (X), the ordinal variable (Y),
 * and the correlation between them.
 */
public PolyserialPlugin() {
    norm = new NormalDistribution();     // default ctor: standard normal N(0,1)
    freqY = new Frequency();             // category frequency table for Y
    sdY = new StandardDeviation();       // running SD accumulator for Y
    sdX = new StandardDeviation();       // running SD accumulator for X
    r = new PearsonCorrelation();        // running Pearson r of (X, Y)
}

From source file:com.itemanalysis.psychometrics.polycor.AbstractPolyserialCorrelation.java

/**
 * Accumulates summary statistics for a polyserial correlation between a
 * continuous variable {@code x} and an ordinal variable {@code y}, then
 * derives the category thresholds from the cumulative distribution of y.
 *
 * @param x continuous observations
 * @param y ordinal observations, parallel to {@code x}
 * @throws IllegalArgumentException if the arrays differ in length
 */
public void summarize(double[] x, int[] y) {
    if (x.length != y.length)
        throw new IllegalArgumentException("X and Y are of different lengths.");
    N = (double) x.length;
    // NOTE(review): meanX, sdX and rxy are locals that go out of scope at the
    // end of this method; their accumulated values are never read. Either they
    // shadow same-named fields by mistake, or this is dead work — confirm
    // against the enclosing class.
    Mean meanX = new Mean();
    StandardDeviation sdX = new StandardDeviation();
    PearsonCorrelation rxy = new PearsonCorrelation();
    Frequency table = new Frequency();

    // Single pass: feed every (x, y) pair into the accumulators and the
    // frequency table of y categories.
    for (int i = 0; i < N; i++) {
        meanX.increment(x[i]);
        sdX.increment(x[i]);
        rxy.increment(x[i], (double) y[i]);
        table.addValue(y[i]);
    }

    //compute thresholds
    int nrow = table.getUniqueCount();
    double[] freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    // Thresholds are normal quantiles of the cumulative proportions.
    // NOTE(review): getCumFreq(i + 1) assumes the y categories are exactly
    // the integers 1..nrow — verify against the callers' coding scheme.
    // Also assumes the field `thresholds` was sized to at least nrow elsewhere.
    for (int i = 0; i < (nrow - 1); i++) {
        freqDataY[i] = table.getCumFreq(i + 1);
        thresholds[i] = norm.inverseCumulativeProbability(freqDataY[i] / ntotal);
    }
    thresholds[nrow - 1] = 10;//set last threshold to a large number less than infinity
}

From source file:com.itemanalysis.psychometrics.kernel.LikelihoodCrossValidation.java

// Sets the upper search bound to the sample standard deviation of x.
// NOTE(review): the field is named `max` but holds sd(x) — presumably the
// maximum bandwidth considered in the cross-validation search; confirm.
private void computeBounds() {
    StandardDeviation sd = new StandardDeviation();
    this.max = sd.evaluate(x);
}

From source file:com.facebook.presto.operator.aggregation.TestDoubleStdDevAggregation.java

/**
 * Returns the expected sample standard deviation for the window of
 * consecutive integer values {@code start, start+1, ..., start+length-1},
 * or {@code null} when the statistic is undefined.
 *
 * @param start  first value in the window
 * @param length number of consecutive values
 * @return the sample standard deviation, or {@code null} if {@code length < 2}
 */
@Override
public Number getExpectedValue(int start, int length) {
    // A sample standard deviation needs at least two observations.
    if (length < 2) {
        return null;
    }

    // Materialize the consecutive run start..start+length-1.
    double[] window = new double[length];
    int next = start;
    for (int i = 0; i < length; i++) {
        window[i] = next;
        next++;
    }

    return new StandardDeviation().evaluate(window);
}

From source file:com.itemanalysis.psychometrics.kernel.LeastSquaresCrossValidation.java

// Computes quantities used to bound the cross-validation search.
// Stores the sample standard deviation of x in the `sd` field.
private void computeBounds() throws Exception {
    StandardDeviation stdev = new StandardDeviation();
    this.sd = stdev.evaluate(x);
    // NOTE(review): `from` and `to` are locals that are computed and then
    // discarded — dead code as written. This looks like it was meant to be
    // `this.from = ...` / `this.to = ...`; confirm against the enclosing
    // class before removing or fixing.
    Min min = new Min();
    double from = min.evaluate(x);
    Max max = new Max();
    double to = max.evaluate(x);

}

From source file:com.itemanalysis.psychometrics.polycor.PolyserialLogLikelihoodTwoStep.java

/**
 * First step of the two-step polyserial estimate: accumulates the mean and
 * standard deviation of X, the Pearson correlation of (X, Y), and the
 * frequency table of Y, then derives the category thresholds (alpha) from
 * the cumulative distribution of Y.
 *
 * @throws DimensionMismatchException if dataX and dataY differ in length
 */
public void summarize() throws DimensionMismatchException {
    if (dataX.length != dataY.length)
        throw new DimensionMismatchException(dataX.length, dataY.length);
    Frequency table = new Frequency();
    meanX = new Mean();
    sdX = new StandardDeviation();
    rxy = new PearsonCorrelation();
    // FIX: iterate over the full data arrays. The original loop was bounded by
    // the field `nrow`, which is only assigned below (to the number of unique
    // Y categories) — so unless it was pre-set to dataX.length elsewhere, the
    // loop processed the wrong number of observations. dataX.length is the
    // correct bound and matches the dimension check above.
    for (int i = 0; i < dataX.length; i++) {
        meanX.increment(dataX[i]);
        sdX.increment(dataX[i]);
        rxy.increment(dataX[i], (double) dataY[i]);
        table.addValue(dataY[i]);
    }

    //compute thresholds
    nrow = table.getUniqueCount();
    freqDataY = new double[nrow];
    double ntotal = table.getSumFreq();
    // Thresholds are normal quantiles of the cumulative proportions.
    // Assumes Y categories are coded 1..nrow (getCumFreq(i + 1)).
    for (int i = 0; i < (nrow - 1); i++) {
        freqDataY[i] = table.getCumFreq(i + 1);
        alpha[i] = normal.inverseCumulativeProbability(freqDataY[i] / ntotal);
    }
    alpha[nrow - 1] = 10;//set last threshold to a large number less than infinity
}

From source file:de.biomedical_imaging.traJ.features.StandardDeviationDirectionFeature.java

/**
 * Computes the standard deviation of the turning angles along the
 * trajectory {@code t} at the configured {@code timelag}.
 *
 * For each position i, two displacement vectors are formed:
 * v1 over [i - timelag - 1, i - timelag] and v2 over [i - 1, i]; the angle
 * between them is collected, and the SD of all angles is returned.
 *
 * @return a single-element array holding the standard deviation of angles
 */
@Override
public double[] evaluate() {
    StandardDeviation sd = new StandardDeviation();
    // One angle per position from timelag+1 to t.size()-1, hence
    // t.size() - timelag - 1 values in total.
    double[] values = new double[t.size() - timelag - 1];
    double subx = 0;
    double suby = 0;
    double subz = 0;

    for (int i = timelag + 1; i < t.size(); i++) {

        // v1: displacement ending at index i - timelag.
        subx = t.get(i - timelag - 1).x;
        suby = t.get(i - timelag - 1).y;
        subz = t.get(i - timelag - 1).z;

        Vector3d v1 = new Vector3d(t.get(i - timelag).x - subx, t.get(i - timelag).y - suby,
                t.get(i - timelag).z - subz);

        // v2: displacement ending at index i.
        subx = t.get(i - 1).x;
        suby = t.get(i - 1).y;
        subz = t.get(i - 1).z;
        Vector3d v2 = new Vector3d(t.get(i).x - subx, t.get(i).y - suby, t.get(i).z - subz);

        double v = v1.angle(v2);

        // A zero-length displacement has no direction; force the angle to 0
        // rather than propagating whatever angle() yields for a null vector.
        boolean v1IsZero = TrajectoryUtil.isZero(v1.x) && TrajectoryUtil.isZero(v1.y)
                && TrajectoryUtil.isZero(v1.z);
        boolean v2IsZero = TrajectoryUtil.isZero(v2.x) && TrajectoryUtil.isZero(v2.y)
                && TrajectoryUtil.isZero(v2.z);
        if (v1IsZero || v2IsZero) {
            v = 0;
        }
        values[i - timelag - 1] = v;
    }

    sd.setData(values);
    result = new double[] { sd.evaluate() };

    return result;
}

From source file:com.itemanalysis.psychometrics.measurement.ClassicalItemStatistics.java

/**
 * Constructs a container of classical item statistics.
 *
 * @param id             item identifier (NOTE(review): received but not
 *                       stored here — confirm it is used by the caller or
 *                       an overloaded constructor)
 * @param biasCorrection whether bias-corrected estimates are used
 * @param pearson        true to use a Pearson point-biserial discrimination;
 *                       false to use a polyserial correlation instead
 * @param dIndex         true to also accumulate upper/lower group means for
 *                       the discrimination (D) index
 */
public ClassicalItemStatistics(Object id, boolean biasCorrection, boolean pearson, boolean dIndex) {
    this.biasCorrection = biasCorrection;
    this.pearson = pearson;
    this.dIndex = dIndex;
    mean = new Mean();
    sd = new StandardDeviation();

    if (dIndex) {
        upper = new Mean();
        lower = new Mean();
    }

    // Exactly one of pointBiserial / polyserial is created; the other stays
    // unset. Downstream code is expected to branch on `pearson`.
    if (this.pearson) {
        pointBiserial = new PearsonCorrelation();
    } else {
        polyserial = new PolyserialPlugin();
    }
}

From source file:com.cloudera.oryx.rdf.common.information.NumericInformationTest.java

/**
 * Verifies that categorical decisions built from three category groups
 * split as expected and that the best gain equals the differential-entropy
 * decomposition of the chosen split.
 */
@Test
public void testInformationCategoricalFeature() {
    ExampleSet exampleSet = examplesForValuesForCategories(new float[][] { new float[] { 1.0f, 1.5f },
            new float[] { 5.5f, 7.0f }, new float[] { 2.0f, 5.0f }, });
    List<Decision> decisions = Decision.decisionsFromExamples(exampleSet, 0, 100);
    assertEquals(2, decisions.size());
    BitSet categories0 = ((CategoricalDecision) decisions.get(0)).getCategoryIDs();
    BitSet categories1 = ((CategoricalDecision) decisions.get(1)).getCategoryIDs();
    assertEquals(1, categories0.cardinality());
    assertTrue(categories0.get(0));
    assertEquals(2, categories1.cardinality());
    assertTrue(categories1.get(0));
    assertTrue(categories1.get(2));

    Pair<Decision, Double> best = NumericalInformation.bestGain(decisions, exampleSet);
    assertEquals(categories0, ((CategoricalDecision) best.getFirst()).getCategoryIDs());

    StandardDeviation all = new StandardDeviation();
    all.incrementAll(new double[] { 1.0, 1.5, 5.5, 7.0, 2.0, 5.0 });
    StandardDeviation positive = new StandardDeviation();
    positive.incrementAll(new double[] { 1.0, 1.5 });
    StandardDeviation negative = new StandardDeviation();
    negative.incrementAll(new double[] { 5.5, 7.0, 2.0, 5.0 });

    // FIX: compare the computed doubles with an explicit tolerance. The
    // no-delta assertEquals(double, double) is deprecated exact comparison,
    // which is fragile here because the expected value is recomputed through
    // a different sequence of floating-point operations than bestGain uses.
    assertEquals(differentialEntropy(all) - (2.0 / 6.0) * differentialEntropy(positive)
            - (4.0 / 6.0) * differentialEntropy(negative), best.getValue().doubleValue(), 1.0e-6);
}

From source file:net.sf.sessionAnalysis.SessionVisitorSessionLengthNanosStatistics.java

/**
 * Computes the sample standard deviation of the recorded session lengths
 * (nanoseconds), as produced by {@link #computeLengthVector()}.
 *
 * @return the standard deviation of the session-length vector
 */
public double computeSessionLengthStdDev() {
    return new StandardDeviation().evaluate(computeLengthVector());
}