Example usage for org.apache.commons.math3.stat.descriptive.moment.StandardDeviation: the StandardDeviation() constructor

Introduction

This page collects example usages of the no-argument constructor of org.apache.commons.math3.stat.descriptive.moment.StandardDeviation.

Prototype

public StandardDeviation() 

Document

Constructs a StandardDeviation.
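
The no-argument constructor creates a bias-corrected estimator: evaluate and getResult divide the sum of squared deviations by n - 1, giving the sample standard deviation. Below is a minimal, self-contained sketch of the two usage styles that recur in the examples on this page, one-shot evaluation of an array and incremental updates; the class name and data values are illustrative only.

import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class StandardDeviationDemo {
    public static void main(String[] args) {
        double[] data = { 2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0 };

        // One-shot: evaluate the sample standard deviation of a full array.
        StandardDeviation sd = new StandardDeviation();
        System.out.println(sd.evaluate(data));

        // Incremental: feed values one at a time, then read the result.
        StandardDeviation running = new StandardDeviation();
        for (double d : data) {
            running.increment(d);
        }
        System.out.println(running.getResult());

        // Population variant (divides by n) via setBiasCorrected(false).
        StandardDeviation popSd = new StandardDeviation();
        popSd.setBiasCorrected(false);
        System.out.println(popSd.evaluate(data));
    }
}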

Usage

From source file: in.ac.iitb.cse.cartsbusboarding.acc.FeatureCalculator.java

/**
 * Calculates the standard deviation of the data given to this function.
 *
 * @param input the data array
 * @return the standard deviation of input
 */
private double calculateStd(double[] input) {
    return (new StandardDeviation()).evaluate(input);
}
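
A new StandardDeviation instance is created on every call, which keeps the helper stateless; as noted above, evaluate returns the bias-corrected (sample) standard deviation by default.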

From source file: com.itemanalysis.psychometrics.irt.estimation.StartingValues.java

/**
 * Computes normal approximation estimates (PROX) of item difficulty and person ability
 * in a way that allows for missing data (Linacre, 1994). It is an iterative procedure.
 *
 * Linacre, J. M., (1994). PROX with missing data, or known item or person measures.
 * Rasch Measurement Transactions, 8:3, 378, http://www.rasch.org/rmt/rmt83g.htm.
 *
 * @param converge convergence criterion as the maximum change in person logits.
 * @param maxIter maximum number of iterations. About 10 iterations works well.
 */
private void prox(double converge, int maxIter) {
    double delta = 1.0 + converge;
    int iter = 0;
    double pProx = 0;
    double pScore = 0;
    double maxTestScore = 0;
    double maxChange = 0;
    double logit = 0;

    Mean personGrandMean = new Mean();
    StandardDeviation personGrandSd = new StandardDeviation();
    double iProx = 0.0;
    double iMean = 0;
    theta = new double[nResponseVectors];

    Mean[] mPerson = new Mean[nItems]; // mean ability of examinees completing item j
    StandardDeviation[] sdPerson = new StandardDeviation[nItems]; // ability standard deviation of examinees completing item j
    double[] Si = null;
    double[] Ni = null;

    Mean[] mItem = new Mean[nResponseVectors]; // mean difficulty of items completed by examinee l
    StandardDeviation[] sdItem = new StandardDeviation[nResponseVectors]; // difficulty standard deviation of items completed by examinee l

    while (delta > converge && iter < maxIter) {
        Si = new double[nItems];
        Ni = new double[nItems];

        //Compute descriptive statistics for persons and items
        double resp = 0;
        double freq = 0;
        for (int l = 0; l < nResponseVectors; l++) {
            freq = responseVector[l].getFrequency();

            for (int j = 0; j < nItems; j++) {

                //initialize arrays
                if (l == 0) {
                    mPerson[j] = new Mean();
                    sdPerson[j] = new StandardDeviation();
                }

                if (j == 0) {
                    mItem[l] = new Mean();
                    sdItem[l] = new StandardDeviation();
                }

                if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {

                    resp = responseVector[l].getResponseAt(j);

                    //increment item and person summary statistics
                    if (resp != -1) {
                        //incorporate weights - crude workaround
                        for (int w = 0; w < freq; w++) {
                            mItem[l].increment(irm[j].getDifficulty());
                            sdItem[l].increment(irm[j].getDifficulty());

                            mPerson[j].increment(theta[l]);
                            sdPerson[j].increment(theta[l]);
                            Si[j] += resp;
                            Ni[j]++;
                        }

                    }
                }
            } //end item loop

        } //end summary loop

        //Compute item PROX for binary items only
        iMean = 0;
        double pSd = 1e-8;
        double ni = 0;
        for (int j = 0; j < nItems; j++) {
            if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {
                pSd = sdPerson[j].getResult();

                //adjust extreme item scores
                if (Si[j] == 0)
                    Si[j] += 0.3;
                if (Si[j] == Ni[j])
                    Si[j] -= 0.3;

                logit = Math.log(Si[j] / (Ni[j] - Si[j]));
                iProx = mPerson[j].getResult() - Math.sqrt(1.0 + pSd / 2.9) * logit;
                irm[j].setDifficulty(iProx);
                iMean += iProx;
                ni++;
            }
        }
        iMean /= ni;

        //center difficulties about the mean item difficulty
        for (int j = 0; j < nItems; j++) {
            if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {
                iProx = irm[j].getDifficulty();
                irm[j].setDifficulty(iProx - iMean);
            }
        }

        //Compute person PROX
        maxChange = 0;
        personGrandMean.clear();
        personGrandSd.clear();
        Pair<Double, Double> personScores = null;
        for (int l = 0; l < nResponseVectors; l++) {
            personScores = computePersonScores(responseVector[l]);
            pScore = personScores.getFirst();
            maxTestScore = personScores.getSecond();

            //adjust extreme person scores
            if (pScore == 0)
                pScore += 0.3;
            if (pScore == maxTestScore)
                pScore -= 0.3;

            logit = Math.log(pScore / (maxTestScore - pScore));
            pProx = mItem[l].getResult() + Math.sqrt(1.0 + sdItem[l].getResult() / 2.9) * logit;
            maxChange = Math.max(maxChange, Math.abs(theta[l] - pProx));
            theta[l] = pProx;
            personGrandMean.increment(pProx);
            personGrandSd.increment(pProx);
        }

        delta = maxChange;
        iter++;

        fireEMStatusEvent(iter, delta, Double.NaN);

    } //end while

    //Linearly transform theta estimate to have a mean of 0 and a standard deviation of 1.
    //Apply the same transformation to item difficulty values.
    double A = 1.0 / personGrandSd.getResult();
    double B = -A * personGrandMean.getResult();

    for (int l = 0; l < nResponseVectors; l++) {
        theta[l] = theta[l] * A + B;
    }

    double a = 1;
    double b = 0;
    for (int j = 0; j < nItems; j++) {
        if (irm[j].getType() == IrmType.L3 || irm[j].getType() == IrmType.L4) {
            b = irm[j].getDifficulty();
            irm[j].setDifficulty(b * A + B);

            //Adjust discrimination parameter for scaling constant.
            //PROX assumes a logit scale. This conversion is to convert to the normal metric.
            a = irm[j].getDiscrimination();
            irm[j].setDiscrimination(a / irm[j].getScalingConstant());
        }
    }

    //For debugging
    //        System.out.println("ITER: " + iter);
    //        for(int j=0;j<nItems;j++){
    //            System.out.println("PROX: " + irm[j].toString());
    //        }

}

From source file: com.vmware.photon.controller.cloudstore.xenon.entity.SchedulingConstantGeneratorTest.java

/**
 * Computes the coefficient of variation of the gaps between adjacent
 * scheduling constants.
 *
 * @param schedulingConstants sorted list of scheduling constants
 * @return the standard deviation of the gaps divided by their mean
 */
private double schedulingConstantGapCV(List<Long> schedulingConstants) {
    // Compute the difference between each scheduling constant and the next.
    double[] gaps = new double[schedulingConstants.size()];

    for (int i = 0; i < schedulingConstants.size(); i++) {
        long gap;

        // Special case at end of list: wrap around
        if (i == schedulingConstants.size() - 1) {
            gap = schedulingConstants.get(0) - schedulingConstants.get(i) + 10000;
        } else {
            gap = schedulingConstants.get(i + 1) - schedulingConstants.get(i);
        }

        gaps[i] = (double) gap;
    }

    // Compute coefficient of variation
    double gapMean = new Mean().evaluate(gaps);
    double gapSD = new StandardDeviation().evaluate(gaps);
    return gapSD / gapMean;
}
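
The last gap wraps around from the largest scheduling constant back to the smallest, which assumes the constants live on a circular range of size 10,000 (hence the + 10000 term); the returned ratio is dimensionless, so it can be compared across runs with different mean gaps.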

From source file: com.itemanalysis.psychometrics.rasch.JMLE.java

public void linearTransformation(DefaultLinearTransformation lt, int precision) {
    Mean pMean = new Mean();
    StandardDeviation pSd = new StandardDeviation();

    //set transformation and rescale persons
    double newScale = lt.getScale();
    double newMean = lt.getIntercept();
    double oldPersonMean = pMean.evaluate(theta);
    double oldPersonSd = pSd.evaluate(theta);

    lt.setScaleAndIntercept(oldPersonMean, newMean, oldPersonSd, newScale);

    for (int i = 0; i < theta.length; i++) {
        theta[i] = lt.transform(theta[i]);
    }

    //set transformation and rescale items
    Mean iMean = new Mean();
    StandardDeviation iSd = new StandardDeviation();
    double tempDifficulty = 0.0;

    for (VariableName v : items.keySet()) {
        tempDifficulty = items.get(v).getDifficulty();
        iMean.increment(tempDifficulty);
        iSd.increment(tempDifficulty);
    }

    lt.setScaleAndIntercept(iMean.getResult(), newMean, iSd.getResult(), newScale);

    for (VariableName v : items.keySet()) {
        items.get(v).linearTransformation(lt, precision);
    }

    //set transformation and rescale thresholds
    RatingScaleThresholds tempThresholds = null;

    for (String s : thresholds.keySet()) {
        tempThresholds = thresholds.get(s);
        lt.setScaleAndIntercept(tempThresholds.getThresholdMean(), newMean,
                tempThresholds.getThresholdStandardDeviation(), newScale);
        thresholds.get(s).linearTransformation(lt, precision);
    }
}
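
Note the two usage styles side by side: person parameters are rescaled via one-shot evaluate calls over the theta array, while item difficulties are accumulated with increment and read back through getResult. In each case setScaleAndIntercept presumably derives a transformation x' = A*x + B that maps the old mean and standard deviation onto newMean and newScale.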

From source file: org.datavec.api.transform.reduce.Reducer.java

private Writable reduceLongColumn(ReduceOp op, List<Writable> values, boolean ignoreInvalid,
        ColumnMetaData metaData) {
    switch (op) {
    case Min:
        long min = Long.MAX_VALUE;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            min = Math.min(min, w.toLong());
        }
        return new LongWritable(min);
    case Max:
        long max = Long.MIN_VALUE;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            max = Math.max(max, w.toLong());
        }
        return new LongWritable(max);
    case Range:
        long min2 = Long.MAX_VALUE;
        long max2 = Long.MIN_VALUE;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            long l = w.toLong();
            min2 = Math.min(min2, l);
            max2 = Math.max(max2, l);
        }
        return new LongWritable(max2 - min2);
    case Sum:
    case Mean:
        long sum = 0;
        int count = 0;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            sum += w.toLong();
            count++;
        }
        if (op == ReduceOp.Sum)
            return new LongWritable(sum);
        else if (count > 0)
            return new DoubleWritable(((double) sum) / count);
        else
            return new DoubleWritable(0.0);
    case Stdev:
        double[] arr = new double[values.size()];
        int i = 0;
        int countValid = 0;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            arr[i++] = w.toLong();
            countValid++;
        }
        if (ignoreInvalid && countValid < arr.length) {
            arr = Arrays.copyOfRange(arr, 0, countValid);
        }
        return new DoubleWritable(new StandardDeviation().evaluate(arr));
    case Count:
        if (ignoreInvalid) {
            int countValid2 = 0;
            for (Writable w : values) {
                if (!metaData.isValid(w))
                    continue;
                countValid2++;
            }
            return new IntWritable(countValid2);
        }
        return new IntWritable(values.size());
    case CountUnique:
        Set<Long> set = new HashSet<>();
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            set.add(w.toLong());
        }
        return new IntWritable(set.size());
    case TakeFirst:
        if (values.size() > 0)
            return values.get(0);
        return new LongWritable(0);
    case TakeLast:
        if (values.size() > 0)
            return values.get(values.size() - 1);
        return new LongWritable(0);
    default:
        throw new UnsupportedOperationException("Unknown or not implemented op: " + op);
    }
}
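
In the Stdev branch the scratch array is sized to values.size() and trimmed with Arrays.copyOfRange when invalid entries were skipped, so StandardDeviation.evaluate only ever sees valid values. The reduceDoubleColumn variant below follows the same pattern for double-valued columns.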

From source file: org.datavec.api.transform.reduce.Reducer.java

private Writable reduceDoubleColumn(ReduceOp op, List<Writable> values, boolean ignoreInvalid,
        ColumnMetaData metaData) {
    switch (op) {
    case Min:
        double min = Double.MAX_VALUE;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            min = Math.min(min, w.toDouble());
        }
        return new DoubleWritable(min);
    case Max:
        double max = -Double.MAX_VALUE;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            max = Math.max(max, w.toDouble());
        }
        return new DoubleWritable(max);
    case Range:
        double min2 = Double.MAX_VALUE;
        double max2 = -Double.MAX_VALUE;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            double d = w.toDouble();
            min2 = Math.min(min2, d);
            max2 = Math.max(max2, d);
        }
        return new DoubleWritable(max2 - min2);
    case Sum:
    case Mean:
        double sum = 0;
        int count = 0;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            sum += w.toDouble();
            count++;
        }
        if (op == ReduceOp.Sum)
            return new DoubleWritable(sum);
        else if (count > 0)
            return new DoubleWritable(sum / count);
        else
            return new DoubleWritable(0.0);
    case Stdev:
        double[] arr = new double[values.size()];
        int i = 0;
        int countValid = 0;
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            arr[i++] = w.toDouble();
            countValid++;
        }
        if (ignoreInvalid && countValid < arr.length) {
            arr = Arrays.copyOfRange(arr, 0, countValid);
        }
        return new DoubleWritable(new StandardDeviation().evaluate(arr));
    case Count:
        if (ignoreInvalid) {
            int countValid2 = 0;
            for (Writable w : values) {
                if (!metaData.isValid(w))
                    continue;
                countValid2++;
            }
            return new IntWritable(countValid2);
        }
        return new IntWritable(values.size());
    case CountUnique:
        Set<Double> set = new HashSet<>();
        for (Writable w : values) {
            if (ignoreInvalid && !metaData.isValid(w))
                continue;
            set.add(w.toDouble());
        }
        return new IntWritable(set.size());
    case TakeFirst:
        if (values.size() > 0)
            return values.get(0);
        return new DoubleWritable(0.0);
    case TakeLast:
        if (values.size() > 0)
            return values.get(values.size() - 1);
        return new DoubleWritable(0.0);
    default:
        throw new UnsupportedOperationException("Unknown or not implemented op: " + op);
    }
}

From source file: org.drugis.mtc.summary.NormalSummary.java

private synchronized void calculateResults() {
    if (!isReady())
        return;
    List<Double> samples = SummaryUtil.getAllChainsLastHalfSamples(d_results, d_parameter);
    d_mean = SummaryUtil.evaluate(new Mean(), samples);
    d_stdev = SummaryUtil.evaluate(new StandardDeviation(), samples);
    d_defined = true;
    firePropertyChange(PROPERTY_DEFINED, null, d_defined);
    firePropertyChange(PROPERTY_MEAN, null, d_mean);
    firePropertyChange(PROPERTY_STANDARD_DEVIATION, null, d_stdev);
}

From source file: org.drugis.mtc.yadas.ContinuousDataIT.java

@Before
public void setUp() throws JAXBException {
    // data from Welton et al., Am J Epidemiol 2009;169:1158-1165
    d_m = -1.362791; // mean(d)
    d_s = 0.982033; // sd(d)
    d_mean = new Mean();
    d_stdDev = new StandardDeviation();

    InputStream is = ContinuousDataIT.class.getResourceAsStream("weltonBP.xml");
    d_network = JAXBHandler.readNetwork(is);

    d_psych = new Treatment("psych");
    d_usual = new Treatment("usual");
}

From source file: org.hawkular.datamining.forecast.utils.Utils.java

public static double standardDeviation(Double[] residuals) {
    double[] primitiveResiduals = ArrayUtils.toPrimitive(residuals);
    StandardDeviation standardDeviation = new StandardDeviation();
    return standardDeviation.evaluate(primitiveResiduals);
}
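
ArrayUtils.toPrimitive (Apache Commons Lang) unboxes the Double[] into the double[] that evaluate requires; a null element in residuals would cause a NullPointerException during unboxing.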

From source file: org.jpmml.evaluator.functions.StandardDeviationFunction.java

private static Double evaluate(Collection<?> values, boolean biasCorrected) {
    StandardDeviation statistic = new StandardDeviation();
    statistic.setBiasCorrected(biasCorrected);

    for (Object value : values) {
        Double doubleValue = (Double) TypeUtil.parseOrCast(DataType.DOUBLE, value);

        statistic.increment(doubleValue);
    }

    return statistic.getResult();
}
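
Here setBiasCorrected(biasCorrected) selects between the sample (n - 1 denominator) and population (n denominator) standard deviation, and the increment/getResult style streams the values without materializing an intermediate array.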