Example usage for org.apache.commons.math3.stat.descriptive.moment.Mean#Mean()

List of usage examples for the org.apache.commons.math3.stat.descriptive.moment.Mean constructor Mean().

Introduction

On this page you can find example usage for the org.apache.commons.math3.stat.descriptive.moment.Mean constructor Mean().

Prototype

public Mean() 

Document

Constructs a Mean.
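
As a quick orientation for the examples that follow, here is a minimal self-contained sketch (with made-up data) of the two patterns that recur below: storeless accumulation via increment(), and one-shot computation via evaluate(), including the sub-range overload.

import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class MeanQuickStart {
    public static void main(String[] args) {
        double[] data = { 2.0, 4.0, 6.0, 8.0 };

        // Storeless usage: feed values one at a time, e.g. while streaming rows.
        Mean mean = new Mean();
        for (double d : data) {
            mean.increment(d);
        }
        System.out.println(mean.getResult()); // 5.0
        System.out.println(mean.getN()); // 4
        mean.clear(); // reset before reusing the instance

        // One-shot usage over an array, optionally restricted to a sub-range
        // via evaluate(values, begin, length).
        System.out.println(new Mean().evaluate(data)); // 5.0
        System.out.println(new Mean().evaluate(data, 0, 2)); // 3.0
    }
}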

Usage

From source file:com.itemanalysis.jmetrik.stats.transformation.LinearTransformationAnalysis.java

public String transformScore() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;
    Double constrainedScore = null;

    try {
        //add variable to db
        dao.addColumnToDb(conn, tableName, addedVariableInfo);

        conn.setAutoCommit(false);//begin transaction

        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        select.addColumn(sqlTable, selectedVariable.getName().nameForDatabase());
        select.addColumn(sqlTable, addedVariableInfo.getName().nameForDatabase());
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
        rs = stmt.executeQuery(select.toString());

        this.firePropertyChange("message", "", "Transforming scores...");

        double origValue = 0.0;
        double transValue = 0.0;
        double z = 0.0;

        StandardDeviation sd = new StandardDeviation();
        Mean mean = new Mean();
        Min min = new Min();
        Max max = new Max();

        while (rs.next()) {
            origValue = rs.getDouble(selectedVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                sd.increment(origValue);
                mean.increment(origValue);
                min.increment(origValue);
                max.increment(origValue);
            }
            updateProgress();
        }

        double meanValue = mean.getResult();
        double sdValue = sd.getResult();
        double minValue = min.getResult();
        double maxValue = max.getResult();
        double A = 1.0;
        double B = 0.0;

        rs.beforeFirst();

        while (rs.next()) {
            origValue = rs.getDouble(selectedVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                if (type1) {
                    z = (origValue - meanValue) / sdValue;
                    transValue = scaleSd * z + scaleMean;
                    transValue = checkConstraints(transValue);
                } else {
                    A = (maxPossibleScore - minPossibleScore) / (maxValue - minValue);
                    B = minPossibleScore - minValue * A;
                    transValue = origValue * A + B;
                    transValue = checkConstraints(transValue);
                }

                descriptiveStatistics.increment(transValue);

                rs.updateDouble(addedVariableInfo.getName().nameForDatabase(), transValue);
                rs.updateRow();
            }
            updateProgress();
        }

        conn.commit();
        conn.setAutoCommit(true);

        //create output
        DefaultLinearTransformation linearTransformation = new DefaultLinearTransformation();
        linearTransformation.setScale(A);
        linearTransformation.setIntercept(B);

        StringBuilder sb = new StringBuilder();
        Formatter f = new Formatter(sb);
        f.format(publishHeader());
        f.format(descriptiveStatistics.toString());
        f.format(linearTransformation.toString());
        f.format("%n");
        f.format("%n");
        return f.toString();

    } catch (SQLException ex) {
        conn.rollback();
        conn.setAutoCommit(true);
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }

}

From source file:edu.uci.imbs.actor.VariablePopulationProtectionStatistics.java

private void calculateAverageBanditNumberPeasantsToPreyUpon() {
    Mean mean = new Mean();
    averageBanditNumberPeasantsToPreyUpon = mean.evaluate(numbersOfPeasantsToPreyUponDoubles);
}

From source file:com.dasasian.chok.testutil.loadtest.LoadTestMasterOperation.java

@Override
public void nodeOperationsComplete(MasterContext context, List<OperationResult> nodeResults) throws Exception {
    try {
        final int queryRate = calculateCurrentQueryRate();
        LOG.info("collecting results for iteration " + currentIteration + " and query rate " + queryRate
                + " after " + (System.currentTimeMillis() - currentIterationStartTime) + " ms ...");
        List<LoadTestQueryResult> queryResults = new ArrayList<>();
        for (OperationResult operationResult : nodeResults) {
            if (operationResult == null || operationResult.getUnhandledError() != null) {
                Exception rootException = null;
                if (operationResult != null) {
                    rootException = operationResult.getUnhandledError(); // preserve the root cause
                }
                throw new IllegalStateException(
                        "at least one node operation did not completed properly: " + nodeResults,
                        rootException);
            }
            LoadTestNodeOperationResult nodeOperationResult = (LoadTestNodeOperationResult) operationResult;
            queryResults.addAll(nodeOperationResult.getQueryResults());
        }
        LOG.info("Received " + queryResults.size() + " queries, expected " + queryRate * runTime / 1000);

        File statisticsFile = new File(resultDir, "load-test-log-" + startTime + ".log");
        File resultsFile = new File(resultDir, "load-test-results-" + startTime + ".log");
        Writer statisticsWriter = new OutputStreamWriter(new FileOutputStream(statisticsFile, true));
        Writer resultWriter = new OutputStreamWriter(new FileOutputStream(resultsFile, true));
        if (currentIteration == 0) {
            // print headers
            statisticsWriter.append("#queryRate \tnode \tstartTime \tendTime \telapseTime \tquery \n");
            resultWriter.append(
                    "#requestedQueryRate \tachievedQueryRate \tfiredQueries \tqueryErrors \tavarageQueryDuration \tstandardDeviation  \n");
        }
        try {
            StorelessUnivariateStatistic timeStandardDeviation = new StandardDeviation();
            StorelessUnivariateStatistic timeMean = new Mean();
            int errors = 0;

            for (LoadTestQueryResult result : queryResults) {
                long elapsedTime = result.getEndTime() > 0 ? result.getEndTime() - result.getStartTime() : -1;
                statisticsWriter.write(queryRate + "\t" + result.getNodeId() + "\t" + result.getStartTime()
                        + "\t" + result.getEndTime() + "\t" + elapsedTime + "\t" + result.getQuery() + "\n");
                if (elapsedTime != -1) {
                    timeStandardDeviation.increment(elapsedTime);
                    timeMean.increment(elapsedTime);
                } else {
                    ++errors;
                }
            }
            resultWriter.write(queryRate + "\t" + ((double) queryResults.size() / (runTime / 1000)) + "\t"
                    + queryResults.size() + "\t" + errors + "\t" + (int) timeMean.getResult() + "\t"
                    + (int) timeStandardDeviation.getResult() + "\n");
        } catch (IOException e) {
            throw new IllegalStateException("Failed to write statistics data.", e);
        }
        try {
            LOG.info("results written to " + resultsFile.getAbsolutePath());
            LOG.info("statistics written to " + statisticsFile.getAbsolutePath());
            statisticsWriter.close();
            resultWriter.close();
        } catch (IOException e) {
            LOG.warn("Failed to close statistics file.");
        }
        if (queryRate + step <= endRate) {
            currentIteration++;
            LOG.info("triggering next iteration " + currentIteration);
            context.getMasterQueue().add(this);
        } else {
            LOG.info("finish load test in iteration " + currentIteration + " after "
                    + (System.currentTimeMillis() - startTime) + " ms");
            context.getProtocol().removeFlag(getName());
        }
    } catch (Exception e) {
        LOG.error("failed to execute load test iteration " + currentIteration, e);
        context.getProtocol().removeFlag(getName());
    }
}

From source file:edu.uci.imbs.actor.VariablePopulationProtectionStatistics.java

private void calculateAveragePeasantProtectionProportion() {
    Mean mean = new Mean();
    Double[] valuesDouble = new Double[distribution.keySet().size()];
    bins = unboxDoubleArray(distribution.keySet().toArray(valuesDouble));
    counts = convertIntegerCollectionToDoublesArray(distribution.values());
    averagePeasantProtectionProportion = mean.evaluate(bins, counts);
}
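
The two-argument evaluate call above computes a weighted mean: Mean implements WeightedEvaluation, so evaluate(values, weights) returns sum(weights[i] * values[i]) / sum(weights[i]). A minimal sketch with hypothetical histogram data:

import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class WeightedMeanSketch {
    public static void main(String[] args) {
        // Hypothetical histogram: bin values and their observation counts.
        double[] binValues = { 0.0, 0.5, 1.0 };
        double[] counts = { 10.0, 30.0, 60.0 };

        // Weighted mean: (0.0*10 + 0.5*30 + 1.0*60) / (10 + 30 + 60) = 0.75
        System.out.println(new Mean().evaluate(binValues, counts));
    }
}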

From source file:gedi.util.math.stat.distributions.NormalMixtureDistribution.java

public static NormalMixtureDistribution init(final double[] data, final int numComponents)
        throws NotStrictlyPositiveException, DimensionMismatchException {

    if (numComponents == 1)
        return new NormalMixtureDistribution(new NormalDistribution[] {
                new NormalDistribution(new Mean().evaluate(data), new StandardDeviation().evaluate(data)) },
                new double[] { 1 });

    if (data.length < 2) {
        throw new NotStrictlyPositiveException(data.length);
    }
    if (numComponents < 2) {
        throw new NumberIsTooSmallException(numComponents, 2, true);
    }
    if (numComponents > data.length) {
        throw new NumberIsTooLargeException(numComponents, data.length, true);
    }

    final int numRows = data.length;
    double[] sortedData = data.clone();
    Arrays.sort(sortedData);

    // components of mixture model to be created
    double[] mixing = new double[numComponents];
    NormalDistribution[] comp = new NormalDistribution[numComponents];

    // create a component based on data in each bin
    for (int k = 0; k < numComponents; k++) {
        // minimum index (inclusive) from sorted data for this bin
        final int minIndex = (k * numRows) / numComponents;

        // maximum index (exclusive) from sorted data for this bin
        final int maxIndex = Math.min(numRows, ((k + 1) * numRows) / numComponents);

        double m = new Mean().evaluate(sortedData, minIndex, maxIndex - minIndex);
        double sd = new StandardDeviation().evaluate(sortedData, minIndex, maxIndex - minIndex);
        mixing[k] = 1d / numComponents;
        comp[k] = new NormalDistribution(m, sd);
    }

    return new NormalMixtureDistribution(comp, mixing);
}

From source file:com.cloudera.oryx.app.serving.als.model.ALSServingModelTest.java

@Test
public void testLSHEffect() {
    RandomGenerator random = RandomManager.getRandom();
    PoissonDistribution itemPerUserDist = new PoissonDistribution(random, 20,
            PoissonDistribution.DEFAULT_EPSILON, PoissonDistribution.DEFAULT_MAX_ITERATIONS);
    int features = 20;
    ALSServingModel mainModel = new ALSServingModel(features, true, 1.0, null);
    ALSServingModel lshModel = new ALSServingModel(features, true, 0.5, null);

    int userItemCount = 20000;
    for (int user = 0; user < userItemCount; user++) {
        String userID = "U" + user;
        float[] vec = VectorMath.randomVectorF(features, random);
        mainModel.setUserVector(userID, vec);
        lshModel.setUserVector(userID, vec);
        int itemsPerUser = itemPerUserDist.sample();
        Collection<String> knownIDs = new ArrayList<>(itemsPerUser);
        for (int i = 0; i < itemsPerUser; i++) {
            knownIDs.add("I" + random.nextInt(userItemCount));
        }
        mainModel.addKnownItems(userID, knownIDs);
        lshModel.addKnownItems(userID, knownIDs);
    }

    for (int item = 0; item < userItemCount; item++) {
        String itemID = "I" + item;
        float[] vec = VectorMath.randomVectorF(features, random);
        mainModel.setItemVector(itemID, vec);
        lshModel.setItemVector(itemID, vec);
    }

    int numRecs = 10;
    Mean meanMatchLength = new Mean();
    for (int user = 0; user < userItemCount; user++) {
        String userID = "U" + user;
        List<Pair<String, Double>> mainRecs = mainModel
                .topN(new DotsFunction(mainModel.getUserVector(userID)), null, numRecs, null)
                .collect(Collectors.toList());
        List<Pair<String, Double>> lshRecs = lshModel
                .topN(new DotsFunction(lshModel.getUserVector(userID)), null, numRecs, null)
                .collect(Collectors.toList());
        int i = 0;
        while (i < lshRecs.size() && i < mainRecs.size() && lshRecs.get(i).equals(mainRecs.get(i))) {
            i++;
        }
        meanMatchLength.increment(i);
    }
    log.info("Mean matching prefix: {}", meanMatchLength.getResult());
    assertGreaterOrEqual(meanMatchLength.getResult(), 4.0);

    meanMatchLength.clear();
    for (int item = 0; item < userItemCount; item++) {
        String itemID = "I" + item;
        List<Pair<String, Double>> mainRecs = mainModel
                .topN(new CosineAverageFunction(mainModel.getItemVector(itemID)), null, numRecs, null)
                .collect(Collectors.toList());
        List<Pair<String, Double>> lshRecs = lshModel
                .topN(new CosineAverageFunction(lshModel.getItemVector(itemID)), null, numRecs, null)
                .collect(Collectors.toList());
        int i = 0;
        while (i < lshRecs.size() && i < mainRecs.size() && lshRecs.get(i).equals(mainRecs.get(i))) {
            i++;
        }
        meanMatchLength.increment(i);
    }
    log.info("Mean matching prefix: {}", meanMatchLength.getResult());
    assertGreaterOrEqual(meanMatchLength.getResult(), 5.0);
}

From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java

private void initializeGridPoints() throws SQLException {
    Statement stmt = null;
    ResultSet rs = null;

    //connect to db
    try {
        Table sqlTable = new Table(tableName.getNameForDatabase());
        SelectQuery select = new SelectQuery();
        select.addColumn(sqlTable, regressorVariable.getName().nameForDatabase());
        stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
        rs = stmt.executeQuery(select.toString());

        Min min = new Min();
        Max max = new Max();
        Mean mean = new Mean();
        StandardDeviation sd = new StandardDeviation();

        double value = 0.0;
        while (rs.next()) {
            value = rs.getDouble(regressorVariable.getName().nameForDatabase());
            if (!rs.wasNull()) {
                min.increment(value);
                max.increment(value);
                mean.increment(value);
                sd.increment(value);
            }
            updateProgress();
        }
        rs.close();
        stmt.close();

        //evaluation points
        double sdv = sd.getResult();
        double mn = mean.getResult();
        double lower = mn - 2.5 * sdv;
        double upper = mn + 2.5 * sdv;
        bwAdjustment *= sdv;
        bandwidth = new NonparametricIccBandwidth(sampleSize, bwAdjustment);
        gridPoints = command.getFreeOption("gridpoints").getInteger();
        //            uniformDistributionApproximation = new UniformDistributionApproximation(
        //                    min.getResult(), max.getResult(), gridPoints);
        uniformDistributionApproximation = new UniformDistributionApproximation(lower, upper, gridPoints);

    } catch (SQLException ex) {
        throw ex;
    } finally {
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
    }

}

From source file:com.itemanalysis.psychometrics.irt.equating.MeanSigmaMethodTest.java

/**
 * Tests the calculations needed for mean/mean and mean/sigma scale linking.
 * Item parameters and true values were obtained from example 2 of the STUIRT
 * program by Michael Kolen and colleagues. Note that the original example
 * used the PARSCALE version of item parameters. These were converted to
 * ICL type parameters by subtracting a step from the item difficulty.
 *
 */
@Test
public void mixedFormatDescriptiveStatisticsTestFormY() {
    System.out.println("Mixed format descriptive statistics test Form Y");

    ItemResponseModel[] irm = new ItemResponseModel[17];

    irm[0] = new Irm3PL(0.887276, -1.334798, 0.134406, 1.7);
    irm[1] = new Irm3PL(1.184412, -1.129004, 0.237765, 1.7);
    irm[2] = new Irm3PL(0.609412, -1.464546, 0.15139, 1.7);
    irm[3] = new Irm3PL(0.923812, -0.576435, 0.240097, 1.7);
    irm[4] = new Irm3PL(0.822776, -0.476357, 0.192369, 1.7);
    irm[5] = new Irm3PL(0.707818, -0.235189, 0.189557, 1.7);
    irm[6] = new Irm3PL(1.306976, 0.242986, 0.165553, 1.7);
    irm[7] = new Irm3PL(1.295471, 0.598029, 0.090557, 1.7);
    irm[8] = new Irm3PL(1.366841, 0.923206, 0.172993, 1.7);
    irm[9] = new Irm3PL(1.389624, 1.380666, 0.238008, 1.7);
    irm[10] = new Irm3PL(0.293806, 2.02807, 0.203448, 1.7);
    irm[11] = new Irm3PL(0.885347, 3.152928, 0.195473, 1.7);

    double[] step1 = { 0, -1.387347, 0.399117 };
    irm[12] = new IrmGPCM(0.346324, step1, 1.7);

    double[] step2 = { 0, 0.756514, 0.956014 };
    irm[13] = new IrmGPCM(1.252012, step2, 1.7);

    double[] step3 = { 0, 0.975303, 4.676299 };
    irm[14] = new IrmGPCM(0.392282, step3, 1.7);

    double[] step4 = { 0, 0.643405, -0.418869, 0.804394 };
    irm[15] = new IrmGPCM(0.660841, step4, 1.7);

    double[] step5 = { 0, 0.641293, 1.750488, 2.53802 };
    irm[16] = new IrmGPCM(0.669612, step5, 1.7);

    Mean discriminationX = new Mean();
    Mean difficultyX = new Mean();

    Mean difficultyMeanX = new Mean();
    StandardDeviation difficultySdX = new StandardDeviation(false);//Do not correct for bias. Use N in the denominator, not N-1.

    for (int j = 0; j < 17; j++) {
        irm[j].incrementMeanMean(discriminationX, difficultyX);
        irm[j].incrementMeanSigma(difficultyMeanX, difficultySdX);
    }

    //        System.out.println("Mean/mean descriptive statistics for Form X");
    //        System.out.println("a-mean: " + discriminationX.getResult());
    //        System.out.println("b-mean: " + difficultyX.getResult());

    assertEquals("Mean/mean check: discrimination mean", 0.8820,
            Precision.round(discriminationX.getResult(), 4), 1e-5);
    assertEquals("Mean/mean check: difficulty mean", 0.6435, Precision.round(difficultyX.getResult(), 4), 1e-5);
    assertEquals("Mean/mean check: Number of difficulties (including steps) ", 24, difficultyX.getN(), 1e-3);

    //        System.out.println();
    //        System.out.println("Mean/sigma descriptive statistics for Form X");
    //        System.out.println("b-mean: " + difficultyMeanX.getResult());
    //        System.out.println("b-sd: " + difficultySdX.getResult());
    //        System.out.println("b-N: " + difficultyMeanX.getN() + ",   " + difficultySdX.getN());

    assertEquals("Mean/sigma check: difficulty mean", 0.6435, Precision.round(difficultyMeanX.getResult(), 4),
            1e-5);
    assertEquals("Mean/sigma check: difficulty sd", 1.4527, Precision.round(difficultySdX.getResult(), 4),
            1e-5);
    assertEquals("Mean/sigma check: Number of difficulties (including steps) ", 24, difficultyMeanX.getN(),
            1e-3);
    assertEquals("Mean/sigma check: Number of difficulties (including steps) ", 24, difficultySdX.getN(), 1e-3);

}
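
The StandardDeviation(false) constructor used above controls bias correction: the default constructor divides by N - 1, while passing false divides by N. A minimal sketch of the difference, with made-up values:

import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class BiasCorrectionSketch {
    public static void main(String[] args) {
        double[] data = { 1.0, 2.0, 3.0, 4.0 };

        // Default: bias-corrected sample standard deviation, N - 1 denominator.
        System.out.println(new StandardDeviation().evaluate(data)); // ~1.2910

        // As in the test above: population form, N denominator.
        System.out.println(new StandardDeviation(false).evaluate(data)); // ~1.1180
    }
}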

From source file:net.sf.javaml.clustering.AQBC.java

/**
 * Normalizes the data to mean 0 and standard deviation 1. This method
 * discards all instances that cannot be normalized, i.e. they have the same
 * value for all attributes.
 * 
 * @param data
 * @return
 */
private Vector<TaggedInstance> normalize(Dataset data) {
    Vector<TaggedInstance> out = new Vector<TaggedInstance>();

    for (int i = 0; i < data.size(); i++) {
        Double[] old = data.instance(i).values().toArray(new Double[0]);
        double[] conv = new double[old.length];
        for (int j = 0; j < old.length; j++) {
            conv[j] = old[j];
        }

        Mean m = new Mean();

        double MU = m.evaluate(conv);
        // System.out.println("MU = "+MU);
        StandardDeviation std = new StandardDeviation();
        double SIGM = std.evaluate(conv, MU);
        // System.out.println("SIGM = "+SIGM);
        if (!MathUtils.eq(SIGM, 0)) {
            double[] val = new double[old.length];
            for (int j = 0; j < old.length; j++) {
                val[j] = (float) ((old[j] - MU) / SIGM);

            }
            // System.out.println("VAL "+i+" = "+Arrays.toString(val));
            out.add(new TaggedInstance(new DenseInstance(val, data.instance(i).classValue()), i));
        }
    }
    // System.out.println("FIRST = "+out.get(0));

    return out;
}
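
Passing the precomputed mean into StandardDeviation.evaluate, as normalize() does above, avoids a second pass over the data inside the standard deviation calculation. A minimal sketch of that overload, with made-up values:

import org.apache.commons.math3.stat.descriptive.moment.Mean;
import org.apache.commons.math3.stat.descriptive.moment.StandardDeviation;

public class PrecomputedMeanSketch {
    public static void main(String[] args) {
        double[] values = { 1.0, 2.0, 3.0, 4.0 };

        // Compute the mean once, then hand it to evaluate() so the standard
        // deviation needs only one additional pass over the data.
        double mu = new Mean().evaluate(values);
        double sigma = new StandardDeviation().evaluate(values, mu);
        System.out.println(mu + " / " + sigma); // 2.5 / ~1.2910
    }
}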

From source file:feature.lowLevel.audio.FeatureExtractor.java

private double[] getStatisticalSpectrumDescriptor(double[] dataRow) {
    int N = dataRow.length;
    double[] ssd = new double[N_STATISTICAL_DESCRIPTORS];

    ssd[0] = new Mean().evaluate(dataRow);
    ssd[1] = new Variance().evaluate(dataRow);
    ssd[2] = new Skewness().evaluate(dataRow);
    ssd[3] = new Kurtosis().evaluate(dataRow);

    // NOTE: be careful, sort changes data!
    // (as sonogram.getRow() above copies the data anyway, in this case there is no problem)
    // (otherwise, use dataRow.clone(); )
    Arrays.sort(dataRow);

    // median
    if (N % 2 == 0) {
        ssd[4] = (dataRow[(N / 2) - 1] + dataRow[(N / 2)]) / 2;
    } else {
        ssd[4] = dataRow[(N - 1) / 2];
    }

    ssd[5] = dataRow[0]; // min value
    ssd[6] = dataRow[N - 1]; // max value

    return ssd;
}
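
For comparison, the same descriptors can also be read from a single DescriptiveStatistics instance, which stores the values once and leaves the caller's array unsorted. This is an alternative to the hand-rolled version above, not what the original code does; data here is made up:

import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class SsdAlternativeSketch {
    public static void main(String[] args) {
        double[] dataRow = { 3.0, 1.0, 4.0, 1.0, 5.0, 9.0, 2.0, 6.0 };

        DescriptiveStatistics stats = new DescriptiveStatistics(dataRow);
        double[] ssd = {
                stats.getMean(), stats.getVariance(), stats.getSkewness(),
                stats.getKurtosis(), stats.getPercentile(50), // median
                stats.getMin(), stats.getMax() };
        for (double d : ssd) {
            System.out.println(d);
        }
    }
}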