Example usage for org.apache.commons.math3.stat.descriptive.moment Mean getResult

List of usage examples for org.apache.commons.math3.stat.descriptive.moment Mean getResult

Introduction

On this page you can find example usages of org.apache.commons.math3.stat.descriptive.moment Mean getResult.

Prototype

@Override
public double getResult() 

Source Link

Usage

From source file:com.cloudera.oryx.app.serving.als.LoadIT.java

@Ignore("Difficult to assert about time in cross-platform way; run manually")
@Test
public void testRecommendLoad() throws Exception {
    AtomicLong requestCount = new AtomicLong();
    Mean meanRequestTimeMS = new Mean();
    long startTimeMS = System.currentTimeMillis();

    // Latency matters more than throughput here, and the local machine is also
    // busy serving the requests, so drive the load from a single worker rather
    // than a pool of WORKERS concurrent callables.
    new LoadCallable("0", meanRequestTimeMS, requestCount, startTimeMS).call();

    int totalRequests = WORKERS * REQS_PER_WORKER;
    log(totalRequests, meanRequestTimeMS, startTimeMS);

    // Allowed mean latency scales down with available parallelism.
    int cores = Runtime.getRuntime().availableProcessors();
    int allowedMS = 150 + 640 / cores; // crude, conservative empirical limit
    Assert.assertTrue("Expected < " + allowedMS + "ms / req with " + cores + " cores",
            meanRequestTimeMS.getResult() < allowedMS);
}

From source file:com.cloudera.oryx.als.common.candidate.LocationSensitiveHashIT.java

@Test
public void testLSH() {
    RandomGenerator random = RandomManager.getRandom();

    // Averages accumulated across ITERATIONS random trials
    Mean meanPercentTopRecsConsidered = new Mean();
    Mean meanNDCG = new Mean();
    Mean meanPercentAllItemsConsidered = new Mean();

    for (int trial = 0; trial < ITERATIONS; trial++) {

        // Build a random item-feature matrix and a random user vector
        LongObjectMap<float[]> itemVectors = new LongObjectMap<>();
        for (int item = 0; item < NUM_ITEMS; item++) {
            itemVectors.put(item, RandomUtils.randomUnitVector(NUM_FEATURES, random));
        }
        float[] userVector = RandomUtils.randomUnitVector(NUM_FEATURES, random);

        double[] stats = doTestRandomVecs(itemVectors, userVector);
        double percentTopRecsConsidered = stats[0];
        double ndcg = stats[1];
        double percentAllItemsConsidered = stats[2];

        log.info("Considered {}% of all candidates, {} nDCG, got {}% recommendations correct",
                100 * percentAllItemsConsidered, ndcg, 100 * percentTopRecsConsidered);

        meanPercentTopRecsConsidered.increment(percentTopRecsConsidered);
        meanNDCG.increment(ndcg);
        meanPercentAllItemsConsidered.increment(percentAllItemsConsidered);
    }

    log.info("{}", meanPercentTopRecsConsidered.getResult());
    log.info("{}", meanNDCG.getResult());
    log.info("{}", meanPercentAllItemsConsidered.getResult());

    // LSH should find most true top recommendations while scanning few candidates
    assertTrue(meanPercentTopRecsConsidered.getResult() > 0.85);
    assertTrue(meanNDCG.getResult() > 0.85);
    assertTrue(meanPercentAllItemsConsidered.getResult() < 0.1);
}

From source file:net.myrrix.online.candidate.LocationSensitiveHashTest.java

@Test
public void testLSH() {
    // Configure LSH before the model code reads these properties
    System.setProperty("model.lsh.sampleRatio", "0.1");
    System.setProperty("model.lsh.numHashes", "20");
    RandomGenerator random = RandomManager.getRandom();

    // Averages accumulated across ITERATIONS random trials
    Mean meanPercentTopRecsConsidered = new Mean();
    Mean meanNDCG = new Mean();
    Mean meanPercentAllItemsConsidered = new Mean();

    for (int trial = 0; trial < ITERATIONS; trial++) {

        // Build a random item-feature matrix and a random user vector
        FastByIDMap<float[]> itemVectors = new FastByIDMap<float[]>();
        for (int item = 0; item < NUM_ITEMS; item++) {
            itemVectors.put(item, RandomUtils.randomUnitVector(NUM_FEATURES, random));
        }
        float[] userVector = RandomUtils.randomUnitVector(NUM_FEATURES, random);

        double[] stats = doTestRandomVecs(itemVectors, userVector);
        double percentTopRecsConsidered = stats[0];
        double ndcg = stats[1];
        double percentAllItemsConsidered = stats[2];

        log.info("Considered {}% of all candidates, {} nDCG, got {}% recommendations correct",
                100 * percentAllItemsConsidered, ndcg, 100 * percentTopRecsConsidered);

        meanPercentTopRecsConsidered.increment(percentTopRecsConsidered);
        meanNDCG.increment(ndcg);
        meanPercentAllItemsConsidered.increment(percentAllItemsConsidered);
    }

    log.info("{}", meanPercentTopRecsConsidered.getResult());
    log.info("{}", meanNDCG.getResult());
    log.info("{}", meanPercentAllItemsConsidered.getResult());

    // LSH should find most true top recommendations while scanning few candidates
    assertTrue(meanPercentTopRecsConsidered.getResult() > 0.55);
    assertTrue(meanNDCG.getResult() > 0.55);
    assertTrue(meanPercentAllItemsConsidered.getResult() < 0.075);
}

From source file:com.cloudera.oryx.als.common.lsh.LocationSensitiveHashIT.java

@Test
public void testLSH() {
    RandomGenerator random = RandomManager.getRandom();

    // Averages accumulated across ITERATIONS random trials
    Mean meanPercentTopRecsConsidered = new Mean();
    Mean meanNDCG = new Mean();
    Mean meanPercentAllItemsConsidered = new Mean();

    for (int trial = 0; trial < ITERATIONS; trial++) {

        // Build a random item-feature matrix and a random user vector
        LongObjectMap<float[]> itemVectors = new LongObjectMap<float[]>();
        for (int item = 0; item < NUM_ITEMS; item++) {
            itemVectors.put(item, RandomUtils.randomUnitVector(NUM_FEATURES, random));
        }
        float[] userVector = RandomUtils.randomUnitVector(NUM_FEATURES, random);

        double[] stats = doTestRandomVecs(itemVectors, userVector);
        double percentTopRecsConsidered = stats[0];
        double ndcg = stats[1];
        double percentAllItemsConsidered = stats[2];

        log.info("Considered {}% of all candidates, {} nDCG, got {}% recommendations correct",
                100 * percentAllItemsConsidered, ndcg, 100 * percentTopRecsConsidered);

        meanPercentTopRecsConsidered.increment(percentTopRecsConsidered);
        meanNDCG.increment(ndcg);
        meanPercentAllItemsConsidered.increment(percentAllItemsConsidered);
    }

    log.info("{}", meanPercentTopRecsConsidered.getResult());
    log.info("{}", meanNDCG.getResult());
    log.info("{}", meanPercentAllItemsConsidered.getResult());

    // LSH should find most true top recommendations while scanning few candidates
    assertTrue(meanPercentTopRecsConsidered.getResult() > 0.8);
    assertTrue(meanNDCG.getResult() > 0.8);
    assertTrue(meanPercentAllItemsConsidered.getResult() < 0.09);
}

From source file:com.itemanalysis.jmetrik.graph.nicc.NonparametricCurveAnalysis.java

/**
 * Scans the regressor column once to compute its mean and standard deviation,
 * then places the evaluation grid uniformly over mean +/- 2.5 SD and derives
 * the nonparametric ICC bandwidth from the observed spread.
 *
 * Fixes over the original: uses try-with-resources so the Statement is closed
 * even if closing the ResultSet throws (the manual finally block could leak it),
 * drops the no-op catch-and-rethrow of SQLException, and removes the unused
 * Min/Max accumulators that were incremented on every row but only referenced
 * from commented-out code.
 *
 * @throws SQLException if the query or result iteration fails
 */
private void initializeGridPoints() throws SQLException {
    Table sqlTable = new Table(tableName.getNameForDatabase());
    SelectQuery select = new SelectQuery();
    select.addColumn(sqlTable, regressorVariable.getName().nameForDatabase());

    Mean mean = new Mean();
    StandardDeviation sd = new StandardDeviation();

    try (Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
            ResultSet rs = stmt.executeQuery(select.toString())) {
        while (rs.next()) {
            double value = rs.getDouble(regressorVariable.getName().nameForDatabase());
            if (!rs.wasNull()) { // skip SQL NULLs rather than treating them as 0.0
                mean.increment(value);
                sd.increment(value);
            }
            updateProgress();
        }
    }

    // Evaluation points: a uniform grid spanning mean +/- 2.5 standard deviations
    double sdv = sd.getResult();
    double mn = mean.getResult();
    double lower = mn - 2.5 * sdv;
    double upper = mn + 2.5 * sdv;
    bwAdjustment *= sdv; // scale the bandwidth adjustment by the observed SD
    bandwidth = new NonparametricIccBandwidth(sampleSize, bwAdjustment);
    gridPoints = command.getFreeOption("gridpoints").getInteger();
    uniformDistributionApproximation = new UniformDistributionApproximation(lower, upper, gridPoints);
}

From source file:com.itemanalysis.psychometrics.rasch.JMLE.java

/**
 * Computes PROX starting values for items, thresholds, and persons.
 */
public void prox() {
    // PROX estimate for each item, accumulating the mean difficulty as we go
    Mean difficultyMean = new Mean();
    for (VariableName name : items.keySet()) {
        RatingScaleItem item = items.get(name);
        item.prox(MPRIS(name), adjustedRIS(name));
        difficultyMean.increment(item.getDifficulty());
    }

    // Recenter item difficulties (and their proposals) about the mean
    double center = difficultyMean.getResult();
    for (VariableName name : items.keySet()) {
        RatingScaleItem item = items.get(name);
        item.recenter(center);
        item.recenterProposalDifficulty(center);
    }

    // PROX values for each nonextreme rating scale threshold group
    for (String groupId : thresholds.keySet()) {
        RatingScaleThresholds group = thresholds.get(groupId);
        if (!group.extremeThreshold()) {
            group.categoryProx(Spj(group.getGroupId()));
            group.recenterProposalThresholds();
            group.recenterThresholds();
        }
    }

    // PROX ability estimate for each person
    for (int i = 0; i < nPeople; i++) {
        theta[i] = prox(data[i]);
    }
}

From source file:com.itemanalysis.psychometrics.rasch.JMLE.java

/**
 * Applies a linear transformation to the person, item, and threshold scales.
 *
 * @param lt        transformation carrying the target scale and intercept
 * @param precision rounding precision forwarded to item/threshold transforms
 */
public void linearTransformation(DefaultLinearTransformation lt, int precision) {
    double newScale = lt.getScale();
    double newMean = lt.getIntercept();

    // Rescale person ability estimates from their current mean/SD
    Mean personMean = new Mean();
    StandardDeviation personSd = new StandardDeviation();
    lt.setScaleAndIntercept(personMean.evaluate(theta), newMean, personSd.evaluate(theta), newScale);
    for (int i = 0; i < theta.length; i++) {
        theta[i] = lt.transform(theta[i]);
    }

    // Rescale item difficulties from their current mean/SD
    Mean itemMean = new Mean();
    StandardDeviation itemSd = new StandardDeviation();
    for (VariableName name : items.keySet()) {
        double difficulty = items.get(name).getDifficulty();
        itemMean.increment(difficulty);
        itemSd.increment(difficulty);
    }
    lt.setScaleAndIntercept(itemMean.getResult(), newMean, itemSd.getResult(), newScale);
    for (VariableName name : items.keySet()) {
        items.get(name).linearTransformation(lt, precision);
    }

    // Rescale each threshold group about its own mean and standard deviation
    for (String groupId : thresholds.keySet()) {
        RatingScaleThresholds group = thresholds.get(groupId);
        lt.setScaleAndIntercept(group.getThresholdMean(), newMean,
                group.getThresholdStandardDeviation(), newScale);
        group.linearTransformation(lt, precision);
    }
}

From source file:com.itemanalysis.psychometrics.irt.estimation.JointMaximumLikelihoodEstimation.java

/**
 * Updates the ability estimate of every person.
 *
 * @param maxIter    maximum number of iterations in each person update.
 * @param converge   convergence criterion for the person update, expressed as
 *                   the maximum change in logits.
 * @param adjustment extreme score adjustment.
 * @param centerItems establish identification by centering items about the mean
 *                    item difficulty (the usual Rasch approach). If false,
 *                    identification is established by centering nonextreme
 *                    persons about the mean ability instead.
 * @return maximum observed change in logits from updating all examinees.
 */
private double updateAllPersons(int maxIter, double converge, double adjustment, boolean centerItems) {
    Mean nonextremeMean = new Mean();
    double maxDelta = 0.0;

    for (int i = 0; i < nPeople; i++) {
        double previousTheta = theta[i];
        theta[i] = updatePerson(i, maxIter, converge, adjustment);
        // Only nonextreme persons contribute to the mean and to convergence
        if (extremePerson[i] == 0) {
            nonextremeMean.increment(theta[i]);
            maxDelta = Math.max(Math.abs(theta[i] - previousTheta), maxDelta);
        }
    }

    // When identifying by person centering, shift nonextreme abilities to mean zero
    if (!centerItems) {
        double meanTheta = nonextremeMean.getResult();
        for (int i = 0; i < nPeople; i++) {
            if (extremePerson[i] == 0) {
                theta[i] -= meanTheta;
            }
        }
    }

    return maxDelta;
}

From source file:com.itemanalysis.psychometrics.irt.estimation.JointMaximumLikelihoodEstimation.java

/**
 * Computes PROX estimates of item difficulty, used as starting values in JMLE.
 */
public void itemProx() {
    for (int j = 0; j < nItems; j++) {
        // Skip dropped items and items with fixed parameters
        if (droppedStatus[j] != 0 || irm[j].isFixed()) {
            continue;
        }

        // Difficulty prox: log odds of failure vs. success on the item
        double maxItemScore = itemSummary[j].maxSip();
        double adjustedScore = itemSummary[j].Sip();
        double p = adjustedScore / maxItemScore;
        double prox = Math.log((1.0 - p) / p);
        irm[j].setDifficulty(prox);
        irm[j].setProposalDifficulty(prox);

        int ncat = irm[j].getNcat();

        // Threshold prox values for polytomous items: log odds of adjacent
        // category totals, recentered to a mean of zero.
        if (ncat > 2) {
            double[] threshold = new double[ncat - 1];
            RaschRatingScaleGroup group = rsg.get(irm[j].getGroupId());
            Mean thresholdMean = new Mean();

            double previous = 0.0;
            for (int k = 0; k < ncat; k++) {
                double current = group.SpjAt(k);
                if (k > 0) {
                    threshold[k - 1] = Math.log(previous / current);
                    thresholdMean.increment(threshold[k - 1]);
                }
                previous = current;
            }

            double center = thresholdMean.getResult();
            for (int k = 0; k < ncat - 1; k++) {
                threshold[k] -= center;
            }

            irm[j].setThresholdParameters(threshold);
            irm[j].setProposalThresholds(threshold);
        }
    }
}

From source file:com.itemanalysis.psychometrics.irt.estimation.JointMaximumLikelihoodEstimation.java

/**
 * Updates the thresholds for a single rating scale group. Updates only involve
 * nonextreme examinees that respond to the item.
 *
 * Fixes over the original: {@code getThresholds()} was redundantly re-fetched
 * on every nonextreme person iteration even though the result was already held
 * in a local; the unused local {@code int[] pos = getPositions()} is removed;
 * and the thresholds local is assigned once instead of null-then-assigned.
 *
 * @param raschRatingScaleGroup group for which thresholds are updated.
 * @return maximum change in logits for this update.
 */
private double updateThresholds(RaschRatingScaleGroup raschRatingScaleGroup) {
    int nCat = raschRatingScaleGroup.getNumberOfCategories();
    double[] thresholds = raschRatingScaleGroup.getThresholds();
    double[] catKSum = new double[nCat];
    double[] proposalThresholds = new double[nCat - 1];
    Mean tMean = new Mean();
    double maxDelta = 0.0;

    // Expected category counts summed over nonextreme persons
    for (int i = 0; i < nPeople; i++) {
        if (extremePerson[i] == 0) {
            for (int k = 0; k < nCat; k++) {
                catKSum[k] += raschRatingScaleGroup.probabilitySumAt(theta[i], k);
            }
        }
    }

    // Newton-style update of each threshold from observed vs. expected counts
    for (int k = 0; k < nCat - 1; k++) {
        double thresh = thresholds[k];
        double proposed = thresh
                - Math.log(raschRatingScaleGroup.TpjAt(k + 1) / raschRatingScaleGroup.TpjAt(k))
                + Math.log(catKSum[k + 1] / catKSum[k]);
        // do not change threshold by more than one logit - from WINSTEPS documentation
        proposalThresholds[k] = Math.max(Math.min(thresh + 1.0, proposed), thresh - 1.0);
        tMean.increment(proposalThresholds[k]);
    }

    // Recenter thresholds around the mean threshold
    double m = tMean.getResult();
    for (int k = 0; k < nCat - 1; k++) {
        proposalThresholds[k] = proposalThresholds[k] - m;
        maxDelta = Math.max(Math.abs(proposalThresholds[k] - thresholds[k]), maxDelta);
    }
    raschRatingScaleGroup.setProposalThresholds(proposalThresholds);

    return maxDelta;
}