Example usage for org.apache.commons.math3.stat.descriptive.moment Mean getResult

Introduction

On this page you can find example usages of org.apache.commons.math3.stat.descriptive.moment.Mean.getResult(). The method returns the arithmetic mean of all values added to the statistic so far, or Double.NaN if no values have been added yet.

Prototype

@Override
public double getResult() 
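
For quick orientation, here is a minimal, self-contained sketch (not taken from the sources below) of the storeless pattern that every example on this page follows: values are accumulated one at a time with increment(double), and the running mean is read back with getResult().

import org.apache.commons.math3.stat.descriptive.moment.Mean;

public class MeanGetResultExample {
    public static void main(String[] args) {
        Mean mean = new Mean();
        // getResult() returns Double.NaN until at least one value is added
        mean.increment(1.0);
        mean.increment(2.0);
        mean.increment(3.0);
        System.out.println(mean.getResult()); // prints 2.0
        System.out.println(mean.getN());      // prints 3
        mean.clear();                         // resets the statistic for reuse
    }
}

Mean also supports one-shot computation over an array via evaluate(double[]), but the examples below all use the incremental increment()/getResult() pattern shown here.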

Usage

From source file:com.cloudera.oryx.app.serving.als.LoadBenchmark.java

private static void log(long currentCount, Mean meanReqTimeMS, long start) {
    long used = JVMUtils.getUsedMemory() / 1_000_000;
    long requestsPerSecond = Math.round((1000.0 * currentCount) / (System.currentTimeMillis() - start));
    long msPerRequest;
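    // Mean is not thread-safe, so access to the shared instance is synchronized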
    synchronized (meanReqTimeMS) {
        msPerRequest = Math.round(meanReqTimeMS.getResult());
    }
    log.info("{} complete\t{} req/s\t~{} ms/req\t{}MB heap", currentCount, requestsPerSecond, msPerRequest,
            used);
}

From source file:com.sciaps.utils.Util.java

public static Spectrum createAverage(Collection<? extends Spectrum> shots, double sampleRate) {

    Min minWL = new Min();
    Max maxWL = new Max();
    for (Spectrum shot : shots) {
        minWL.increment(shot.getValidRange().getMinimumDouble());
        maxWL.increment(shot.getValidRange().getMaximumDouble());
    }

    double range = maxWL.getResult() - minWL.getResult();
    int numSamples = (int) Math.floor(range * sampleRate);
    double[][] data = new double[2][numSamples];
    Mean avgy = new Mean();
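    // One Mean is reused for every sample point; clear() resets it before each x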
    for (int i = 0; i < numSamples; i++) {
        double x = minWL.getResult() + i * (1 / sampleRate);
        avgy.clear();
        for (Spectrum shot : shots) {
            if (shot.getValidRange().containsDouble(x)) {
                UnivariateFunction iv = shot.getIntensityFunction();
                double y = iv.value(x);
                avgy.increment(y);
            }
        }

        data[0][i] = x;
        data[1][i] = avgy.getResult();
    }

    RawDataSpectrum newSpectrum = new RawDataSpectrum(data);

    return newSpectrum;
}

From source file:creative.framework.main.ApparelContext.java

private StringBuilder meansToString(List<Mean> means) {
    StringBuilder m = new StringBuilder();
    NumberFormat formatter = new DecimalFormat("#0.00");

    for (Mean mean : means) {
        m.append(formatter.format(mean.getResult())).append(" ");
    }
    return m;
}

From source file:eu.crisis_economics.abm.algorithms.series.TestAbstractSeries.java

/**
  * Assert that the long term mean of values drawn from a {@link RandomSeries} object
  * is as expected.
  * 
  * @param series (<code>S</code>) <br>
  *        The {@link RandomSeries} object to test.
  * @param numberOfSamples <br>
  *        The number of samples to draw from <code>S</code>.
  * @param expectedLongTermMean <br>
  *        The expected long term mean of the series.
  */
protected void assertLongTermMean(RandomSeries series, final int numberOfSamples,
        final double expectedLongTermMean) {
    final Mean mean = new Mean();

    for (int i = 0; i < numberOfSamples; ++i)
        mean.increment(series.next());

    Assert.assertEquals(mean.getResult(), expectedLongTermMean, 1.e-2);
}

From source file:com.cloudera.oryx.app.speed.rdf.RDFSpeedModelManager.java

@Override
public Iterable<String> buildUpdates(JavaPairRDD<String, String> newData) {
    if (model == null) {
        return Collections.emptyList();
    }

    JavaRDD<Example> examplesRDD = newData.values().map(MLFunctions.PARSE_FN)
            .map(new ToExampleFn(inputSchema, model.getEncodings()));

    DecisionForest forest = model.getForest();
    JavaPairRDD<Pair<Integer, String>, Iterable<Feature>> targetsByTreeAndID = examplesRDD
            .flatMapToPair(new ToTreeNodeFeatureFn(forest)).groupByKey();

    List<String> updates = new ArrayList<>();

    if (inputSchema.isClassification()) {
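        // Classification: emit per-(tree, node) counts of target categories as JSON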

        List<Tuple2<Pair<Integer, String>, Map<Integer, Long>>> countsByTreeAndID = targetsByTreeAndID
                .mapValues(new TargetCategoryCountFn()).collect();
        for (Tuple2<Pair<Integer, String>, Map<Integer, Long>> p : countsByTreeAndID) {
            Integer treeID = p._1().getFirst();
            String nodeID = p._1().getSecond();
            updates.add(TextUtils.joinJSON(Arrays.asList(treeID, nodeID, p._2())));
        }

    } else {
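        // Regression: emit each node's mean new target value together with its sample count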

        List<Tuple2<Pair<Integer, String>, Mean>> meanTargetsByTreeAndID = targetsByTreeAndID
                .mapValues(new MeanNewTargetFn()).collect();
        for (Tuple2<Pair<Integer, String>, Mean> p : meanTargetsByTreeAndID) {
            Integer treeID = p._1().getFirst();
            String nodeID = p._1().getSecond();
            Mean mean = p._2();
            updates.add(TextUtils.joinJSON(Arrays.asList(treeID, nodeID, mean.getResult(), mean.getN())));
        }

    }

    return updates;
}

From source file:com.cloudera.oryx.als.computation.iterate.row.RowStep.java

@Override
protected MRPipeline createPipeline() throws IOException {

    IterationState iterationState = getIterationState();
    String iterationKey = iterationState.getIterationKey();
    boolean x = iterationState.isComputingX();
    int lastIteration = iterationState.getIteration() - 1;
    Store store = Store.get();

    JobStepConfig config = getConfig();
    String instanceDir = config.getInstanceDir();
    int generationID = config.getGenerationID();

    if (store.exists(Namespaces.getInstanceGenerationPrefix(instanceDir, generationID) + "X/", false)) {
        // Actually, looks like whole computation of X/Y finished -- just proceed
        return null;
    }

    // Take the opportunity to clean out iteration before last, if computing X
    if (x) {
        String lastLastIterationKey = Namespaces.getIterationsPrefix(instanceDir, generationID)
                + (lastIteration - 1) + '/';
        if (store.exists(lastLastIterationKey, false)) {
            log.info("Deleting old iteration data from {}", lastLastIterationKey);
            store.recursiveDelete(lastLastIterationKey);
        }
    }

    String yKey;
    if (x) {
        yKey = Namespaces.getIterationsPrefix(instanceDir, generationID) + lastIteration + "/Y/";
    } else {
        yKey = iterationKey + "X/";
    }

    String xKey = iterationKey + (x ? "X/" : "Y/");
    String tempKey = Namespaces.getTempPrefix(instanceDir, generationID);
    String rKey = tempKey + (x ? "userVectors/" : "itemVectors/");

    if (!validOutputPath(xKey)) {
        return null;
    }

    MRPipeline p = createBasicPipeline(RowReduceFn.class);
    Configuration conf = p.getConfiguration();
    conf.set(Y_KEY_KEY, yKey);

    String popularKey = tempKey + (x ? "popularItemsByUserPartition/" : "popularUsersByItemPartition/");
    conf.set(POPULAR_KEY, popularKey);

    String testPrefix = Namespaces.getInstanceGenerationPrefix(instanceDir, generationID) + "test/";
    conf.set(MAP_KEY, testPrefix);

    YState yState = new YState(ALSTypes.DENSE_ROW_MATRIX); // Shared Y-Matrix state

    GroupingOptions opts = groupingOptions();
    PCollection<MatrixRow> matrix = PTables.asPTable(p.read(input(rKey, ALSTypes.SPARSE_ROW_MATRIX)))
            .groupByKey(opts).parallelDo("rowReduce", new RowReduceFn(yState), ALSTypes.DENSE_ROW_MATRIX)
            .write(output(xKey));

    if (!x) {
        matrix.parallelDo("asPair", MatrixRow.AS_PAIR, Avros.tableOf(Avros.longs(), ALSTypes.FLOAT_ARRAY))
                .parallelDo("convergenceSample", new ConvergenceSampleFn(yState), Avros.strings())
                .write(compressedTextOutput(p.getConfiguration(), iterationKey + "Yconvergence"));
    }

    if (x && ConfigUtils.getDefaultConfig().getDouble("model.test-set-fraction") > 0.0
            && store.exists(testPrefix, false)) {
        PCollection<Double> aps = matrix
                .parallelDo("asPair", MatrixRow.AS_PAIR, Avros.tableOf(Avros.longs(), ALSTypes.FLOAT_ARRAY))
                .parallelDo("computeAP", new ComputeUserAPFn(yState), Avros.doubles());
        Mean meanAveragePrecision = new Mean();
        for (double ap : aps.materialize()) {
            meanAveragePrecision.increment(ap);
        }
        log.info("Mean average precision: {}", meanAveragePrecision.getResult());

        File tempMAPFile = File.createTempFile("MAP", ".txt");
        tempMAPFile.deleteOnExit();
        Files.write(Double.toString(meanAveragePrecision.getResult()), tempMAPFile, StandardCharsets.UTF_8);
        store.upload(iterationKey + "MAP", tempMAPFile, false);
        IOUtils.delete(tempMAPFile);
    }

    return p;
}

From source file:com.cloudera.oryx.rdf.computation.RDFDistributedGenerationRunner.java

@Override
protected void doPost() throws IOException {

    String instanceGenerationPrefix = Namespaces.getInstanceGenerationPrefix(getInstanceDir(),
            getGenerationID());
    String outputPathKey = instanceGenerationPrefix + "trees/";
    Store store = Store.get();
    PMML joinedForest = null;

    // TODO This is still loading all trees into memory, which can be quite large.
    // To do better we would have to manage XML output more directly.

    Map<String, Mean> columnNameToMeanImportance = Maps.newHashMap();

    for (String treePrefix : store.list(outputPathKey, true)) {
        log.info("Reading trees from file {}", treePrefix);
        for (String treePMMLAsLine : new FileLineIterable(store.readFrom(treePrefix))) {
            PMML treePMML;
            try {
                treePMML = IOUtil.unmarshal(new InputSource(new StringReader(treePMMLAsLine)));
            } catch (SAXException e) {
                throw new IOException(e);
            } catch (JAXBException e) {
                throw new IOException(e);
            }

            if (joinedForest == null) {
                joinedForest = treePMML;
                updateMeanImportances(columnNameToMeanImportance, treePMML.getModels().get(0));
            } else {
                MiningModel existingModel = (MiningModel) joinedForest.getModels().get(0);
                MiningModel nextModel = (MiningModel) treePMML.getModels().get(0);
                updateMeanImportances(columnNameToMeanImportance, nextModel);
                existingModel.getSegmentation().getSegments().addAll(nextModel.getSegmentation().getSegments());
            }
        }
    }

    // Stitch together feature importances
    for (MiningField field : joinedForest.getModels().get(0).getMiningSchema().getMiningFields()) {
        String name = field.getName().getValue();
        Mean importance = columnNameToMeanImportance.get(name);
        if (importance == null) {
            field.setImportance(null);
        } else {
            field.setImportance(importance.getResult());
        }
    }

    log.info("Writing combined model file");
    File tempJoinedForestFile = File.createTempFile("model-", ".pmml.gz");
    tempJoinedForestFile.deleteOnExit();
    OutputStream out = IOUtils.buildGZIPOutputStream(new FileOutputStream(tempJoinedForestFile));
    try {
        IOUtil.marshal(joinedForest, out);
    } catch (JAXBException e) {
        throw new IOException(e);
    } finally {
        out.close();
    }

    log.info("Uploading combined model file");
    store.upload(instanceGenerationPrefix + "model.pmml.gz", tempJoinedForestFile, false);
    IOUtils.delete(tempJoinedForestFile);
}

From source file:com.cloudera.oryx.als.computation.iterate.row.ComputeUserAPFn.java

@Override
public void process(Pair<Long, float[]> input, Emitter<Double> emitter) {

    LongSet ids = testData.get(input.first());
    if (ids == null || ids.isEmpty()) {
        return;
    }

    float[] userVector = input.second();
    LongObjectMap<float[]> Y = yState.getY();
    long[] itemIDs = ids.toArray();

    double[] scores = new double[itemIDs.length];
    for (int i = 0; i < itemIDs.length; i++) {
        long itemID = itemIDs[i];
        float[] itemVector = Y.get(itemID);
        if (itemVector == null) {
            continue;
        }
        scores[i] = SimpleVectorMath.dot(userVector, itemVector);
    }

    int[] rank = new int[itemIDs.length];
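    // rank[i] will count how many item vectors in Y score strictly higher than test item i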

    for (LongObjectMap.MapEntry<float[]> entry : Y.entrySet()) {
        double score = SimpleVectorMath.dot(userVector, entry.getValue());
        for (int i = 0; i < itemIDs.length; i++) {
            if (score > scores[i]) {
                rank[i]++;
            }
        }
    }

    Arrays.sort(rank);

    Mean precision = new Mean();
    double totalPrecisionTimesRelevance = 0.0;
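    // Walk the sorted ranks keeping a running mean of precision-at-rank; the sum of
    // that running mean, divided by the number of relevant items, is the average precision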
    for (int i = 0; i < rank.length; i++) {
        int relevantRetrieved = i + 1;
        int precisionAt = rank[i] + 1;
        precision.increment((double) relevantRetrieved / precisionAt);
        totalPrecisionTimesRelevance += precision.getResult();
    }
    double averagePrecision = totalPrecisionTimesRelevance / rank.length;

    log.info("Average precision: {}", averagePrecision);

    emitter.emit(averagePrecision);
}

From source file:com.cloudera.oryx.als.computation.local.ComputeMAP.java

@Override
public Object call() throws IOException {

    LongObjectMap<LongSet> testData = new LongObjectMap<>();

    File[] files = testDir.listFiles(IOUtils.NOT_HIDDEN);
    if (files != null) {
        for (File file : files) {
            for (CharSequence line : new FileLineIterable(file)) {
                String[] columns = DelimitedDataUtils.decode(line);
                long userID = StringLongMapping.toLong(columns[0]);
                long itemID = StringLongMapping.toLong(columns[1]);
                LongSet itemIDs = testData.get(userID);
                if (itemIDs == null) {
                    itemIDs = new LongSet();
                    testData.put(userID, itemIDs);
                }
                itemIDs.add(itemID);
            }
        }
    }

    Mean meanAveragePrecision = new Mean();
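    // Per-user average precision below is computed exactly as in ComputeUserAPFn,
    // then aggregated into a single mean over all users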

    LongPrimitiveIterator it = X.keySetIterator();
    while (it.hasNext()) {
        long userID = it.nextLong();
        float[] userVector = X.get(userID);

        LongSet ids = testData.get(userID);
        if (ids == null || ids.isEmpty()) {
            continue;
        }

        long[] itemIDs = ids.toArray();

        double[] scores = new double[itemIDs.length];
        for (int i = 0; i < itemIDs.length; i++) {
            long itemID = itemIDs[i];
            float[] itemVector = Y.get(itemID);
            if (itemVector == null) {
                continue;
            }
            scores[i] = SimpleVectorMath.dot(userVector, itemVector);
        }

        int[] rank = new int[itemIDs.length];

        for (LongObjectMap.MapEntry<float[]> entry : Y.entrySet()) {
            double score = SimpleVectorMath.dot(userVector, entry.getValue());
            for (int i = 0; i < itemIDs.length; i++) {
                if (score > scores[i]) {
                    rank[i]++;
                }
            }
        }

        Arrays.sort(rank);

        Mean precision = new Mean();
        double totalPrecisionTimesRelevance = 0.0;
        for (int i = 0; i < rank.length; i++) {
            int relevantRetrieved = i + 1;
            int precisionAt = rank[i] + 1;
            precision.increment((double) relevantRetrieved / precisionAt);
            totalPrecisionTimesRelevance += precision.getResult();
        }
        double averagePrecision = totalPrecisionTimesRelevance / rank.length;

        meanAveragePrecision.increment(averagePrecision);
    }

    log.info("Mean average precision: {}", meanAveragePrecision.getResult());

    return null;
}

From source file:com.itemanalysis.psychometrics.irt.equating.RobustZEquatingTest.java

private void testB() {
    double[] bDiff = new double[nB];
    zb = new RobustZ[nB];

    for (int i = 0; i < nB; i++) {
        bDiff[i] = bY[i] - slope * bX[i];
    }

    double median = percentile.evaluate(bDiff, 50);
    double q3 = percentile.evaluate(bDiff, 75);
    double q1 = percentile.evaluate(bDiff, 25);
    double iqr = q3 - q1;
    Mean mean = new Mean();
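    // Average only the b-differences whose robust z-score is not significant (outliers excluded)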

    for (int i = 0; i < nB; i++) {
        zb[i] = new RobustZ(bDiff[i], median, iqr);
        if (!zb[i].significant(significanceLevel)) {
            mean.increment(bDiff[i]);
        }
    }
    intercept = mean.getResult();
}