Example usage for org.apache.commons.math3.linear RealVector mapMultiplyToSelf

List of usage examples for org.apache.commons.math3.linear RealVector mapMultiplyToSelf

Introduction

On this page you can find example usage for org.apache.commons.math3.linear RealVector mapMultiplyToSelf.

Prototype

public RealVector mapMultiplyToSelf(double d) 

Source Link

Document

Multiply each entry of this vector by the factor d, changing the vector in place and returning this for convenience.
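
The short sketch below contrasts mapMultiplyToSelf with its non-mutating counterpart mapMultiply: mapMultiply returns a freshly allocated scaled copy, while mapMultiplyToSelf scales the entries of the receiving vector in place and returns the same instance.

import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealVector;

public class MapMultiplyToSelfDemo {
    public static void main(String[] args) {
        RealVector v = MatrixUtils.createRealVector(new double[] { 1.0, 2.0, 3.0 });

        // mapMultiply leaves v untouched and returns a new scaled vector
        RealVector copy = v.mapMultiply(2.0);         // {2, 4, 6}, v still {1, 2, 3}

        // mapMultiplyToSelf scales v in place and returns the same instance
        RealVector self = v.mapMultiplyToSelf(2.0);   // v is now {2, 4, 6}
        System.out.println(self == v);                // true
        System.out.println(v.getEntry(0));            // 2.0
    }
}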

Usage

From source file: org.grouplens.samantha.modeler.reinforce.LinearUCB.java

public List<StochasticOracle> getStochasticOracle(List<LearningInstance> instances) {
    List<StochasticOracle> oracles = new ArrayList<>(instances.size());
    for (LearningInstance ins : instances) {
        StochasticOracle orc = new StochasticOracle();
        StandardLearningInstance instance = (StandardLearningInstance) ins;
        orc.setValues(-instance.getLabel(), instance.getLabel(), instance.getWeight());
        int dim = features.size();
        RealVector x = extractDenseVector(dim, ins);
        RealMatrix increA = x.outerProduct(x);
        RealVector increB = x.mapMultiplyToSelf(instance.getLabel());
        for (int i = 0; i < dim; i++) {
            orc.addScalarOracle(LinearUCBKey.B.get(), i, -increB.getEntry(i));
            orc.addVectorOracle(LinearUCBKey.A.get(), i, increA.getRowVector(i).mapMultiplyToSelf(-1.0));
        }
        oracles.add(orc);
    }
    return oracles;
}
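
One detail worth noting in the snippet above: mapMultiplyToSelf overwrites x, so the outer product is taken before the scaling; reversing the two calls would fold the label into increA as well. The sketch below, using a hypothetical label and feature vector in place of the values pulled from the learning instance, makes the in-place mutation visible.

import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;

public class LinUcbIncrementSketch {
    public static void main(String[] args) {
        // Hypothetical label and feature vector standing in for
        // instance.getLabel() and extractDenseVector(dim, ins) above.
        double label = 2.0;
        RealVector x = MatrixUtils.createRealVector(new double[] { 0.5, 0.0, 1.0 });

        // The outer product is computed from the unscaled x ...
        RealMatrix increA = x.outerProduct(x);
        // ... and only then is x scaled in place to label * x.
        RealVector increB = x.mapMultiplyToSelf(label);

        System.out.println(increA.getEntry(0, 0)); // 0.25, built before the scaling
        System.out.println(increB.getEntry(0));    // 1.0 == label * 0.5
        System.out.println(increB == x);           // true: the same, now scaled, instance
    }
}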

From source file: org.knime.knip.core.algorithm.convolvers.KernelTools.java

@SuppressWarnings("unchecked")
public static <K extends RealType<K> & NativeType<K>> RandomAccessibleInterval<K>[] decomposeKernel(
        final RandomAccessibleInterval<K> kernel) {

    SingularValueDecomposition svd = isDecomposable(SubsetOperations.subsetview(kernel, kernel));

    if (svd != null) {
        int tmp = 0;
        for (int d = 0; d < kernel.numDimensions(); d++) {
            if (kernel.dimension(d) > 1) {
                tmp++;
            }
        }
        int[] kernelDims = new int[tmp];
        tmp = 0;
        for (int d = 0; d < kernel.numDimensions(); d++) {
            if (kernel.dimension(d) > 1) {
                kernelDims[tmp++] = d;
            }
        }

        final RealVector v = svd.getV().getColumnVector(0);
        final RealVector u = svd.getU().getColumnVector(0);
        final double s = -Math.sqrt(svd.getS().getEntry(0, 0));
        v.mapMultiplyToSelf(s);
        u.mapMultiplyToSelf(s);

        K type = kernel.randomAccess().get().createVariable();

        RandomAccessibleInterval<K>[] decomposed = new RandomAccessibleInterval[2];

        decomposed[0] = KernelTools.adjustKernelDimensions(kernel.numDimensions(), new int[] { kernelDims[0] },
                KernelTools.vectorToImage(v, type, 1, new ArrayImgFactory<K>()));
        decomposed[1] = KernelTools.adjustKernelDimensions(kernel.numDimensions(), new int[] { kernelDims[1] },
                KernelTools.vectorToImage(u, type, 1, new ArrayImgFactory<K>()));
        return decomposed;

    } else {
        return new RandomAccessibleInterval[] { kernel };
    }

}
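
In the snippet above the first left and right singular vectors are each scaled in place by -sqrt of the leading singular value; for a separable (rank-1) kernel their outer product then reproduces the original kernel, the two minus signs cancelling. The self-contained sketch below checks that relationship on a hand-built 3x3 separable kernel using plain Commons Math types.

import org.apache.commons.math3.linear.Array2DRowRealMatrix;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.linear.SingularValueDecomposition;

public class SeparableKernelSketch {
    public static void main(String[] args) {
        // A separable 3x3 kernel: the outer product of {1, 2, 1} with itself.
        RealMatrix kernel = new Array2DRowRealMatrix(new double[][] {
                { 1, 2, 1 },
                { 2, 4, 2 },
                { 1, 2, 1 }
        });

        SingularValueDecomposition svd = new SingularValueDecomposition(kernel);
        RealVector u = svd.getU().getColumnVector(0);
        RealVector v = svd.getV().getColumnVector(0);

        // Scale both singular vectors in place by -sqrt(sigma_0); the minus
        // signs cancel in the outer product below.
        double s = -Math.sqrt(svd.getS().getEntry(0, 0));
        u.mapMultiplyToSelf(s);
        v.mapMultiplyToSelf(s);

        RealMatrix reconstructed = u.outerProduct(v);
        System.out.println(reconstructed.subtract(kernel).getFrobeniusNorm()); // ~0
    }
}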

From source file: org.lenskit.pf.HPFModelProvider.java

@Override
public HPFModel get() {

    final int userNum = ratings.getUserIndex().size();
    final int itemNum = ratings.getItemIndex().size();
    final int featureCount = hyperParameters.getFeatureCount();
    final double a = hyperParameters.getUserWeightShpPrior();
    final double aPrime = hyperParameters.getUserActivityShpPrior();
    final double bPrime = hyperParameters.getUserActivityPriorMean();
    final double c = hyperParameters.getItemWeightShpPrior();
    final double cPrime = hyperParameters.getItemActivityShpPrior();
    final double dPrime = hyperParameters.getItemActivityPriorMean();
    final double kappaShpU = aPrime + featureCount * a;
    final double tauShpI = cPrime + featureCount * c;

    RealMatrix gammaShp = MatrixUtils.createRealMatrix(userNum, featureCount);
    RealMatrix gammaRte = MatrixUtils.createRealMatrix(userNum, featureCount);
    RealVector kappaShp = MatrixUtils.createRealVector(new double[userNum]);
    RealVector kappaRte = MatrixUtils.createRealVector(new double[userNum]);
    RealMatrix lambdaShp = MatrixUtils.createRealMatrix(itemNum, featureCount);
    RealMatrix lambdaRte = MatrixUtils.createRealMatrix(itemNum, featureCount);
    RealVector tauShp = MatrixUtils.createRealVector(new double[itemNum]);
    RealVector tauRte = MatrixUtils.createRealVector(new double[itemNum]);
    RealMatrix gammaShpNext = MatrixUtils.createRealMatrix(userNum, featureCount);
    RealMatrix lambdaShpNext = MatrixUtils.createRealMatrix(itemNum, featureCount);
    gammaShpNext = gammaShpNext.scalarAdd(a);
    lambdaShpNext = lambdaShpNext.scalarAdd(c);
    RealVector phiUI = MatrixUtils.createRealVector(new double[featureCount]);

    initialize(gammaShp, gammaRte, kappaRte, kappaShp, lambdaShp, lambdaRte, tauRte, tauShp);
    logger.info("initialization finished");

    final List<RatingMatrixEntry> train = ratings.getTrainRatings();
    final List<RatingMatrixEntry> validation = ratings.getValidationRatings();
    double avgPLLPre = Double.MAX_VALUE;
    double avgPLLCurr = 0.0;
    double diffPLL = 1.0;
    int iterCount = 1;

    while (iterCount < maxIterCount && diffPLL > threshold) {

        // update phi
        Iterator<RatingMatrixEntry> allUIPairs = train.iterator();
        while (allUIPairs.hasNext()) {
            RatingMatrixEntry entry = allUIPairs.next();
            int item = entry.getItemIndex();
            int user = entry.getUserIndex();
            double ratingUI = entry.getValue();
            if (ratingUI <= 0) {
                continue;
            }

            for (int k = 0; k < featureCount; k++) {
                double gammaShpUK = gammaShp.getEntry(user, k);
                double gammaRteUK = gammaRte.getEntry(user, k);
                double lambdaShpIK = lambdaShp.getEntry(item, k);
                double lambdaRteIK = lambdaRte.getEntry(item, k);
                double phiUIK = Gamma.digamma(gammaShpUK) - Math.log(gammaRteUK) + Gamma.digamma(lambdaShpIK)
                        - Math.log(lambdaRteIK);
                phiUI.setEntry(k, phiUIK);
            }
            logNormalize(phiUI);

            if (ratingUI > 1) {
                phiUI.mapMultiplyToSelf(ratingUI);
            }

            for (int k = 0; k < featureCount; k++) {
                double value = phiUI.getEntry(k);
                gammaShpNext.addToEntry(user, k, value);
                lambdaShpNext.addToEntry(item, k, value);
            }

        }
        logger.info("iteration {} first phrase update finished", iterCount);

        RealVector gammaRteSecondTerm = MatrixUtils.createRealVector(new double[featureCount]);
        for (int k = 0; k < featureCount; k++) {
            double gammaRteUK = 0.0;
            for (int item = 0; item < itemNum; item++) {
                gammaRteUK += lambdaShp.getEntry(item, k) / lambdaRte.getEntry(item, k);
            }
            gammaRteSecondTerm.setEntry(k, gammaRteUK);
        }

        // update user parameters
        double kappaRteFirstTerm = aPrime / bPrime;
        for (int user = 0; user < userNum; user++) {

            double gammaRteUKFirstTerm = kappaShp.getEntry(user) / kappaRte.getEntry(user);
            double kappaRteU = 0.0;

            for (int k = 0; k < featureCount; k++) {
                double gammaShpUK = gammaShpNext.getEntry(user, k);
                gammaShp.setEntry(user, k, gammaShpUK);
                gammaShpNext.setEntry(user, k, a);
                double gammaRteUK = gammaRteSecondTerm.getEntry(k);
                gammaRteUK += gammaRteUKFirstTerm;
                gammaRte.setEntry(user, k, gammaRteUK);
                kappaRteU += gammaShpUK / gammaRteUK;
            }
            kappaRteU += kappaRteFirstTerm;
            kappaRte.setEntry(user, kappaRteU);
        }

        logger.info("iteration {} second phrase update finished", iterCount);

        RealVector lambdaRteSecondTerm = MatrixUtils.createRealVector(new double[featureCount]);
        for (int k = 0; k < featureCount; k++) {
            double lambdaRteIK = 0.0;
            for (int user = 0; user < userNum; user++) {
                lambdaRteIK += gammaShp.getEntry(user, k) / gammaRte.getEntry(user, k);
            }
            lambdaRteSecondTerm.setEntry(k, lambdaRteIK);
        }

        // update item parameters
        double tauRteFirstTerm = cPrime / dPrime;
        for (int item = 0; item < itemNum; item++) {

            double lambdaRteFirstTerm = tauShp.getEntry(item) / tauRte.getEntry(item);
            double tauRteI = 0.0;

            for (int k = 0; k < featureCount; k++) {
                double lambdaShpIK = lambdaShpNext.getEntry(item, k);
                lambdaShp.setEntry(item, k, lambdaShpIK);
                lambdaShpNext.setEntry(item, k, c);
                double lambdaRteIK = lambdaRteSecondTerm.getEntry(k);

                // plus first term
                lambdaRteIK += lambdaRteFirstTerm;
                lambdaRte.setEntry(item, k, lambdaRteIK);
                // update tauRteI second term
                tauRteI += lambdaShpIK / lambdaRteIK;
            }
            tauRteI += tauRteFirstTerm;
            tauRte.setEntry(item, tauRteI);
        }

        logger.info("iteration {} third phrase update finished", iterCount);

        // compute the average predictive log likelihood of the validation data every {@code iterationFrequency} iterations

        if (iterCount == 1) {
            for (int user = 0; user < userNum; user++) {
                kappaShp.setEntry(user, kappaShpU);
            }
            for (int item = 0; item < itemNum; item++) {
                tauShp.setEntry(item, tauShpI);
            }
        }

        if ((iterCount % iterationFrequency) == 0) {
            Iterator<RatingMatrixEntry> valIter = validation.iterator();
            avgPLLCurr = 0.0;

            while (valIter.hasNext()) {
                RatingMatrixEntry ratingEntry = valIter.next();
                int user = ratingEntry.getUserIndex();
                int item = ratingEntry.getItemIndex();
                double rating = ratingEntry.getValue();
                double eThetaBeta = 0.0;
                for (int k = 0; k < featureCount; k++) {
                    double eThetaUK = gammaShp.getEntry(user, k) / gammaRte.getEntry(user, k);
                    double eBetaIK = lambdaShp.getEntry(item, k) / lambdaRte.getEntry(item, k);
                    eThetaBeta += eThetaUK * eBetaIK;
                }
                double pLL = 0.0;
                if (isProbPredition) {
                    pLL = (rating == 0) ? (-eThetaBeta) : Math.log(1 - Math.exp(-eThetaBeta));
                } else {
                    pLL = rating * Math.log(eThetaBeta) - eThetaBeta - Gamma.logGamma(rating + 1);
                }
                avgPLLCurr += pLL;
            }
            avgPLLCurr = avgPLLCurr / validation.size();
            diffPLL = Math.abs((avgPLLCurr - avgPLLPre) / avgPLLPre);
            avgPLLPre = avgPLLCurr;
            logger.info("iteration {} with current average predictive log likelihood {} and the change is {}",
                    iterCount, avgPLLCurr, diffPLL);
        }
        iterCount++;
    }

    // construct feature matrix used by HPFModel
    RealMatrix eTheta = MatrixUtils.createRealMatrix(userNum, featureCount);
    RealMatrix eBeta = MatrixUtils.createRealMatrix(itemNum, featureCount);
    for (int user = 0; user < userNum; user++) {
        RealVector gammaShpU = gammaShp.getRowVector(user);
        RealVector gammaRteU = gammaRte.getRowVector(user);
        RealVector eThetaU = gammaShpU.ebeDivide(gammaRteU);
        eTheta.setRowVector(user, eThetaU);
        logger.info("Training user {} features finished", user);
    }

    for (int item = 0; item < itemNum; item++) {
        RealVector lambdaShpI = lambdaShp.getRowVector(item);
        RealVector lambdaRteI = lambdaRte.getRowVector(item);
        RealVector eBetaI = lambdaShpI.ebeDivide(lambdaRteI);
        eBeta.setRowVector(item, eBetaI);
        logger.info("Training item {} features finished", item);
    }

    KeyIndex uidx = ratings.getUserIndex();
    KeyIndex iidx = ratings.getItemIndex();

    return new HPFModel(eTheta, eBeta, uidx, iidx);
}

From source file: org.lenskit.pf.PMFModel.java

private static void updatePhi(RealVector phi, int user, int item, double rating, int featureCount,
        PMFModel userModel, PMFModel itemModel) {
    for (int k = 0; k < featureCount; k++) {
        double userWeightShp = userModel.getWeightShpEntry(user, k);
        double userWeightRte = userModel.getWeightRteEntry(user, k);
        double itemWeightShp = itemModel.getWeightShpEntry(item, k);
        double itemWeightRte = itemModel.getWeightRteEntry(item, k);
        double phiUIK = Scalars.digamma(userWeightShp) - Math.log(userWeightRte)
                + Scalars.digamma(itemWeightShp) - Math.log(itemWeightRte);
        phi.setEntry(k, phiUIK);
    }
    logNormalize(phi);

    if (rating > 1) {
        phi.mapMultiplyToSelf(rating);
    }
}
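
Both LensKit examples follow the same pattern: phi is filled with log-scale scores, normalized so its entries sum to one, and then scaled in place by the rating when the rating exceeds one. The sketch below reproduces that pattern on its own; logNormalize is a private helper in the LensKit sources, so the version here is a hypothetical stand-in based on a standard log-sum-exp shift.

import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealVector;

public class PhiUpdateSketch {

    // Hypothetical stand-in for the private logNormalize helper used above:
    // treats the entries as log-scale weights and exponentiates them with a
    // log-sum-exp shift so that they sum to one.
    static void logNormalize(RealVector phi) {
        double max = phi.getMaxValue();
        double sum = 0.0;
        for (int k = 0; k < phi.getDimension(); k++) {
            sum += Math.exp(phi.getEntry(k) - max);
        }
        double logSum = max + Math.log(sum);
        for (int k = 0; k < phi.getDimension(); k++) {
            phi.setEntry(k, Math.exp(phi.getEntry(k) - logSum));
        }
    }

    public static void main(String[] args) {
        // Hypothetical log-scale scores for three latent features.
        RealVector phi = MatrixUtils.createRealVector(new double[] { -1.0, 0.0, 2.0 });
        double rating = 3.0;

        logNormalize(phi);                 // entries now sum to 1
        if (rating > 1) {
            phi.mapMultiplyToSelf(rating); // entries now sum to the rating
        }
        System.out.println(phi.getL1Norm()); // 3.0
    }
}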