Example usage for org.apache.commons.math3.linear RealVector setEntry

Introduction

This page lists example usages of the setEntry method of org.apache.commons.math3.linear.RealVector.

Prototype

public abstract void setEntry(int index, double value) throws OutOfRangeException;

Document

Set a single element.
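
Before the project code below, a minimal sketch of the call itself may help. It assumes a plain ArrayRealVector and shows that an index outside the vector's range raises OutOfRangeException:

import org.apache.commons.math3.exception.OutOfRangeException;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class SetEntryDemo {
    public static void main(String[] args) {
        RealVector v = new ArrayRealVector(3); // starts as [0.0, 0.0, 0.0]
        v.setEntry(0, 1.5);  // overwrite a single element in place
        v.setEntry(2, -2.0);
        System.out.println(v); // prints the vector contents
        try {
            v.setEntry(3, 9.9); // index outside [0, 2]
        } catch (OutOfRangeException e) {
            System.out.println("invalid index: " + e.getMessage());
        }
    }
}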

Usage

From source file:org.lenskit.pf.HPFModelProvider.java

/**
 * Initialization of parameter matrices.
 * @param gammaShp
 * @param gammaRte
 * @param kappaRte
 * @param kappaShp
 * @param lambdaShp
 * @param lambdaRte
 * @param tauRte
 * @param tauShp
 */
public void initialize(RealMatrix gammaShp, RealMatrix gammaRte, RealVector kappaRte, RealVector kappaShp,
        RealMatrix lambdaShp, RealMatrix lambdaRte, RealVector tauRte, RealVector tauShp) {
    final int userNum = ratings.getUserIndex().size();
    final int itemNum = ratings.getItemIndex().size();
    final int featureCount = hyperParameters.getFeatureCount();
    final double a = hyperParameters.getUserWeightShpPrior();
    final double aPrime = hyperParameters.getUserActivityShpPrior();
    final double bPrime = hyperParameters.getUserActivityPriorMean();
    final double c = hyperParameters.getItemWeightShpPrior();
    final double cPrime = hyperParameters.getItemActivityShpPrior();
    final double dPrime = hyperParameters.getItemActivityPriorMean();
    // Initialization
    Random random = new Random(rndSeed);
    final double kRte = aPrime + featureCount;
    final double tRte = cPrime + featureCount;

    for (int u = 0; u < userNum; u++) {
        for (int k = 0; k < featureCount; k++) {
            double valueShp = a + maxOffsetShp * random.nextDouble();
            double valueRte = aPrime + maxOffsetRte * random.nextDouble();
            gammaShp.setEntry(u, k, valueShp);
            gammaRte.setEntry(u, k, valueRte);
        }

        double kShp = aPrime + maxOffsetShp * random.nextDouble();
        kappaRte.setEntry(u, kRte);
        kappaShp.setEntry(u, kShp);
    }

    for (int i = 0; i < itemNum; i++) {
        for (int k = 0; k < featureCount; k++) {
            double valueShp = c + maxOffsetShp * random.nextDouble();
            double valueRte = cPrime + maxOffsetRte * random.nextDouble();
            lambdaShp.setEntry(i, k, valueShp);
            lambdaRte.setEntry(i, k, valueRte);
        }
        double tShp = cPrime + maxOffsetShp * random.nextDouble();
        tauRte.setEntry(i, tRte);
        tauShp.setEntry(i, tShp);
    }

}

From source file:org.lenskit.pf.HPFModelProvider.java

public void logNormalize(RealVector phi) {
    final int size = phi.getDimension();
    if (size == 1) {
        phi.setEntry(0, 1.0);
    }

    if (size > 1) {
        double logsum = phi.getEntry(0);
        for (int i = 1; i < size; i++) {
            double phiK = phi.getEntry(i);
            if (phiK < logsum) {
                logsum = logsum + Math.log(1 + Math.exp(phiK - logsum));
            } else {
                logsum = phiK + Math.log(1 + Math.exp(logsum - phiK));
            }
        }

        for (int k = 0; k < size; k++) {
            double phiK = phi.getEntry(k);
            double normalized = Math.exp(phiK - logsum);
            phi.setEntry(k, normalized);
        }
    }

}
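
The two loops above implement a streaming log-sum-exp: logsum accumulates log(exp(phi_0) + ... + exp(phi_{n-1})) without overflow, and each entry is then rewritten in place with setEntry. A self-contained sketch of the same idea, assuming plain commons-math3 and hypothetical log-scores:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class LogNormalizeDemo {
    public static void main(String[] args) {
        // hypothetical unnormalized log-scores
        RealVector phi = new ArrayRealVector(new double[] { -1.0, 0.5, 2.0 });
        double logsum = phi.getEntry(0);
        for (int k = 1; k < phi.getDimension(); k++) {
            double phiK = phi.getEntry(k);
            // log(exp(logsum) + exp(phiK)), evaluated from the larger argument for stability
            logsum = Math.max(logsum, phiK) + Math.log1p(Math.exp(-Math.abs(phiK - logsum)));
        }
        double total = 0.0;
        for (int k = 0; k < phi.getDimension(); k++) {
            phi.setEntry(k, Math.exp(phi.getEntry(k) - logsum)); // normalize in place
            total += phi.getEntry(k);
        }
        System.out.println(phi + " sums to " + total); // total is 1.0 up to rounding
    }
}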

From source file:org.lenskit.pf.PMFModel.java

private static void updatePhi(RealVector phi, int user, int item, double rating, int featureCount,
        PMFModel userModel, PMFModel itemModel) {
    for (int k = 0; k < featureCount; k++) {
        double userWeightShp = userModel.getWeightShpEntry(user, k);
        double userWeightRte = userModel.getWeightRteEntry(user, k);
        double itemWeightShp = itemModel.getWeightShpEntry(item, k);
        double itemWeightRte = itemModel.getWeightRteEntry(item, k);
        double phiUIK = Scalars.digamma(userWeightShp) - Math.log(userWeightRte)
                + Scalars.digamma(itemWeightShp) - Math.log(itemWeightRte);
        phi.setEntry(k, phiUIK);
    }
    logNormalize(phi);

    if (rating > 1) {
        phi.mapMultiplyToSelf(rating);
    }
}

From source file:org.lenskit.pf.PMFModel.java

private static void logNormalize(RealVector phi) {
    final int size = phi.getDimension();
    if (size == 1) {
        phi.setEntry(0, 1.0);
    }

    if (size > 1) {
        double logsum = phi.getEntry(0);
        for (int k = 1; k < size; k++) {
            double phiK = phi.getEntry(k);
            if (phiK < logsum) {
                logsum = logsum + Math.log(1 + Math.exp(phiK - logsum));
            } else {
                logsum = phiK + Math.log(1 + Math.exp(logsum - phiK));
            }
        }

        for (int k = 0; k < size; k++) {
            double phiK = phi.getEntry(k);
            double normalized = Math.exp(phiK - logsum);
            phi.setEntry(k, normalized);
        }
    }

}

From source file:org.lenskit.predict.ordrec.OrdRecModel.java

/**
 * Get the probability distribution according to score and thresholds
 * @param score The score
 * @param vec The RealVector to be filled in.
 */
public void getProbDistribution(double score, RealVector vec) {
    double pre = getProbLE(score, 0);
    vec.setEntry(0, pre);
    for (int i = 1; i < getLevelCount(); i++) {
        double pro = getProbLE(score, i);
        vec.setEntry(i, pro - pre);
        pre = pro;
    }

}
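
The loop fills vec with first differences of the cumulative probabilities, so each entry holds the probability of exactly one level. The same pattern in standalone form, with hypothetical CDF values standing in for getProbLE:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class ProbDistributionDemo {
    public static void main(String[] args) {
        // hypothetical cumulative probabilities P(level <= i) for five rating levels
        double[] cdf = { 0.10, 0.35, 0.70, 0.90, 1.00 };
        RealVector vec = new ArrayRealVector(cdf.length);
        double pre = cdf[0];
        vec.setEntry(0, pre);
        for (int i = 1; i < cdf.length; i++) {
            vec.setEntry(i, cdf[i] - pre); // P(level == i) = P(<= i) - P(<= i-1)
            pre = cdf[i];
        }
        System.out.println(vec); // entries are non-negative and sum to 1
    }
}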

From source file:org.lenskit.predict.ordrec.OrdRecRatingPredictor.java

/**
 * The training routine of OrdRec; learns all model parameters.
 */
@SuppressWarnings("ConstantConditions")
private void trainModel(OrdRecModel model, SparseVector ratings, MutableSparseVector scores) {
    RealVector beta = model.getBeta();
    RealVector deltaBeta = new ArrayRealVector(beta.getDimension());
    double dt1;
    // repeat the learning pass iterationCount times
    for (int j = 0; j < iterationCount; j++) {
        for (VectorEntry rating : ratings) {
            long iid = rating.getKey();
            double score = scores.get(iid);
            int r = quantizer.index(rating.getValue());

            double probEqualR = model.getProbEQ(score, r);
            double probLessR = model.getProbLE(score, r);
            double probLessR_1 = model.getProbLE(score, r - 1);

            double t1 = model.getT1();
            dt1 = learningRate / probEqualR * (probLessR * (1 - probLessR) * dBeta(r, 0, t1)
                    - probLessR_1 * (1 - probLessR_1) * dBeta(r - 1, 0, t1) - regTerm * t1);

            double dbetaK;
            for (int k = 0; k < beta.getDimension(); k++) {
                dbetaK = learningRate / probEqualR
                        * (probLessR * (1 - probLessR) * dBeta(r, k + 1, beta.getEntry(k))
                                - probLessR_1 * (1 - probLessR_1) * dBeta(r - 1, k + 1, beta.getEntry(k))
                                - regTerm * beta.getEntry(k));
                deltaBeta.setEntry(k, dbetaK);
            }
            model.update(dt1, deltaBeta);
        }
    }
}

From source file:org.rhwlab.dispim.datasource.MicroCluster.java

public static RealVector mean(List<MicroCluster> data) {
    if (data.isEmpty()) {
        return null;
    }
    RealVector first = data.get(0).asRealVector();
    long n = 0;
    long[] mu = new long[first.getDimension()];
    for (MicroCluster micro : data) {
        for (int p = 0; p < micro.points.length; ++p) {
            for (int d = 0; d < mu.length; ++d) {
                mu[d] = mu[d] + micro.points[p][d];
            }
            ++n;
        }
    }
    RealVector ret = new ArrayRealVector(first.getDimension());
    for (int d = 0; d < mu.length; ++d) {
        ret.setEntry(d, (double) mu[d] / (double) n);
    }
    return ret;
}
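
mean accumulates integer coordinates in a long[] and converts to double only when writing each component with setEntry. The same accumulate-then-set pattern in a standalone form, assuming plain int[][] points in place of MicroCluster:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class MeanVectorDemo {
    public static void main(String[] args) {
        // hypothetical 3-D points standing in for MicroCluster contents
        int[][] points = { { 1, 2, 3 }, { 3, 2, 1 }, { 2, 2, 2 } };
        long[] mu = new long[3];
        for (int[] p : points) {
            for (int d = 0; d < mu.length; d++) {
                mu[d] += p[d];
            }
        }
        RealVector mean = new ArrayRealVector(mu.length);
        for (int d = 0; d < mu.length; d++) {
            mean.setEntry(d, (double) mu[d] / points.length); // per-dimension average
        }
        System.out.println(mean); // {2; 2; 2}
    }
}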

From source file:org.rhwlab.dispim.datasource.MicroCluster.java

public static RealMatrix precision(List<MicroCluster> data, RealVector mu) {
    RealMatrix ret = new Array2DRowRealMatrix(mu.getDimension(), mu.getDimension());
    RealVector v = new ArrayRealVector(mu.getDimension());
    long n = 0;
    for (MicroCluster micro : data) {
        for (int p = 0; p < micro.points.length; ++p) {
            for (int d = 0; d < mu.getDimension(); ++d) {
                v.setEntry(d, micro.points[p][d]);
            }
            RealVector del = v.subtract(mu);
            ret = ret.add(del.outerProduct(del));
            ++n;
        }

    }
    ret = ret.scalarMultiply(1.0 / n);
    LUDecomposition lud = new LUDecomposition(ret);
    RealMatrix prec = null;
    if (lud.getSolver().isNonSingular()) {
        prec = lud.getSolver().getInverse();
    }
    return prec;
}

From source file:playground.sergioo.facilitiesGenerator2012.WorkFacilitiesGeneration.java

private static Set<PointPerson> getPCATransformation(Collection<PointPerson> points) {
    RealMatrix pointsM = new Array2DRowRealMatrix(points.iterator().next().getDimension(), points.size());
    int k = 0;
    for (PointND<Double> point : points) {
        for (int f = 0; f < point.getDimension(); f++)
            pointsM.setEntry(f, k, point.getElement(f));
        k++;
    }
    RealMatrix means = new Array2DRowRealMatrix(pointsM.getRowDimension(), 1);
    for (int r = 0; r < means.getRowDimension(); r++) {
        double mean = 0;
        for (int c = 0; c < pointsM.getColumnDimension(); c++)
            mean += pointsM.getEntry(r, c) / pointsM.getColumnDimension();
        means.setEntry(r, 0, mean);
    }
    RealMatrix deviations = new Array2DRowRealMatrix(pointsM.getRowDimension(), pointsM.getColumnDimension());
    for (int r = 0; r < deviations.getRowDimension(); r++)
        for (int c = 0; c < deviations.getColumnDimension(); c++)
            deviations.setEntry(r, c, pointsM.getEntry(r, c) - means.getEntry(r, 0));
    RealMatrix covariance = deviations.multiply(deviations.transpose())
            .scalarMultiply(1 / (double) pointsM.getColumnDimension());
    EigenDecomposition eigenDecomposition = new EigenDecomposition(covariance, 0);
    RealMatrix eigenVectorsT = eigenDecomposition.getVT();
    RealVector eigenValues = new ArrayRealVector(eigenDecomposition.getD().getRowDimension());
    for (int r = 0; r < eigenDecomposition.getD().getRowDimension(); r++)
        eigenValues.setEntry(r, eigenDecomposition.getD().getEntry(r, r));
    for (int i = 0; i < eigenValues.getDimension(); i++) {
        for (int j = i + 1; j < eigenValues.getDimension(); j++)
            if (eigenValues.getEntry(i) < eigenValues.getEntry(j)) {
                double tempValue = eigenValues.getEntry(i);
                eigenValues.setEntry(i, eigenValues.getEntry(j));
                eigenValues.setEntry(j, tempValue);
                RealVector tempVector = eigenVectorsT.getRowVector(i);
                eigenVectorsT.setRowVector(i, eigenVectorsT.getRowVector(j));
                eigenVectorsT.setRowVector(j, tempVector);
            }
        eigenVectorsT.setRowVector(i,
                eigenVectorsT.getRowVector(i).mapMultiply(Math.sqrt(1 / eigenValues.getEntry(i))));
    }
    RealVector standardDeviations = new ArrayRealVector(pointsM.getRowDimension());
    for (int r = 0; r < covariance.getRowDimension(); r++)
        standardDeviations.setEntry(r, Math.sqrt(covariance.getEntry(r, r)));
    double zValue = standardDeviations.dotProduct(new ArrayRealVector(pointsM.getRowDimension(), 1));
    RealMatrix zScore = deviations.scalarMultiply(1 / zValue);
    pointsM = eigenVectorsT.multiply(zScore);
    Set<PointPerson> pointsC = new HashSet<PointPerson>();
    k = 0;
    for (PointPerson point : points) {
        PointPerson pointC = new PointPerson(point.getId(), point.getOccupation(),
                new Double[] { pointsM.getEntry(0, k), pointsM.getEntry(1, k) }, point.getPlaceType());
        pointC.setWeight(point.getWeight());
        pointsC.add(pointC);
        k++;
    }
    return pointsC;
}

From source file:scorePairing.ScorePairingWithStaticMethods.java

public static ResultFromScorePairing computeCost(PairingAndNullSpaces currentNewPairingAndNewNullSpaces,
        Map<Integer, PointWithPropertiesIfc> queryShape, Map<Integer, PointWithPropertiesIfc> hitShape,
        AlgoParameters algoParameters) {

    int countOfPairedPoint = currentNewPairingAndNewNullSpaces.getPairing().size();

    double[][] matrixPointsModelDouble = new double[3][countOfPairedPoint];
    double[][] matrixPointsCandidateDouble = new double[3][countOfPairedPoint];

    RealMatrix matrixPointsModel = new Array2DRowRealMatrix(matrixPointsModelDouble);
    RealMatrix matrixPointsCandidate = new Array2DRowRealMatrix(matrixPointsCandidateDouble);

    int currentPoint = 0;

    RealVector sumVector1 = new ArrayRealVector(new double[3]);
    RealVector sumVector2 = new ArrayRealVector(new double[3]);

    sumVector1.setEntry(0, 0.0);
    sumVector1.setEntry(1, 0.0);
    sumVector1.setEntry(2, 0.0);
    sumVector2.setEntry(0, 0.0);
    sumVector2.setEntry(1, 0.0);
    sumVector2.setEntry(2, 0.0);

    for (Map.Entry<Integer, Integer> entry : currentNewPairingAndNewNullSpaces.getPairing().entrySet()) {

        Integer point1id = Integer.valueOf(entry.getKey().intValue());
        Integer point2id = Integer.valueOf(entry.getValue().intValue());

        // the two points could be defined as static
        PointWithPropertiesIfc point1 = queryShape.get(point1id);
        PointWithPropertiesIfc point2 = hitShape.get(point2id);

        // matrix for procrustes
        matrixPointsModel.setEntry(0, currentPoint, point1.getCoords().getCoords()[0]);
        matrixPointsModel.setEntry(1, currentPoint, point1.getCoords().getCoords()[1]);
        matrixPointsModel.setEntry(2, currentPoint, point1.getCoords().getCoords()[2]);

        matrixPointsCandidate.setEntry(0, currentPoint, point2.getCoords().getCoords()[0]);
        matrixPointsCandidate.setEntry(1, currentPoint, point2.getCoords().getCoords()[1]);
        matrixPointsCandidate.setEntry(2, currentPoint, point2.getCoords().getCoords()[2]);

        // barycenter computation
        sumVector1.addToEntry(0, point1.getCoords().getCoords()[0]);
        sumVector1.addToEntry(1, point1.getCoords().getCoords()[1]);
        sumVector1.addToEntry(2, point1.getCoords().getCoords()[2]);

        sumVector2.addToEntry(0, point2.getCoords().getCoords()[0]);
        sumVector2.addToEntry(1, point2.getCoords().getCoords()[1]);
        sumVector2.addToEntry(2, point2.getCoords().getCoords()[2]);
        currentPoint += 1;

    }

    RealVector barycenterVector1 = sumVector1.mapDivide((double) currentPoint);
    RealVector barycenterVector2 = sumVector2.mapDivide((double) currentPoint);

    RealVector translationVectorToTranslateShape2ToOrigin = barycenterVector2.mapMultiply(-1.0);
    RealVector translationVectorToTranslateShape2ToShape1 = barycenterVector1.subtract(barycenterVector2);

    translateBarycenterListOfPointToOrigin(matrixPointsModel, barycenterVector1);
    translateBarycenterListOfPointToOrigin(matrixPointsCandidate, barycenterVector2);

    ProcrustesAnalysisIfc procrustesAnalysis = null;
    try {
        procrustesAnalysis = algoParameters.procrustesAnalysisBuffer.get();
    } catch (InterruptedException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }
    procrustesAnalysis.run(matrixPointsModel, matrixPointsCandidate);

    RealMatrix rotationMatrixToRotateShape2ToShape1 = procrustesAnalysis.getRotationMatrix();

    try {
        algoParameters.procrustesAnalysisBuffer.put(procrustesAnalysis);
    } catch (InterruptedException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    // not used: the exact overlap of paired points does not matter much here, since it stays
    // within the expected range given that hits are extended with a distance criterion
    double distanceResidual = procrustesAnalysis.getResidual();
    // no need to use hit coverage as already used as a filter

    // no need to use the distance to outside as it is used as a filter

    // tolerance below which differences in electron probability contribute nothing to the cost
    float toleranceElectronProbability = 0.2f;

    // use only properties for cost
    double costCharge = 0.0;
    double costHydrophobicity = 0.0;
    double costHbondDonnor = 0.0;
    double costHbondAcceptor = 0.0;
    double costDehydron = 0.0;
    double costAromaticRing = 0.0;
    double costProbabilityDiff = 0.0;

    for (Map.Entry<Integer, Integer> entry : currentNewPairingAndNewNullSpaces.getPairing().entrySet()) {

        Integer idFromMap1 = entry.getKey();
        Integer idFromMap2 = entry.getValue();
        PointWithPropertiesIfc point1 = queryShape.get(idFromMap1);
        PointWithPropertiesIfc point2 = hitShape.get(idFromMap2);

        float probabilityOfPoint1 = point1.getElectronProbability();
        float probabilityOfPoint2 = point2.getElectronProbability();
        float probabilityDiff = Math.abs(probabilityOfPoint1 - probabilityOfPoint2);
        if (probabilityDiff < toleranceElectronProbability) {
            probabilityDiff = 0.0f;
        } else {
            probabilityDiff -= toleranceElectronProbability;
        }
        costProbabilityDiff += probabilityDiff;

        // handle multi properties

        // Hydrophobe and Aromatic
        Float hydrophobicityOfPointFromMap1 = point1.get(PropertyName.Hydrophobicity);
        Float hydrophobicityOfPointFromMap2 = point2.get(PropertyName.Hydrophobicity);
        Float aromaticRingOfPointFromMap1 = point1.get(PropertyName.AromaticRing);
        Float aromaticRingOfPointFromMap2 = point2.get(PropertyName.AromaticRing);

        // by definition an aromatic point is also hydrophobic; the checks below print a
        // debug flag when the data violates that invariant
        if (aromaticRingOfPointFromMap1 != null && hydrophobicityOfPointFromMap1 == null) {
            System.out.println(aromaticRingOfPointFromMap1 != null && hydrophobicityOfPointFromMap1 == null);
        }
        if (aromaticRingOfPointFromMap2 != null && hydrophobicityOfPointFromMap2 == null) {
            System.out.println(aromaticRingOfPointFromMap2 != null && hydrophobicityOfPointFromMap2 == null);
        }
        // if one hydrophobe and not the other one
        // then cost hydrophobe
        if (hydrophobicityOfPointFromMap1 != null && hydrophobicityOfPointFromMap2 == null) {
            costHydrophobicity += returnCost(hydrophobicityOfPointFromMap1, hydrophobicityOfPointFromMap2);
        }
        if (hydrophobicityOfPointFromMap2 != null && hydrophobicityOfPointFromMap1 == null) {
            costHydrophobicity += returnCost(hydrophobicityOfPointFromMap1, hydrophobicityOfPointFromMap2);
        }
        // note that a difference between plain hydrophobic and aromatic is deliberately neglected

        Float chargeOfPointFromMap1 = point1.get(PropertyName.FormalCharge);
        Float chargeOfPointFromMap2 = point2.get(PropertyName.FormalCharge);
        costCharge += returnCost(chargeOfPointFromMap1, chargeOfPointFromMap2);

        Float hBondDonnorOfPointFromMap1 = point1.get(PropertyName.HbondDonnor);
        Float hBondDonnorOfPointFromMap2 = point2.get(PropertyName.HbondDonnor);
        Float hBondAcceptorOfPointFromMap1 = point1.get(PropertyName.HbondAcceptor);
        Float hBondAcceptorOfPointFromMap2 = point2.get(PropertyName.HbondAcceptor);

        costHbondDonnor += returnCost(hBondDonnorOfPointFromMap1, hBondDonnorOfPointFromMap2);
        costHbondAcceptor += returnCost(hBondAcceptorOfPointFromMap1, hBondAcceptorOfPointFromMap2);

        Float dehydronOfPointFromMap1 = point1.get(PropertyName.Dehydron);
        Float dehydronOfPointFromMap2 = point2.get(PropertyName.Dehydron);
        costDehydron += returnCost(dehydronOfPointFromMap1, dehydronOfPointFromMap2);

    }

    int size = currentNewPairingAndNewNullSpaces.getPairing().size();

    costCharge = costCharge / size;
    costHbondDonnor = costHbondDonnor / size;
    costHbondAcceptor = costHbondAcceptor / size;
    costDehydron = costDehydron / size;
    costHydrophobicity = costHydrophobicity / size;
    costAromaticRing = costAromaticRing / size;
    costProbabilityDiff = costProbabilityDiff / size;

    double costOnPairs = algoParameters.getWEIGHT_DIFFERENCE_IN_CHARGES_BETWEEN_PAIRED_POINTS() * costCharge
            + algoParameters.getWEIGHT_DIFFERENCE_IN_HYDROPHOBICITY_BETWEEN_PAIRED_POINTS() * costHydrophobicity
            + algoParameters.getWEIGHT_HBOND_DONNOR() * costHbondDonnor
            + algoParameters.getWEIGHT_HBOND_ACCEPTOR() * costHbondAcceptor
            + algoParameters.getWEIGHT_DEHYDRON() * costDehydron
            + algoParameters.getWEIGHT_DIFFERENCE_AROMATICRING() * costAromaticRing
            + algoParameters.getWEIGHT_DIFFERENCE_IN_PROBABILITIES_IN_PAIRED_POINTS() * costProbabilityDiff;

    //double costOnUnpairedPoints = computeRatioFromUnpairedPointsHitToAllPointsHits(currentNewPairingAndNewNullSpaces, hitShape);
    //double newFinalCost = costOnPairs + algoParameters.getWEIGHT_UNPAIRED_POINT_IN_SMALLEST_MAP() * costOnUnpairedPoints;

    double sumWeight = algoParameters.getWEIGHT_DIFFERENCE_IN_CHARGES_BETWEEN_PAIRED_POINTS()
            + algoParameters.getWEIGHT_DIFFERENCE_IN_HYDROPHOBICITY_BETWEEN_PAIRED_POINTS()
            + algoParameters.getWEIGHT_HBOND_DONNOR() + algoParameters.getWEIGHT_HBOND_ACCEPTOR()
            + algoParameters.getWEIGHT_DEHYDRON() + algoParameters.getWEIGHT_DIFFERENCE_AROMATICRING()
            + algoParameters.getWEIGHT_DIFFERENCE_IN_PROBABILITIES_IN_PAIRED_POINTS();

    double finalCost = costOnPairs / sumWeight;

    ResultFromScorePairing resultFromScorePairing = new ResultFromScorePairing(finalCost,
            rotationMatrixToRotateShape2ToShape1, translationVectorToTranslateShape2ToShape1,
            translationVectorToTranslateShape2ToOrigin, distanceResidual);
    return resultFromScorePairing;

}
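
The barycenter bookkeeping at the top of computeCost (setEntry to zero the accumulators, addToEntry to sum coordinates, mapDivide to average) can be exercised on its own. A minimal sketch with hypothetical coordinates; note that a freshly constructed ArrayRealVector already starts at zero, so the explicit zeroing in the source is belt-and-braces:

import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;

public class BarycenterDemo {
    public static void main(String[] args) {
        // hypothetical 3-D coordinates of paired points
        double[][] coords = { { 0.0, 0.0, 0.0 }, { 2.0, 4.0, 6.0 } };
        RealVector sum = new ArrayRealVector(3); // already initialized to 0.0
        for (double[] c : coords) {
            sum.addToEntry(0, c[0]);
            sum.addToEntry(1, c[1]);
            sum.addToEntry(2, c[2]);
        }
        RealVector barycenter = sum.mapDivide(coords.length);
        System.out.println(barycenter); // {1; 2; 3}
        // translation that moves the barycenter to the origin
        RealVector toOrigin = barycenter.mapMultiply(-1.0);
        System.out.println(toOrigin); // {-1; -2; -3}
    }
}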