Example usage for com.google.common.collect Table contains

Introduction

On this page you can find example usage for com.google.common.collect Table#contains.

Prototype

boolean contains(@Nullable Object rowKey, @Nullable Object columnKey);

Document

Returns true if the table contains a mapping with the specified row and column keys.
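
For orientation, here is a minimal, self-contained sketch (the keys and values are made up for illustration) showing the plain membership check plus the two idioms that recur throughout the examples below: contains-or-default and lazy initialization.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class TableContainsExample {
    public static void main(String[] args) {
        // Row key: user id, column key: day, value: bias term (illustrative data).
        Table<Integer, Integer, Double> but = HashBasedTable.create();
        but.put(1, 3, 0.25);

        // Plain membership check.
        System.out.println(but.contains(1, 3)); // true
        System.out.println(but.contains(1, 4)); // false

        // Contains-or-default: get() returns null for a missing cell, so unboxing
        // it directly into a double would throw a NullPointerException.
        double b14 = but.contains(1, 4) ? but.get(1, 4) : 0;

        // Lazy initialization: create the cell on first access.
        if (!but.contains(2, 5))
            but.put(2, 5, 0.0);
        double b25 = but.get(2, 5);

        System.out.println(b14 + " " + b25); // 0.0 0.0
    }
}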

Usage

From source file:i5.las2peer.services.recommender.librec.rating.TimeNeighSVD.java

@Override
protected double predict(int u, int i) throws Exception {
    // retrieve the test rating timestamp
    long timestamp = (long) testTimeMatrix.get(u, i);
    int t = days(timestamp, minTimestamp);
    if (t < 0)
        t = 0;
    if (t > (numDays - 1))
        t = numDays - 1;
    int bin = bin(t);
    double dev_ut = dev(u, t);

    double pred = globalMean;

    // bi(t): eq. (12)
    double bi = itemBias.get(i);
    double bit = Bit.get(i, bin);
    double cu = Cu.get(u);
    double cut = 0;
    try {
        cut = Cut.get(u, t);
    } catch (Exception e) {
        System.out.println("Cut dimensions: " + Cut.numRows() + "x" + Cut.numColumns());
        System.out.println("numUsers: " + numUsers);
        System.out.println("numDays: " + numDays);
        System.out.println("user: " + u);
        System.out.println("day: " + t);
        e.printStackTrace();
    }
    pred += (bi + bit) * (cu + cut);

    // bu(t): eq. (9)
    double bu = userBias.get(u);
    double au = Alpha.get(u);
    double but = But.contains(u, t) ? But.get(u, t) : 0;
    pred += bu + au * dev_ut + but;

    // qi * yj
    List<Integer> Ru = userItemsCache.get(u);
    double sum_y = 0;
    for (int j : Ru)
        sum_y += DenseMatrix.rowMult(Y, j, Q, i);

    double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
    pred += sum_y * wi;

    // qi * pu(t)
    for (int k = 0; k < numFactors; k++) {
        double qik = Q.get(i, k);
        // eq. (13)
        double puk = P.get(u, k) + Auk.get(u, k) * dev_ut;

        if (Pukt.containsKey(u)) {
            Table<Integer, Integer, Double> pkt = Pukt.get(u);
            if (pkt != null) {
                // eq. (13)
                puk += (pkt.contains(k, t) ? pkt.get(k, t) : 0);
            }
        }

        pred += puk * qik;
    }

    // e^(-beta_u * |t-tj|) * ((ruj - buj) * wij + cij): eq. (16)
    // we use phi instead of beta since beta is already used for the time deviation in the baseline model
    for (int j : Ru) {
        double e = decay(u, j, t);
        double ruj = trainMatrix.get(u, j);
        double buj = (itemBias.get(i) + Bit.get(i, bin)) * (Cu.get(u) + Cut.get(u, t));
        buj += userBias.get(u) + Alpha.get(u) * dev_ut;
        buj += But.contains(u, t) ? But.get(u, t) : 0;

        pred += e * ((ruj - buj) * W.get(i, j) + C.get(i, j)) * wi;
    }

    return pred;
}

From source file:librec.undefined.TimeSVDPlusPlus.java

@Override
protected void buildModel() throws Exception {
    for (int iter = 1; iter <= numIters; iter++) {
        errs = 0;
        loss = 0;

        for (MatrixEntry me : trainMatrix) {
            int u = me.row();
            int i = me.column();
            double rui = me.get();
            if (rui <= 0)
                continue;

            long t = ratingContexts.get(u, i).getTimestamp();
            int bin = bin(t);
            int day = day(t);
            double dev = dev(u, t);

            double bi = itemBias.get(i);
            double bit = Bit.get(i, bin);
            double bu = userBias.get(u);
            double but = But.get(u, day);
            double au = userAlpha.get(u); // alpha_u
            double cu = userScaling.get(u);
            double cut = Cut.get(u, day);

            double pui = globalMean + (bi + bit) * (cu + cut); // mu + bi(t)
            pui += bu + au * dev(u, t) + but; // bu(t)

            // qi*yi
            SparseVector Ru = trainMatrix.row(u);
            double sum_y = 0;
            for (VectorEntry ve : Ru) {
                int k = ve.index();
                sum_y += DenseMatrix.rowMult(Y, k, Q, i);
            }
            if (Ru.getCount() > 0)
                pui += sum_y * Math.pow(Ru.getCount(), -0.5); // normalize sum_y by 1/sqrt(|R(u)|)

            // qi*pu(t)
            if (!Puft.containsKey(u)) {
                Table<Integer, Integer, Double> data = HashBasedTable.create();
                Puft.put(u, data);
            }

            Table<Integer, Integer, Double> data = Puft.get(u);
            for (int f = 0; f < numFactors; f++) {
                double qif = Q.get(i, f);
                if (!data.contains(f, day)) {
                    // late initialization
                    data.put(f, day, Randoms.random());
                }
                double puf = P.get(u, f) + Auf.get(u, f) * dev + data.get(f, day);

                pui += puf * qif;
            }

            double eui = pui - rui;
            errs += eui * eui;
            loss += eui * eui;

            // update bu
            double sgd = eui + regB * bu;
            userBias.add(u, -lRate * sgd);

            // TODO: add code here to update the other variables
        }

        if (isConverged(iter))
            break;
    }
}

From source file:co.cask.cdap.internal.app.verification.FlowVerification.java

/**
 * Verifies a single {@link FlowSpecification} for a {@link co.cask.cdap.api.flow.Flow}.
 *
 * @param input to be verified
 * @return An instance of {@link VerifyResult} depending on the status of verification.
 */
@Override
public VerifyResult verify(Id.Application appId, final FlowSpecification input) {
    VerifyResult verifyResult = super.verify(appId, input);
    if (!verifyResult.isSuccess()) {
        return verifyResult;
    }

    String flowName = input.getName();

    // Check if there are no flowlets.
    if (input.getFlowlets().isEmpty()) {
        return VerifyResult.failure(Err.Flow.ATLEAST_ONE_FLOWLET, flowName);
    }

    // Check if there are no connections.
    if (input.getConnections().isEmpty()) {
        return VerifyResult.failure(Err.Flow.ATLEAST_ONE_CONNECTION, flowName);
    }

    // We go through each flowlet and verify it.

    // First collect all source flowlet names
    Set<String> sourceFlowletNames = Sets.newHashSet();
    for (FlowletConnection connection : input.getConnections()) {
        if (connection.getSourceType() == FlowletConnection.Type.FLOWLET) {
            sourceFlowletNames.add(connection.getSourceName());
        }
    }

    for (Map.Entry<String, FlowletDefinition> entry : input.getFlowlets().entrySet()) {
        FlowletDefinition defn = entry.getValue();
        String flowletName = defn.getFlowletSpec().getName();

        // Check if the Flowlet Name is an ID.
        if (!isId(defn.getFlowletSpec().getName())) {
            return VerifyResult.failure(Err.NOT_AN_ID, flowName + ":" + flowletName);
        }

        // We check if all the dataset names used are ids
        for (String dataSet : defn.getDatasets()) {
            if (!isId(dataSet)) {
                return VerifyResult.failure(Err.NOT_AN_ID, flowName + ":" + flowletName + ":" + dataSet);
            }
        }

        // If the flowlet has outputs, it must appear as a source flowlet in at least one connection
        if (entry.getValue().getOutputs().size() > 0 && !sourceFlowletNames.contains(flowletName)) {
            return VerifyResult.failure(Err.Flow.OUTPUT_NOT_CONNECTED, flowName, flowletName);
        }
    }

    // NOTE: We should unify the logic here and the queue spec generation, as they are doing the same thing.
    Table<QueueSpecificationGenerator.Node, String, Set<QueueSpecification>> queueSpecTable = new SimpleQueueSpecificationGenerator(
            appId).create(input);

    // For all connections, there should be an entry in the table.
    for (FlowletConnection connection : input.getConnections()) {
        QueueSpecificationGenerator.Node node = new QueueSpecificationGenerator.Node(connection.getSourceType(),
                connection.getSourceName());
        if (!queueSpecTable.contains(node, connection.getTargetName())) {
            return VerifyResult.failure(Err.Flow.NO_INPUT_FOR_OUTPUT, flowName, connection.getTargetName(),
                    connection.getSourceType(), connection.getSourceName());
        }
    }

    // For each output entity, check for any unconnected output
    for (QueueSpecificationGenerator.Node node : queueSpecTable.rowKeySet()) {
        // For stream output, no need to check
        if (node.getType() == FlowletConnection.Type.STREAM) {
            continue;
        }

        // For all outputs of a flowlet, remove all the matched connected schemas; if anything is left,
        // then it's an incomplete flow connection (an output not connected to any input).
        Multimap<String, Schema> outputs = toMultimap(input.getFlowlets().get(node.getName()).getOutputs());
        for (Map.Entry<String, Set<QueueSpecification>> entry : queueSpecTable.row(node).entrySet()) {
            for (QueueSpecification queueSpec : entry.getValue()) {
                outputs.remove(queueSpec.getQueueName().getSimpleName(), queueSpec.getOutputSchema());
            }
        }

        if (!outputs.isEmpty()) {
            return VerifyResult.failure(Err.Flow.MORE_OUTPUT_NOT_ALLOWED, flowName,
                    node.getType().toString().toLowerCase(), node.getName(), outputs);
        }
    }

    return VerifyResult.success();
}

From source file:librec.rating.TimeSVD.java

@Override
protected void buildModel() throws Exception {
    for (int iter = 1; iter <= numIters; iter++) {
        loss = 0;

        for (MatrixEntry me : trainMatrix) {
            int u = me.row();
            int i = me.column();
            double rui = me.get();

            long timestamp = (long) timeMatrix.get(u, i);
            // day t
            int t = days(timestamp, minTimestamp);
            int bin = bin(t);
            double dev_ut = dev(u, t);

            double bi = itemBias.get(i);
            double bit = Bit.get(i, bin);
            double bu = userBias.get(u);

            double cu = Cu.get(u);
            double cut = Cut.get(u, t);

            // lazy initialization
            if (!But.contains(u, t))
                But.put(u, t, Randoms.random());
            double but = But.get(u, t);

            double au = Alpha.get(u); // alpha_u

            double pui = globalMean + (bi + bit) * (cu + cut); // mu + bi(t)
            pui += bu + au * dev_ut + but; // bu(t)

            // qi * yj
            List<Integer> Ru = userItemsCache.get(u);
            double sum_y = 0;
            for (int j : Ru) {
                sum_y += DenseMatrix.rowMult(Y, j, Q, i);
            }
            double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
            pui += sum_y * wi;

            // qi * pu(t)
            if (!Pukt.containsKey(u)) {
                Table<Integer, Integer, Double> data = HashBasedTable.create();
                Pukt.put(u, data);
            }

            Table<Integer, Integer, Double> Pkt = Pukt.get(u);
            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);

                // lazy initialization
                if (!Pkt.contains(k, t))
                    Pkt.put(k, t, Randoms.random());

                double puk = P.get(u, k) + Auk.get(u, k) * dev_ut + Pkt.get(k, t);

                pui += puk * qik;
            }

            double eui = pui - rui;
            loss += eui * eui;

            // update bi
            double sgd = eui * (cu + cut) + regB * bi;
            itemBias.add(i, -lRate * sgd);
            loss += regB * bi * bi;

            // update bi,bin(t)
            sgd = eui * (cu + cut) + regB * bit;
            Bit.add(i, bin, -lRate * sgd);
            loss += regB * bit * bit;

            // update cu
            sgd = eui * (bi + bit) + regB * cu;
            Cu.add(u, -lRate * sgd);
            loss += regB * cu * cu;

            // update cut
            sgd = eui * (bi + bit) + regB * cut;
            Cut.add(u, t, -lRate * sgd);
            loss += regB * cut * cut;

            // update bu
            sgd = eui + regB * bu;
            userBias.add(u, -lRate * sgd);
            loss += regB * bu * bu;

            // update au
            sgd = eui * dev_ut + regB * au;
            Alpha.add(u, -lRate * sgd);
            loss += regB * au * au;

            // update but
            sgd = eui + regB * but;
            double delta = but - lRate * sgd;
            But.put(u, t, delta);
            loss += regB * but * but;

            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);
                double puk = P.get(u, k);
                double auk = Auk.get(u, k);
                double pkt = Pkt.get(k, t);

                // update qik
                double pukt = puk + auk * dev_ut + pkt;

                double sum_yk = 0;
                for (int j : Ru)
                    sum_yk += Y.get(j, k);

                sgd = eui * (pukt + wi * sum_yk) + regI * qik;
                Q.add(i, k, -lRate * sgd);
                loss += regI * qik * qik;

                // update puk
                sgd = eui * qik + regU * puk;
                P.add(u, k, -lRate * sgd);
                loss += regU * puk * puk;

                // update auk
                sgd = eui * qik * dev_ut + regU * auk;
                Auk.add(u, k, -lRate * sgd);
                loss += regU * auk * auk;

                // update pkt
                sgd = eui * qik + regU * pkt;
                delta = pkt - lRate * sgd;
                Pkt.put(k, t, delta);
                loss += regU * pkt * pkt;

                // update yjk
                for (int j : Ru) {
                    double yjk = Y.get(j, k);
                    sgd = eui * wi * qik + regI * yjk;
                    Y.add(j, k, -lRate * sgd);
                    loss += regI * yjk * yjk;
                }
            }
        }

        loss *= 0.5;

        if (isConverged(iter))
            break;
    }
}

From source file:net.librec.recommender.context.rating.TimeSVDRecommender.java

@Override
protected void trainModel() throws LibrecException {
    for (int iter = 1; iter <= numIterations; iter++) {
        loss = 0;

        for (MatrixEntry me : trainMatrix) {
            int u = me.row();
            int i = me.column();
            double rui = me.get();

            long timestamp = (long) timeMatrix.get(u, i);
            // day t
            int t = days(timestamp, minTimestamp);
            int bin = bin(t);
            double dev_ut = dev(u, t);

            double bi = itemBiases.get(i);
            double bit = Bit.get(i, bin);
            double bu = userBiases.get(u);

            double cu = Cu.get(u);
            double cut = Cut.get(u, t);

            // lazy initialization
            if (!But.contains(u, t))
                But.put(u, t, Randoms.random());
            double but = But.get(u, t);

            double au = Alpha.get(u); // alpha_u

            double pui = globalMean + (bi + bit) * (cu + cut); // mu + bi(t)
            pui += bu + au * dev_ut + but; // bu(t)

            // qi * yj
            List<Integer> Ru = null;
            try {
                Ru = userItemsCache.get(u);
            } catch (ExecutionException e) {
                e.printStackTrace();
            }

            double sum_y = 0;
            for (int j : Ru) {
                sum_y += DenseMatrix.rowMult(Y, j, Q, i);
            }
            double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
            pui += sum_y * wi;

            // qi * pu(t)
            if (!Pukt.containsKey(u)) {
                Table<Integer, Integer, Double> data = HashBasedTable.create();
                Pukt.put(u, data);
            }

            Table<Integer, Integer, Double> Pkt = Pukt.get(u);
            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);

                // lazy initialization
                if (!Pkt.contains(k, t))
                    Pkt.put(k, t, Randoms.random());

                double puk = P.get(u, k) + Auk.get(u, k) * dev_ut + Pkt.get(k, t);

                pui += puk * qik;
            }

            double eui = pui - rui;
            loss += eui * eui;

            // update bi
            double sgd = eui * (cu + cut) + regBias * bi;
            itemBiases.add(i, -learnRate * sgd);
            loss += regBias * bi * bi;

            // update bi,bin(t)
            sgd = eui * (cu + cut) + regBias * bit;
            Bit.add(i, bin, -learnRate * sgd);
            loss += regBias * bit * bit;

            // update cu
            sgd = eui * (bi + bit) + regBias * cu;
            Cu.add(u, -learnRate * sgd);
            loss += regBias * cu * cu;

            // update cut
            sgd = eui * (bi + bit) + regBias * cut;
            Cut.add(u, t, -learnRate * sgd);
            loss += regBias * cut * cut;

            // update bu
            sgd = eui + regBias * bu;
            userBiases.add(u, -learnRate * sgd);
            loss += regBias * bu * bu;

            // update au
            sgd = eui * dev_ut + regBias * au;
            Alpha.add(u, -learnRate * sgd);
            loss += regBias * au * au;

            // update but
            sgd = eui + regBias * but;
            double delta = but - learnRate * sgd;
            But.put(u, t, delta);
            loss += regBias * but * but;

            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);
                double puk = P.get(u, k);
                double auk = Auk.get(u, k);
                double pkt = Pkt.get(k, t);

                // update qik
                double pukt = puk + auk * dev_ut + pkt;

                double sum_yk = 0;
                for (int j : Ru)
                    sum_yk += Y.get(j, k);

                sgd = eui * (pukt + wi * sum_yk) + regItem * qik;
                Q.add(i, k, -learnRate * sgd);
                loss += regItem * qik * qik;

                // update puk
                sgd = eui * qik + regUser * puk;
                P.add(u, k, -learnRate * sgd);
                loss += regUser * puk * puk;

                // update auk
                sgd = eui * qik * dev_ut + regUser * auk;
                Auk.add(u, k, -learnRate * sgd);
                loss += regUser * auk * auk;

                // update pkt
                sgd = eui * qik + regUser * pkt;
                delta = pkt - learnRate * sgd;
                Pkt.put(k, t, delta);
                loss += regUser * pkt * pkt;

                // update yjk
                for (int j : Ru) {
                    double yjk = Y.get(j, k);
                    sgd = eui * wi * qik + regItem * yjk;
                    Y.add(j, k, -learnRate * sgd);
                    loss += regItem * yjk * yjk;
                }
            }
        }

        loss *= 0.5;
        if (isConverged(iter))
            break;
    }
}

From source file:net.librec.recommender.FactorizationMachineRecommender.java

/**
 * Recommend: predict the ratings in the test data.
 *
 * @return predictive rating matrix
 * @throws LibrecException
 */
protected RecommendedList recommendRating() throws LibrecException {
    testMatrix = testTensor.rateMatrix();
    recommendedList = new RecommendedItemList(numUsers - 1, numUsers);

    // each user-item pair appears in the final recommend list only once
    Table<Integer, Integer, Double> ratingMapping = HashBasedTable.create();

    for (TensorEntry tensorEntry : testTensor) {
        int[] entryKeys = tensorEntry.keys();
        SparseVector featureVector = tenserKeysToFeatureVector(entryKeys);
        double predictRating = predict(featureVector, true);
        if (Double.isNaN(predictRating)) {
            predictRating = globalMean;
        }
        int[] userItemInd = getUserItemIndex(featureVector);
        int userIdx = userItemInd[0];
        int itemIdx = userItemInd[1];
        if (!ratingMapping.contains(userIdx, itemIdx)) {
            ratingMapping.put(userIdx, itemIdx, predictRating);
            recommendedList.addUserItemIdx(userIdx, itemIdx, predictRating);
        }
    }

    return recommendedList;
}

From source file:i5.las2peer.services.recommender.librec.rating.TimeNeighSVD.java

@Override
protected void buildModel() throws Exception {
    Logs.info("{}{} learn model parameters ...", new Object[] { algoName, foldInfo });
    for (int iter = 1; iter <= numIters; iter++) {
        loss = 0;

        for (MatrixEntry me : trainMatrix) {
            int u = me.row();
            int i = me.column();
            double rui = me.get();

            long timestamp = (long) timeMatrix.get(u, i);
            // day t
            int t = days(timestamp, minTimestamp);
            int bin = bin(t);
            double dev_ut = dev(u, t);

            double bi = itemBias.get(i);
            double bit = Bit.get(i, bin);
            double bu = userBias.get(u);

            double cu = Cu.get(u);
            double cut = Cut.get(u, t);

            // lazy initialization
            if (!But.contains(u, t))
                But.put(u, t, Randoms.random());
            double but = But.get(u, t);

            double au = Alpha.get(u); // alpha_u

            double pui = globalMean + (bi + bit) * (cu + cut); // mu + bi(t)
            pui += bu + au * dev_ut + but; // bu(t)

            // qi * yj
            List<Integer> Ru = userItemsCache.get(u);
            double sum_y = 0;
            for (int j : Ru) {
                sum_y += DenseMatrix.rowMult(Y, j, Q, i);
            }
            double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
            pui += sum_y * wi;

            // qi * pu(t)
            if (!Pukt.containsKey(u)) {
                Table<Integer, Integer, Double> data = HashBasedTable.create();
                Pukt.put(u, data);
            }

            Table<Integer, Integer, Double> Pkt = Pukt.get(u);
            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);

                // lazy initialization
                if (!Pkt.contains(k, t))
                    Pkt.put(k, t, Randoms.random());

                double puk = P.get(u, k) + Auk.get(u, k) * dev_ut + Pkt.get(k, t);

                pui += puk * qik;
            }

            for (int j : Ru) {
                double e = decay(u, j, t);
                double ruj = trainMatrix.get(u, j);
                double buj = (itemBias.get(i) + Bit.get(i, bin)) * (Cu.get(u) + Cut.get(u, t));
                buj += userBias.get(u) + Alpha.get(u) * dev_ut;
                buj += But.contains(u, t) ? But.get(u, t) : 0;

                pui += e * ((ruj - buj) * W.get(i, j) + C.get(i, j)) * wi;
            }

            double eui = pui - rui;
            loss += eui * eui;

            // update bi
            double sgd = eui * (cu + cut) + regB * bi;
            itemBias.add(i, -lRate * sgd);
            loss += regB * bi * bi;

            // update bi,bin(t)
            sgd = eui * (cu + cut) + regB * bit;
            Bit.add(i, bin, -lRate * sgd);
            loss += regB * bit * bit;

            // update cu
            sgd = eui * (bi + bit) + regB * cu;
            Cu.add(u, -lRate * sgd);
            loss += regB * cu * cu;

            // update cut
            sgd = eui * (bi + bit) + regB * cut;
            Cut.add(u, t, -lRate * sgd);
            loss += regB * cut * cut;

            // update bu
            sgd = eui + regB * bu;
            userBias.add(u, -lRate * sgd);
            loss += regB * bu * bu;

            // update au
            sgd = eui * dev_ut + regB * au;
            Alpha.add(u, -lRate * sgd);
            loss += regB * au * au;

            // update but
            sgd = eui + regB * but;
            double delta = but - lRate * sgd;
            But.put(u, t, delta);
            loss += regB * but * but;

            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);
                double puk = P.get(u, k);
                double auk = Auk.get(u, k);
                double pkt = Pkt.get(k, t);

                // update qik
                double pukt = puk + auk * dev_ut + pkt;

                double sum_yk = 0;
                for (int j : Ru)
                    sum_yk += Y.get(j, k);

                sgd = eui * (pukt + wi * sum_yk) + regI * qik;
                Q.add(i, k, -lRate * sgd);
                loss += regI * qik * qik;

                // update puk
                sgd = eui * qik + regU * puk;
                P.add(u, k, -lRate * sgd);
                loss += regU * puk * puk;

                // update auk
                sgd = eui * qik * dev_ut + regU * auk;
                Auk.add(u, k, -lRate * sgd);
                loss += regU * auk * auk;

                // update pkt
                sgd = eui * qik + regU * pkt;
                delta = pkt - lRate * sgd;
                Pkt.put(k, t, delta);
                loss += regU * pkt * pkt;

                // update yjk
                for (int j : Ru) {
                    double yjk = Y.get(j, k);
                    sgd = eui * wi * qik + regI * yjk;
                    Y.add(j, k, -lRate * sgd);
                    loss += regI * yjk * yjk;
                }
            }

            // update w, c and phi
            // e^(-beta_u * |t-tj|) * ((ruj - buj) * wij + cij): eq. (16)
            // we use phi instead of beta since beta is already used for the time deviation in the baseline model
            for (int j : Ru) {
                double e = decay(u, j, t);
                double ruj = trainMatrix.get(u, j);
                double buj = (itemBias.get(i) + Bit.get(i, bin)) * (Cu.get(u) + Cut.get(u, t));
                buj += userBias.get(u) + Alpha.get(u) * dev_ut;
                buj += But.contains(u, t) ? But.get(u, t) : 0;

                // update w
                double wij = W.get(i, j);
                sgd = eui * wi * e * (ruj - buj) + regN * wij;
                W.add(i, j, -lRateN * sgd);
                loss += regI * wij * wij;

                // update c
                double cij = C.get(i, j);
                sgd = eui * wi * e + regN * cij;
                C.add(i, j, -lRateN * sgd);
                loss += regI * cij * cij;

                // update phi
                double phi = Phi.get(u);
                int diff = Math.abs(t - days((long) timeMatrix.get(u, j), minTimestamp));
                sgd = eui * wi * (-1 * diff) * e * ((ruj - buj) * wij + cij) + regN * phi;
                Phi.add(u, -lRateMu * sgd);
                loss += regI * phi * phi;
            }
        }

        loss *= 0.5;

        if (isConverged(iter))
            break;
    }
}

From source file:org.clueminer.eval.external.AdjustedRandCorrected.java

/**
 * Counts the number of items with the same assignment to a <Cluster A, Class X>
 * pair. Instances must include information about their class assignment. This
 * table is sometimes called a contingency table.
 *
 * Classes are in rows, clusters are in columns.
 *
 * @param clustering the clustering to evaluate
 * @return table with counts of items for each (cluster, class) pair
 */
public Table<String, String, Integer> contingencyTable(Clustering<E, C> clustering) {
    // a lookup table for storing correctly / incorrectly classified items
    Table<String, String, Integer> table = newTable();

    //Cluster current;
    Instance inst;
    String cluster, label;
    int cnt;
    for (Cluster<E> current : clustering) {
        for (int i = 0; i < current.size(); i++) {
            inst = current.instance(i);
            cluster = current.getName();
            Object klass = inst.classValue();
            if (klass != null) {
                label = klass.toString();
            } else {
                label = unknownLabel;
            }

            if (table.contains(label, cluster)) {
                cnt = table.get(label, cluster);
            } else {
                cnt = 0;
            }

            cnt++;
            table.put(label, cluster, cnt);
        }
    }
    return table;
}

From source file:com.ggvaidya.scinames.complexquery.ComplexQueryViewController.java

private TableColumn<NameCluster, String> createColumnFromPrecalc(String colName,
        Table<NameCluster, String, Set<String>> precalc) {
    TableColumn<NameCluster, String> column = new TableColumn<>(colName);
    column.cellValueFactoryProperty().set((TableColumn.CellDataFeatures<NameCluster, String> cdf) -> {
        NameCluster nc = cdf.getValue();

        // There might be columns found in some dataset but not in others
        // so we detect those cases here and put in "NA"s instead.
        String output = "NA";
        if (precalc.contains(nc, colName))
            output = precalc.get(nc, colName).stream().collect(Collectors.joining("; "));

        return new ReadOnlyStringWrapper(output);
    });
    column.setPrefWidth(100.0);
    column.setEditable(false);
    return column;
}

From source file:com.ggvaidya.scinames.summary.HigherStabilityView.java

private void generateHigherTaxonomyList(String higherTaxonomyColName) {
    Project project = projectView.getProject();
    DatasetColumn GENUS = DatasetColumn.fakeColumnFor("genus");
    DatasetColumn column;

    if (higherTaxonomyColName.equals(""))
        column = GENUS;
    else if (datasetColumns.contains(DatasetColumn.of(higherTaxonomyColName))) {
        column = DatasetColumn.of(higherTaxonomyColName);
    } else {
        // Don't actually change until we have a valid column name.
        return;
    }

    // Group names by dataset column.
    Table<String, Dataset, Set<Name>> namesByDataset = HashBasedTable.create();

    for (String dsName : datasetNamesInOrder) {
        Dataset ds = datasetNames.get(dsName);

        if (column == GENUS) {
            Map<String, List<Name>> rowsByGenus = ds.getNamesInAllRows().stream()
                    .collect(Collectors.groupingBy(n -> n.getGenus()));

            for (String genus : rowsByGenus.keySet()) {
                namesByDataset.put(genus, ds, new HashSet<>(rowsByGenus.get(genus)));
            }
        } else {
            Map<DatasetRow, Set<Name>> namesByRow = ds.getNamesByRow();

            for (DatasetRow row : namesByRow.keySet()) {
                String colValue = row.get(column);
                if (colValue == null)
                    colValue = "(null)";

                if (!namesByDataset.contains(colValue, ds))
                    namesByDataset.put(colValue, ds, new HashSet<>());

                namesByDataset.get(colValue, ds).addAll(namesByRow.getOrDefault(row, new HashSet<>()));
            }
        }
    }

    // LOGGER.info("namesByDataset = " + namesByDataset);

    // By this point, namesByDataset should be ready to go.
    // So let's fill out precalc.
    precalc.clear();

    for (String rowName : namesByDataset.rowKeySet()) {
        precalc.put(rowName, "HigherTaxon", rowName);

        Set<Name> prevNames = new HashSet<>();

        for (String dsName : datasetNamesInOrder) {
            Dataset ds = datasetNames.get(dsName);
            Set<Name> names = namesByDataset.get(rowName, ds);

            // Missing?! Oh noes.
            if (names == null)
                names = new HashSet<>();

            // For now, let's just note down how many names we have.
            precalc.put(rowName, dsName + "_with_synonymy", calculateDifferenceWithSynonymy(prevNames, names));
            precalc.put(rowName, dsName + "_without_synonymy",
                    calculateDifferenceWithoutSynonymy(prevNames, names));

            // Set up prevNames for next time 'round
            prevNames = names;
        }
    }

    // LOGGER.info("precalc = " + precalc);

    // Tell everybody what we did.
    higherTaxaList.clear();
    higherTaxaList.addAll(namesByDataset.rowKeySet());
    LOGGER.info("higherTaxaList = " + higherTaxaList);

    controller.getTableView().refresh();
}