Example usage for com.google.common.collect Table get

List of usage examples for com.google.common.collect Table get

Introduction

On this page you can find usage examples of com.google.common.collect Table get.

Prototype

V get(@Nullable Object rowKey, @Nullable Object columnKey);

Source Link

Document

Returns the value corresponding to the given row and column keys, or null if no such mapping exists.

Usage

From source file:org.apache.sentry.cli.tools.PermissionsMigrationToolCommon.java

/**
 * Migrates a Sentry policy file: loads the source policy file, transforms each
 * role's privileges via {@code transformPrivileges}, and (unless this is a dry
 * run) writes the updated [roles] section to the configured output file.
 *
 * @throws Exception if the policy file cannot be read, parsed, or written
 */
private void migratePolicyFile() throws Exception {
    Configuration conf = getSentryConf();
    Path sourceFile = new Path(policyFile.get());
    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, sourceFile);
    ProviderBackendContext ctx = new ProviderBackendContext();
    policyFileBackend.initialize(ctx);

    // Roles already migrated, so each role is processed only once even when it
    // is mapped from several groups.
    Set<String> roles = Sets.newHashSet();
    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend.getGroupRolePrivilegeTable();

    Ini output = PolicyFiles.loadFromPath(sourceFile.getFileSystem(conf), sourceFile);
    Ini.Section rolesSection = output.get(PolicyFileConstants.ROLES);

    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
        for (String roleName : policyFileBackend.getRoles(Collections.singleton(groupName),
                ActiveRoleSet.ALL)) {
            if (!roles.contains(roleName)) {
                // Do the actual migration of this role's privilege strings.
                Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
                Collection<String> migrated = transformPrivileges(privileges);

                if (!migrated.isEmpty()) {
                    LOGGER.info("{} For role {} migrating privileges from {} to {}", getDryRunMessage(),
                            roleName, privileges, migrated);
                    if (!dryRun) {
                        // Overwrite the role's privilege string in the in-memory INI.
                        rolesSection.put(roleName, PrivilegeUtils.fromPrivilegeStrings(migrated));
                    }
                }

                roles.add(roleName);
            }
        }
    }

    if (!dryRun) {
        Path targetFile = new Path(outputFile.get());
        PolicyFiles.writeToPath(output, targetFile.getFileSystem(conf), targetFile);
        LOGGER.info("Successfully saved migrated Sentry policy file at {}", outputFile.get());
    }
}

From source file:com.android.builder.internal.SymbolWriter.java

/**
 * Generates the R.java resource class for {@code mPackageName} under
 * {@code mOutFolder}: one nested class per resource type, one constant per
 * resource whose value is present in the values table.
 *
 * @throws IOException if the output directory or file cannot be written
 */
public void write() throws IOException {
    // Build the package directory path, one folder per package segment.
    File outDir = new File(mOutFolder);
    for (String segment : Splitter.on('.').split(mPackageName)) {
        outDir = new File(outDir, segment);
    }
    outDir.mkdirs();
    File outFile = new File(outDir, SdkConstants.FN_RESOURCE_CLASS);

    Closer closer = Closer.create();
    try {
        BufferedWriter out = closer.register(Files.newWriter(outFile, Charsets.UTF_8));

        // File header warning readers off hand edits.
        out.write("/* AUTO-GENERATED FILE.  DO NOT MODIFY.\n");
        out.write(" *\n");
        out.write(" * This class was automatically generated by the\n");
        out.write(" * aapt tool from the resource data it found.  It\n");
        out.write(" * should not be modified by hand.\n");
        out.write(" */\n");

        out.write("package ");
        out.write(mPackageName);
        out.write(";\n\npublic final class R {\n");

        Table<String, String, SymbolEntry> symbols = getAllSymbols();
        Table<String, String, SymbolEntry> values = mValues.getSymbols();

        // Resource types (table rows), sorted for deterministic output.
        List<String> sortedTypes = Lists.newArrayList(symbols.rowKeySet());
        Collections.sort(sortedTypes);

        for (String type : sortedTypes) {
            out.write("\tpublic static final class ");
            out.write(type);
            out.write(" {\n");

            // Resource names within this type, also sorted.
            ArrayList<String> sortedNames = Lists.newArrayList(symbols.row(type).keySet());
            Collections.sort(sortedNames);

            for (String name : sortedNames) {
                // Only emit symbols that have a concrete value in the values Table.
                SymbolEntry value = values.get(type, name);
                if (value != null) {
                    out.write("\t\tpublic static final ");
                    out.write(value.getType());
                    out.write(" ");
                    out.write(value.getName());
                    out.write(" = ");
                    out.write(value.getValue());
                    out.write(";\n");
                }
            }

            out.write("\t}\n");
        }

        out.write("}\n");
    } catch (Throwable e) {
        throw closer.rethrow(e);
    } finally {
        closer.close();
    }
}

From source file:com.przemo.etl.dataproviders.CSVDataProvider.java

/**
 * Saves data to a CSV file. If no headers were provided, default ones are
 * created, if previously defined to include headers.
 *
 * Rows are written in the iteration order of {@code Data.rowKeySet()}, with
 * one field per column key; absent cells are written as empty fields.
 *
 * @param Data table of values to persist; rows map to CSV lines
 * @return true if all rows were written successfully, false on I/O failure
 */
@Override
public boolean saveData(Table Data) {
    OutputStream wr = null;
    try {
        wr = new BufferedOutputStream(new FileOutputStream(this.file));

        if (dh) {
            if (headers == null) {
                headers = defaultHeaders(Data);
            }
            writeSeparatedData(wr, headers);
            System.out.println("Saved headers.");
        }
        Set columnsSet = Data.columnKeySet();
        String[] w = new String[columnsSet.size()];
        int i;
        int cint = 0;

        System.out.println("Start saving rows.");
        for (Object r : Data.rowKeySet()) {
            i = 0;
            for (Object c : columnsSet) {
                Object v = Data.get(r, c);
                // BUG FIX: always assign the cell. Previously a null cell left the
                // buffer untouched, so the value from the prior row leaked into
                // this row's output. Absent cells now produce empty fields.
                w[i] = (v != null) ? v.toString() : "";
                i++;
            }
            cint++;
            writeSeparatedData(wr, w);
            // Progress every 1000 rows; the former per-row printlns were debug
            // leftovers that defeated this throttled progress report.
            if (cint % 1000 == 0) {
                System.out.println("Saved row no. " + cint);
            }
        }
        wr.flush();
        return true;
    } catch (IOException ex) {
        Logger.getLogger(CSVDataProvider.class.getName()).log(Level.SEVERE, null, ex);
        return false;
    } finally {
        if (wr != null) {
            try {
                wr.close();
            } catch (IOException ex) {
                Logger.getLogger(CSVDataProvider.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
    }
}

From source file:org.clueminer.eval.external.AdjustedRandCorrected.java

/**
 * Builds the contingency table for a clustering: for each pair
 * &lt;class label, cluster name&gt; it counts how many instances with that class
 * assignment landed in that cluster. Instances must carry class information;
 * instances without a class value are tallied under {@code unknownLabel}.
 *
 * Classes are in rows, Clusters are in columns.
 *
 * @param clustering the clustering whose assignments are tallied
 * @return table with counts of items for each (class, cluster) pair
 */
public Table<String, String, Integer> contingencyTable(Clustering<E, C> clustering) {
    // Lookup table accumulating correctly / incorrectly classified items.
    Table<String, String, Integer> table = newTable();

    for (Cluster<E> current : clustering) {
        for (int idx = 0; idx < current.size(); idx++) {
            Instance inst = current.instance(idx);
            String cluster = current.getName();
            Object klass = inst.classValue();
            String label = (klass != null) ? klass.toString() : unknownLabel;

            // Read the running count for this (label, cluster) cell, if any.
            int cnt;
            if (table.contains(label, cluster)) {
                cnt = table.get(label, cluster);
            } else {
                cnt = 0;
            }
            table.put(label, cluster, cnt + 1);
        }
    }
    return table;
}

From source file:net.librec.recommender.context.rating.TimeSVDRecommender.java

/**
 * Trains the timeSVD++ model by stochastic gradient descent. For every
 * observed rating (u, i) the prediction combines: a day-binned item bias
 * scaled by c_u and c_{u,t}, a user bias with a linear drift term and a
 * lazily-initialized per-day offset b_{u,t}, implicit feedback from the
 * user's rated items, and time-dependent user factors. All parameters are
 * then updated against the prediction error with L2 regularization.
 *
 * @throws LibrecException declared by the framework training contract
 */
@Override
protected void trainModel() throws LibrecException {
    for (int iter = 1; iter <= numIterations; iter++) {
        loss = 0;

        for (MatrixEntry me : trainMatrix) {
            int u = me.row();
            int i = me.column();
            double rui = me.get();

            long timestamp = (long) timeMatrix.get(u, i);
            // day t of this rating, relative to the earliest timestamp
            int t = days(timestamp, minTimestamp);
            int bin = bin(t);
            double dev_ut = dev(u, t);

            double bi = itemBiases.get(i);
            double bit = Bit.get(i, bin);
            double bu = userBiases.get(u);

            double cu = Cu.get(u);
            double cut = Cut.get(u, t);

            // lazy initialization of the per-day user bias b_{u,t}
            if (!But.contains(u, t))
                But.put(u, t, Randoms.random());
            double but = But.get(u, t);

            double au = Alpha.get(u); // alpha_u

            double pui = globalMean + (bi + bit) * (cu + cut); // mu + bi(t)
            pui += bu + au * dev_ut + but; // bu(t)

            // qi * yj: implicit feedback from the set Ru of items u rated
            List<Integer> Ru = null;
            try {
                Ru = userItemsCache.get(u);
            } catch (ExecutionException e) {
                e.printStackTrace();
            }

            double sum_y = 0;
            for (int j : Ru) {
                sum_y += DenseMatrix.rowMult(Y, j, Q, i);
            }
            // |Ru|^-0.5 normalizes the implicit-feedback contribution
            double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
            pui += sum_y * wi;

            // qi * pu(t): per-user table of day-specific factor offsets
            if (!Pukt.containsKey(u)) {
                Table<Integer, Integer, Double> data = HashBasedTable.create();
                Pukt.put(u, data);
            }

            Table<Integer, Integer, Double> Pkt = Pukt.get(u);
            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);

                // lazy initialization of the (factor k, day t) offset
                if (!Pkt.contains(k, t))
                    Pkt.put(k, t, Randoms.random());

                double puk = P.get(u, k) + Auk.get(u, k) * dev_ut + Pkt.get(k, t);

                pui += puk * qik;
            }

            double eui = pui - rui;
            loss += eui * eui;

            // update bi
            double sgd = eui * (cu + cut) + regBias * bi;
            itemBiases.add(i, -learnRate * sgd);
            loss += regBias * bi * bi;

            // update bi,bin(t)
            sgd = eui * (cu + cut) + regBias * bit;
            Bit.add(i, bin, -learnRate * sgd);
            loss += regBias * bit * bit;

            // update cu
            sgd = eui * (bi + bit) + regBias * cu;
            Cu.add(u, -learnRate * sgd);
            loss += regBias * cu * cu;

            // update cut
            sgd = eui * (bi + bit) + regBias * cut;
            Cut.add(u, t, -learnRate * sgd);
            loss += regBias * cut * cut;

            // update bu
            sgd = eui + regBias * bu;
            userBiases.add(u, -learnRate * sgd);
            loss += regBias * bu * bu;

            // update au
            sgd = eui * dev_ut + regBias * au;
            Alpha.add(u, -learnRate * sgd);
            loss += regBias * au * au;

            // update but (stored table value, so put the updated entry back)
            sgd = eui + regBias * but;
            double delta = but - learnRate * sgd;
            But.put(u, t, delta);
            loss += regBias * but * but;

            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);
                double puk = P.get(u, k);
                double auk = Auk.get(u, k);
                double pkt = Pkt.get(k, t);

                // update qik against the full time-aware user factor
                double pukt = puk + auk * dev_ut + pkt;

                double sum_yk = 0;
                for (int j : Ru)
                    sum_yk += Y.get(j, k);

                sgd = eui * (pukt + wi * sum_yk) + regItem * qik;
                Q.add(i, k, -learnRate * sgd);
                loss += regItem * qik * qik;

                // update puk
                sgd = eui * qik + regUser * puk;
                P.add(u, k, -learnRate * sgd);
                loss += regUser * puk * puk;

                // update auk
                sgd = eui * qik * dev_ut + regUser * auk;
                Auk.add(u, k, -learnRate * sgd);
                loss += regUser * auk * auk;

                // update pkt
                sgd = eui * qik + regUser * pkt;
                delta = pkt - learnRate * sgd;
                Pkt.put(k, t, delta);
                loss += regUser * pkt * pkt;

                // update yjk for every item j the user rated
                for (int j : Ru) {
                    double yjk = Y.get(j, k);
                    sgd = eui * wi * qik + regItem * yjk;
                    Y.add(j, k, -learnRate * sgd);
                    loss += regItem * yjk * yjk;
                }
            }
        }

        loss *= 0.5;
        if (isConverged(iter))
            break;
    }
}

From source file:org.apache.sentry.cli.tools.SentryConfigToolSolr.java

/**
 * Convert policy file to solrctl commands -- based on SENTRY-480.
 * Optionally validates the policy and checks compatibility first; when
 * {@code importPolicy} is false this is a dry run that only logs the
 * commands that would be issued.
 *
 * @param component     Sentry component name the privileges belong to
 * @param service       service instance the privileges apply to
 * @param requestorName user on whose behalf requests are issued
 * @param conf          configuration used to build the file backend
 * @param client        generic Sentry service client receiving the commands
 * @param policyFile    path of the INI policy file to read
 * @param validate      whether to validate the policy before importing
 * @param importPolicy  whether to actually apply changes (false = dry run)
 * @param checkCompat   whether to run the compatibility check
 * @throws Exception on parse, validation, or service errors
 */
private void convertINIToSentryServiceCmds(String component, String service, String requestorName,
        Configuration conf, SentryGenericServiceClient client, String policyFile, boolean validate,
        boolean importPolicy, boolean checkCompat) throws Exception {

    //instantiate a file providerBackend for parsing
    LOGGER.info("Reading policy file at: " + policyFile);
    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, policyFile);
    ProviderBackendContext context = new ProviderBackendContext();
    context.setValidators(SolrPrivilegeModel.getInstance().getPrivilegeValidators());
    policyFileBackend.initialize(context);
    if (validate) {
        validatePolicy(policyFileBackend);
    }

    if (checkCompat) {
        checkCompat(policyFileBackend);
    }

    //import the relations about group,role and privilege into the DB store
    // Roles already created, so each role is created at most once.
    Set<String> roles = Sets.newHashSet();
    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend.getGroupRolePrivilegeTable();
    GenericPrivilegeConverter converter = new GenericPrivilegeConverter(component, service, false);

    // Iterates the full cross product of groups (rows) and roles (columns);
    // pairs without a privilege mapping yield null below and are skipped.
    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
        for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
            if (!roles.contains(roleName)) {
                LOGGER.info(dryRunMessage(importPolicy) + "Creating role: " + roleName.toLowerCase(Locale.US));
                if (importPolicy) {
                    client.createRoleIfNotExist(requestorName, roleName, component);
                }
                roles.add(roleName);
            }

            Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
            if (privileges == null) {
                continue;
            }
            LOGGER.info(dryRunMessage(importPolicy) + "Adding role: " + roleName.toLowerCase(Locale.US)
                    + " to group: " + groupName);
            if (importPolicy) {
                client.grantRoleToGroups(requestorName, roleName, component, Sets.newHashSet(groupName));
            }

            for (String permission : privileges) {
                String action = null;

                // Scan the permission's authorizables for an explicit action.
                for (String authorizable : SentryConstants.AUTHORIZABLE_SPLITTER.trimResults()
                        .split(permission)) {
                    KeyValue kv = new KeyValue(authorizable);
                    String key = kv.getKey();
                    String value = kv.getValue();
                    if ("action".equalsIgnoreCase(key)) {
                        action = value;
                    }
                }

                // Service doesn't support not specifying action
                if (action == null) {
                    permission += "->action=" + Action.ALL;
                }
                LOGGER.info(dryRunMessage(importPolicy) + "Adding permission: " + permission + " to role: "
                        + roleName.toLowerCase(Locale.US));
                if (importPolicy) {
                    client.grantPrivilege(requestorName, roleName, component, converter.fromString(permission));
                }
            }
        }
    }
}

From source file:org.apache.sentry.provider.db.generic.tools.SentryConfigToolSolr.java

/**
 * Convert policy file to solrctl commands -- based on SENTRY-480.
 * Variant using the Search privilege model and the Solr-specific privilege
 * converter. Optionally validates the policy and checks compatibility first;
 * when {@code importPolicy} is false this is a dry run that only logs the
 * commands that would be issued.
 *
 * @param component     Sentry component name the privileges belong to
 * @param service       service instance the privileges apply to
 * @param requestorName user on whose behalf requests are issued
 * @param conf          configuration used to build the file backend
 * @param client        generic Sentry service client receiving the commands
 * @param policyFile    path of the INI policy file to read
 * @param validate      whether to validate the policy before importing
 * @param importPolicy  whether to actually apply changes (false = dry run)
 * @param checkCompat   whether to run the compatibility check
 * @throws Exception on parse, validation, or service errors
 */
private void convertINIToSentryServiceCmds(String component, String service, String requestorName,
        Configuration conf, SentryGenericServiceClient client, String policyFile, boolean validate,
        boolean importPolicy, boolean checkCompat) throws Exception {

    //instantiate a file providerBackend for parsing
    LOGGER.info("Reading policy file at: " + policyFile);
    SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, policyFile);
    ProviderBackendContext context = new ProviderBackendContext();
    context.setValidators(SearchPrivilegeModel.getInstance().getPrivilegeValidators());
    policyFileBackend.initialize(context);
    if (validate) {
        validatePolicy(policyFileBackend);
    }

    if (checkCompat) {
        checkCompat(policyFileBackend);
    }

    //import the relations about group,role and privilege into the DB store
    // Roles already created, so each role is created at most once.
    Set<String> roles = Sets.newHashSet();
    Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend.getGroupRolePrivilegeTable();
    SolrTSentryPrivilegeConverter converter = new SolrTSentryPrivilegeConverter(component, service, false);

    // Iterates the full cross product of groups (rows) and roles (columns);
    // pairs without a privilege mapping yield null below and are skipped.
    for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
        for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
            if (!roles.contains(roleName)) {
                LOGGER.info(dryRunMessage(importPolicy) + "Creating role: " + roleName.toLowerCase(Locale.US));
                if (importPolicy) {
                    client.createRoleIfNotExist(requestorName, roleName, component);
                }
                roles.add(roleName);
            }

            Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
            if (privileges == null) {
                continue;
            }
            LOGGER.info(dryRunMessage(importPolicy) + "Adding role: " + roleName.toLowerCase(Locale.US)
                    + " to group: " + groupName);
            if (importPolicy) {
                client.addRoleToGroups(requestorName, roleName, component, Sets.newHashSet(groupName));
            }

            for (String permission : privileges) {
                String action = null;

                // Scan the permission's authorizables for an explicit action.
                for (String authorizable : SentryConstants.AUTHORIZABLE_SPLITTER.trimResults()
                        .split(permission)) {
                    KeyValue kv = new KeyValue(authorizable);
                    String key = kv.getKey();
                    String value = kv.getValue();
                    if ("action".equalsIgnoreCase(key)) {
                        action = value;
                    }
                }

                // Service doesn't support not specifying action
                if (action == null) {
                    permission += "->action=" + Action.ALL;
                }
                LOGGER.info(dryRunMessage(importPolicy) + "Adding permission: " + permission + " to role: "
                        + roleName.toLowerCase(Locale.US));
                if (importPolicy) {
                    client.grantPrivilege(requestorName, roleName, component, converter.fromString(permission));
                }
            }
        }
    }
}

From source file:i5.las2peer.services.recommender.librec.rating.TimeNeighSVD.java

/**
 * Trains the time-aware neighborhood SVD model by stochastic gradient
 * descent. Extends timeSVD++ with an item-neighborhood term: each rating's
 * prediction additionally sums exponentially time-decayed contributions
 * ((ruj - buj) * w_ij + c_ij) over the items j the user rated. All biases,
 * factors, and neighborhood weights are updated against the prediction
 * error with L2 regularization.
 *
 * @throws Exception propagated from the framework / cache lookups
 */
@Override
protected void buildModel() throws Exception {
    Logs.info("{}{} learn model parameters ...", new Object[] { algoName, foldInfo });
    for (int iter = 1; iter <= numIters; iter++) {
        loss = 0;

        for (MatrixEntry me : trainMatrix) {
            int u = me.row();
            int i = me.column();
            double rui = me.get();

            long timestamp = (long) timeMatrix.get(u, i);
            // day t of this rating, relative to the earliest timestamp
            int t = days(timestamp, minTimestamp);
            int bin = bin(t);
            double dev_ut = dev(u, t);

            double bi = itemBias.get(i);
            double bit = Bit.get(i, bin);
            double bu = userBias.get(u);

            double cu = Cu.get(u);
            double cut = Cut.get(u, t);

            // lazy initialization of the per-day user bias b_{u,t}
            if (!But.contains(u, t))
                But.put(u, t, Randoms.random());
            double but = But.get(u, t);

            double au = Alpha.get(u); // alpha_u

            double pui = globalMean + (bi + bit) * (cu + cut); // mu + bi(t)
            pui += bu + au * dev_ut + but; // bu(t)

            // qi * yj: implicit feedback from the set Ru of items u rated
            List<Integer> Ru = userItemsCache.get(u);
            double sum_y = 0;
            for (int j : Ru) {
                sum_y += DenseMatrix.rowMult(Y, j, Q, i);
            }
            // |Ru|^-0.5 normalizes the implicit-feedback contribution
            double wi = Ru.size() > 0 ? Math.pow(Ru.size(), -0.5) : 0;
            pui += sum_y * wi;

            // qi * pu(t): per-user table of day-specific factor offsets
            if (!Pukt.containsKey(u)) {
                Table<Integer, Integer, Double> data = HashBasedTable.create();
                Pukt.put(u, data);
            }

            Table<Integer, Integer, Double> Pkt = Pukt.get(u);
            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);

                // lazy initialization of the (factor k, day t) offset
                if (!Pkt.contains(k, t))
                    Pkt.put(k, t, Randoms.random());

                double puk = P.get(u, k) + Auk.get(u, k) * dev_ut + Pkt.get(k, t);

                pui += puk * qik;
            }

            // neighborhood term: time-decayed deviation of each rated item j
            for (int j : Ru) {
                double e = decay(u, j, t);
                double ruj = trainMatrix.get(u, j);
                // NOTE(review): buj is built from item i's biases (itemBias.get(i),
                // Bit.get(i, bin)) while iterating neighbor j — it may have been
                // intended to use j's baseline here; confirm against the reference.
                double buj = (itemBias.get(i) + Bit.get(i, bin)) * (Cu.get(u) + Cut.get(u, t));
                buj += userBias.get(u) + Alpha.get(u) * dev_ut;
                buj += But.contains(u, t) ? But.get(u, t) : 0;

                pui += e * ((ruj - buj) * W.get(i, j) + C.get(i, j)) * wi;
            }

            double eui = pui - rui;
            loss += eui * eui;

            // update bi
            double sgd = eui * (cu + cut) + regB * bi;
            itemBias.add(i, -lRate * sgd);
            loss += regB * bi * bi;

            // update bi,bin(t)
            sgd = eui * (cu + cut) + regB * bit;
            Bit.add(i, bin, -lRate * sgd);
            loss += regB * bit * bit;

            // update cu
            sgd = eui * (bi + bit) + regB * cu;
            Cu.add(u, -lRate * sgd);
            loss += regB * cu * cu;

            // update cut
            sgd = eui * (bi + bit) + regB * cut;
            Cut.add(u, t, -lRate * sgd);
            loss += regB * cut * cut;

            // update bu
            sgd = eui + regB * bu;
            userBias.add(u, -lRate * sgd);
            loss += regB * bu * bu;

            // update au
            sgd = eui * dev_ut + regB * au;
            Alpha.add(u, -lRate * sgd);
            loss += regB * au * au;

            // update but (stored table value, so put the updated entry back)
            sgd = eui + regB * but;
            double delta = but - lRate * sgd;
            But.put(u, t, delta);
            loss += regB * but * but;

            for (int k = 0; k < numFactors; k++) {
                double qik = Q.get(i, k);
                double puk = P.get(u, k);
                double auk = Auk.get(u, k);
                double pkt = Pkt.get(k, t);

                // update qik against the full time-aware user factor
                double pukt = puk + auk * dev_ut + pkt;

                double sum_yk = 0;
                for (int j : Ru)
                    sum_yk += Y.get(j, k);

                sgd = eui * (pukt + wi * sum_yk) + regI * qik;
                Q.add(i, k, -lRate * sgd);
                loss += regI * qik * qik;

                // update puk
                sgd = eui * qik + regU * puk;
                P.add(u, k, -lRate * sgd);
                loss += regU * puk * puk;

                // update auk
                sgd = eui * qik * dev_ut + regU * auk;
                Auk.add(u, k, -lRate * sgd);
                loss += regU * auk * auk;

                // update pkt
                sgd = eui * qik + regU * pkt;
                delta = pkt - lRate * sgd;
                Pkt.put(k, t, delta);
                loss += regU * pkt * pkt;

                // update yjk for every item j the user rated
                for (int j : Ru) {
                    double yjk = Y.get(j, k);
                    sgd = eui * wi * qik + regI * yjk;
                    Y.add(j, k, -lRate * sgd);
                    loss += regI * yjk * yjk;
                }
            }

            // update w, c and mu
            // e^(-beta_u * |t-tj|)(ruj - buj) * wij + cij): eq. (16)
            // we use mu instead of beta since beta is already used for the time deviation in the baseline model
            for (int j : Ru) {
                double e = decay(u, j, t);
                double ruj = trainMatrix.get(u, j);
                // NOTE(review): same i-vs-j concern as in the prediction loop above.
                double buj = (itemBias.get(i) + Bit.get(i, bin)) * (Cu.get(u) + Cut.get(u, t));
                buj += userBias.get(u) + Alpha.get(u) * dev_ut;
                buj += But.contains(u, t) ? But.get(u, t) : 0;

                // update w
                double wij = W.get(i, j);
                sgd = eui * wi * e * (ruj - buj) + regN * wij;
                W.add(i, j, -lRateN * sgd);
                loss += regI * wij * wij;

                // update c
                double cij = C.get(i, j);
                sgd = eui * wi * e + regN * cij;
                C.add(i, j, -lRateN * sgd);
                loss += regI * cij * cij;

                // update phi (per-user decay rate parameter)
                double phi = Phi.get(u);
                int diff = Math.abs(t - days((long) timeMatrix.get(u, j), minTimestamp));
                sgd = eui * wi * (-1 * diff) * e * ((ruj - buj) * wij + cij) + regN * phi;
                Phi.add(u, -lRateMu * sgd);
                loss += regI * phi * phi;
            }
        }

        loss *= 0.5;

        if (isConverged(iter))
            break;
    }
}

From source file:de.tum.bgu.msm.models.demography.DefaultMarriageModel.java

/**
 * Partitions the given persons into the two sides of the marriage market:
 * those actively looking for a partner this period, and a pool of potential
 * partners bucketed by age (16..100) and gender.
 *
 * @param persons candidate persons to screen
 * @return the assembled MarriageMarket
 */
private MarriageMarket defineMarriageMarket(Collection<Person> persons) {

    LOGGER.info("Defining Marriage Market");

    final List<Person> activeSeekers = new ArrayList<>();
    final Table<Integer, Gender, List<Person>> poolByAgeAndGender = ArrayTable.create(
            ContiguousSet.create(Range.closed(16, 100), DiscreteDomain.integers()),
            Arrays.asList(Gender.values()));

    for (final Person person : persons) {
        if (!ruleGetMarried(person)) {
            continue;
        }
        final double marriageProbability = getMarryProb(person);
        if (SiloUtil.getRandomNumberAsDouble() <= marriageProbability) {
            activeSeekers.add(person);
        } else if (isQualifiedAsPossiblePartner(person)) {
            final List<Person> bucket = poolByAgeAndGender.get(person.getAge(), person.getGender());
            if (bucket == null) {
                // ArrayTable pre-allocates every cell with a null value; the first
                // person hitting this (age, gender) cell installs the list.
                poolByAgeAndGender.put(person.getAge(), person.getGender(), Lists.newArrayList(person));
            } else {
                bucket.add(person);
            }
        }
    }
    LOGGER.info(activeSeekers.size() + " persons actively looking for partner");
    return new MarriageMarket(activeSeekers, poolByAgeAndGender);
}

From source file:edu.mit.streamjit.impl.compiler2.WorkerActor.java

/**
 * Sets up Actor connections based on the worker's predecessor/successor
 * relationships, creating TokenActors and Storages as required. This method
 * depends on all Storage objects initially being single-input, single-output,
 * and all Tokens being single-input, single-output (which they should always
 * be by their nature).
 * @param workers an immutable map of Workers to their Actors; workers not
 * in the map are not in this blob
 * @param tokens a map of Tokens to their Actors, being constructed as we go
 * @param storage a table of (upstream, downstream) Actor to the Storage
 * that connects them, being constructed as we go
 * @param inputTokenId an int-by-value containing the next input TokenActor id, to be
 * incremented after use
 * @param outputTokenId an int-by-value containing the next output TokenActor id, to be
 * decremented after use
 */
public void connect(ImmutableMap<Worker<?, ?>, WorkerActor> workers, Map<Token, TokenActor> tokens,
        Table<Actor, Actor, Storage> storage, int[] inputTokenId, int[] outputTokenId) {
    // --- Upstream side ---
    List<? extends Worker<?, ?>> upstream = Workers.getPredecessors(worker);
    if (upstream.isEmpty()) {
        // No predecessors: this worker consumes the blob's overall input.
        Blob.Token token = Blob.Token.createOverallInputToken(worker);
        TokenActor tokenActor = new TokenActor(token, inputTokenId[0]++);
        tokens.put(token, tokenActor);
        Storage link = new Storage(tokenActor, this);
        inputs().add(link);
        tokenActor.outputs().add(link);
        storage.put(tokenActor, this, link);
    }
    for (Worker<?, ?> upWorker : upstream) {
        Actor pred = workers.get(upWorker);
        if (pred == null) {
            // Predecessor lives outside this blob: bridge it with a TokenActor.
            Token token = new Blob.Token(upWorker, worker());
            pred = new TokenActor(token, inputTokenId[0]++);
            tokens.put(token, (TokenActor) pred);
        }
        Storage link = storage.get(pred, this);
        if (link == null) {
            link = new Storage(pred, this);
            storage.put(pred, this, link);
        }
        inputs().add(link);
        if (pred instanceof TokenActor)
            pred.outputs().add(link);
    }

    // --- Downstream side (mirror of the above) ---
    List<? extends Worker<?, ?>> downstream = Workers.getSuccessors(worker);
    if (downstream.isEmpty()) {
        // No successors: this worker produces the blob's overall output.
        Blob.Token token = Blob.Token.createOverallOutputToken(worker);
        TokenActor tokenActor = new TokenActor(token, outputTokenId[0]--);
        tokens.put(token, tokenActor);
        Storage link = new Storage(this, tokenActor);
        outputs().add(link);
        tokenActor.inputs().add(link);
        storage.put(this, tokenActor, link);
    }
    for (Worker<?, ?> downWorker : downstream) {
        Actor succ = workers.get(downWorker);
        if (succ == null) {
            // Successor lives outside this blob: bridge it with a TokenActor.
            Token token = new Blob.Token(worker(), downWorker);
            succ = new TokenActor(token, outputTokenId[0]--);
            tokens.put(token, (TokenActor) succ);
        }
        Storage link = storage.get(this, succ);
        if (link == null) {
            link = new Storage(this, succ);
            storage.put(this, succ, link);
        }
        outputs().add(link);
        if (succ instanceof TokenActor)
            succ.inputs().add(link);
    }

    // Every connection starts with the identity index function.
    inputIndexFunctions().addAll(Collections.nCopies(inputs().size(), IndexFunction.identity()));
    outputIndexFunctions().addAll(Collections.nCopies(outputs().size(), IndexFunction.identity()));
}