Example usage for com.google.common.collect Table.put

Introduction

This page collects usage examples for the com.google.common.collect Table.put method.

Prototype

@Nullable
V put(R rowKey, C columnKey, V value);

Document

Associates the specified value with the specified keys. Returns the value previously associated with the keys, or null if no such mapping existed.
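
A minimal, self-contained sketch of that contract is shown below; the class and variable names (TablePutExample, distances) are illustrative and not taken from any of the projects listed under Usage.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class TablePutExample {
    public static void main(String[] args) {
        // HashBasedTable is a mutable Table implementation that supports put.
        Table<String, String, Integer> distances = HashBasedTable.create();

        // No previous mapping for ("Paris", "Lyon"), so put returns null.
        Integer previous = distances.put("Paris", "Lyon", 450);
        System.out.println(previous); // null

        // Putting again with the same key pair replaces the value and returns the old one.
        previous = distances.put("Paris", "Lyon", 465);
        System.out.println(previous); // 450
        System.out.println(distances.get("Paris", "Lyon")); // 465
    }
}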

Usage

From source file:com.przemo.myloader.DateTransformation.java

public DateTransformation(final int ShiftHours) {

    this.transformation = new Function() {

        @Override
        public Object apply(Object f) {
            if (f instanceof Table) {
                Table d = (Table) f;
                for (Object o : d.rowKeySet()) {
                    try {
                        int[] dateInt = makeDate(String.valueOf(d.get(o, "Column 0")));
                        shiftHours(dateInt, ShiftHours);
                        d.put(o, "date", dateInt[0]);
                        d.put(o, "time", dateInt[1]);
                    } catch (Exception ex) {
                        Logger.getLogger(Application.class.getName()).log(Level.SEVERE, null, ex);
                    }
                }
            }
            return f;
        }

        private int[] makeDate(String dstr) throws Exception {
            int[] r = new int[2];
            String[] d = dstr.split(" ");
            if (d.length != 2) {
                throw new Exception("Wrong date format!");
            } else {
                r[0] = Integer.parseInt(d[0]);
                r[1] = Integer.parseInt(d[1]);
            }
            return r;
        }
    };
}

From source file:heros.solver.SummaryFunctions.java

/**
 * Inserts a summary function.
 * @param callSite The call site with which this function is associated.
 * @param sourceVal The source value at the call site. 
 * @param retSite The return site (in the caller) with which this function is associated.
 * @param targetVal The target value at the return site.
 * @param function The edge function used to compute V-type values from the source node to the target node.  
 */
public synchronized void insertFunction(N callSite, D sourceVal, N retSite, D targetVal,
        EdgeFunction<V> function) {
    assert callSite != null;
    assert sourceVal != null;
    assert retSite != null;
    assert targetVal != null;
    assert function != null;

    Table<N, D, EdgeFunction<V>> targetAndTargetValToFunction = table.get(callSite, sourceVal);
    if (targetAndTargetValToFunction == null) {
        targetAndTargetValToFunction = HashBasedTable.create();
        table.put(callSite, sourceVal, targetAndTargetValToFunction);
    }
    targetAndTargetValToFunction.put(retSite, targetVal, function);
}

From source file:es.usc.citius.composit.wsc08.data.matcher.WSCMatchGraph.java

public WSCMatchGraph(HierarchicalKnowledgeBase kb) {
    this.kb = kb;
    // Build a table using the kb and using exact/plugin match.
    Stopwatch w = Stopwatch.createStarted();
    Table<Concept, Concept, Boolean> table = HashBasedTable.create();
    for (Concept source : kb.getConcepts()) {
        Set<Concept> set = new HashSet<Concept>(kb.getSuperclasses(source));
        set.add(source);
        for (Concept target : set) {
            table.put(source, target, true);
        }
    }
    this.matchGraph = new HashMatchGraph<Concept, Boolean>(new MatchTable<Concept, Boolean>(table));
    logger.debug("MatchGraph computed in {}", w.stop().toString());
}

From source file:heros.solver.JumpFunctions.java

/**
 * Records a jump function. The source statement is implicit.
 * @see PathEdge
 */
public synchronized void addFunction(D sourceVal, N target, D targetVal, EdgeFunction<L> function) {
    assert sourceVal != null;
    assert target != null;
    assert targetVal != null;
    assert function != null;

    //we do not store the default function (all-top)
    if (function.equalTo(allTop))
        return;

    Map<D, EdgeFunction<L>> sourceValToFunc = nonEmptyReverseLookup.get(target, targetVal);
    if (sourceValToFunc == null) {
        sourceValToFunc = new LinkedHashMap<D, EdgeFunction<L>>();
        nonEmptyReverseLookup.put(target, targetVal, sourceValToFunc);
    }
    sourceValToFunc.put(sourceVal, function);

    Map<D, EdgeFunction<L>> targetValToFunc = nonEmptyForwardLookup.get(sourceVal, target);
    if (targetValToFunc == null) {
        targetValToFunc = new LinkedHashMap<D, EdgeFunction<L>>();
        nonEmptyForwardLookup.put(sourceVal, target, targetValToFunc);
    }
    targetValToFunc.put(targetVal, function);

    Table<D, D, EdgeFunction<L>> table = nonEmptyLookupByTargetNode.get(target);
    if (table == null) {
        table = HashBasedTable.create();
        nonEmptyLookupByTargetNode.put(target, table);
    }
    table.put(sourceVal, targetVal, function);
}

From source file:com.torodb.torod.db.postgresql.meta.routines.QueryRoutine.java

@Nonnull
private static List<SplitDocument> translateDocuments(CollectionSchema colSchema, int expectedDocs,
        ResultSet rs) {
    try {
        List<SplitDocument> result = Lists.newArrayListWithCapacity(expectedDocs);

        Table<Integer, Integer, String> docInfo = HashBasedTable.create();

        Integer lastDocId = null;
        Integer structureId = null;
        while (rs.next()) {
            int docId = rs.getInt(DOC_ID);
            if (lastDocId == null || lastDocId != docId) {
                if (lastDocId != null) { //if this is not the first iteration
                    SplitDocument doc = processDocument(colSchema, lastDocId, structureId, docInfo);
                    result.add(doc);
                }
                lastDocId = docId;
                structureId = null;
                assert docInfo.isEmpty();
            }

            Object typeId = rs.getObject(TYPE_ID);
            Object index = rs.getObject(INDEX);
            String json = rs.getString(_JSON);

            if (typeId != null) { //subdocument
                assert typeId instanceof Integer;
                assert index == null || index instanceof Integer;
                assert json != null;

                if (index == null) {
                    index = 0;
                }

                docInfo.put((Integer) typeId, (Integer) index, json);
            } else { //metainfo
                assert index != null;
                assert json == null;

                structureId = (Integer) index;
            }
        }
        if (lastDocId != null) {
            SplitDocument doc = processDocument(colSchema, lastDocId, structureId, docInfo);
            result.add(doc);
        }

        return result;
    } catch (SQLException ex) {
        //TODO: Change exception
        throw new RuntimeException(ex);
    }
}

From source file:alys.NaiveBayes.java

public void init(ArrayList<ArrayList<String>> data) {
    // Set data size
    rowSize = data.get(0).size();
    colSize = data.size();

    modelTable = new Table[colSize - 1];
    ArrayList<String> classRow = data.get(data.size() - 1);

    // Set classNames
    classNames = new HashSet<String>(Arrays.asList(classRow.toArray(new String[classRow.size()])))
            .toArray(new String[0]);

    // Count how many times each class occurs
    Map<String, Integer> classNameCount;
    classNameCount = countClassName(classRow);

    // Initialize each Table in modelTable
    for (int i = 0; i < modelTable.length; i++) {
        modelTable[i] = HashBasedTable.create();
    }

    // Count attribute-value occurrences per class
    for (int i = 0; i < colSize - 1; ++i) {
        for (int j = 0; j < rowSize; ++j) {
            if (modelTable[i].contains(data.get(i).get(j), classRow.get(j))) {
                Double count = modelTable[i].get(data.get(i).get(j), classRow.get(j));
                modelTable[i].put(data.get(i).get(j), classRow.get(j), count + 1);
            } else {
                modelTable[i].put(data.get(i).get(j), classRow.get(j), 1.0);
            }
        }
    }

    // Compute the conditional probability of each attribute value given the class
    for (Table<String, String, Double> a : modelTable) {
        for (Table.Cell<String, String, Double> b : a.cellSet()) {
            a.put(b.getRowKey(), b.getColumnKey(), b.getValue() / classNameCount.get(b.getColumnKey()));
        }
    }
}

From source file:com.google.gerrit.server.ApprovalCopier.java

private Iterable<PatchSetApproval> getForPatchSet(ReviewDb db, ChangeControl ctl, PatchSet ps,
        Iterable<PatchSetApproval> dontCopy) throws OrmException {
    checkNotNull(ps, "ps should not be null");
    ChangeData cd = changeDataFactory.create(db, ctl);
    try {
        ProjectState project = projectCache.checkedGet(cd.change().getDest().getParentKey());
        ListMultimap<PatchSet.Id, PatchSetApproval> all = cd.approvals();
        checkNotNull(all, "all should not be null");

        Table<String, Account.Id, PatchSetApproval> wontCopy = HashBasedTable.create();
        for (PatchSetApproval psa : dontCopy) {
            wontCopy.put(psa.getLabel(), psa.getAccountId(), psa);
        }

        Table<String, Account.Id, PatchSetApproval> byUser = HashBasedTable.create();
        for (PatchSetApproval psa : all.get(ps.getId())) {
            if (!wontCopy.contains(psa.getLabel(), psa.getAccountId())) {
                byUser.put(psa.getLabel(), psa.getAccountId(), psa);
            }
        }

        TreeMap<Integer, PatchSet> patchSets = getPatchSets(cd);

        try (Repository repo = repoManager.openRepository(project.getProject().getNameKey());
                RevWalk rw = new RevWalk(repo)) {
            // Walk patch sets strictly less than current in descending order.
            Collection<PatchSet> allPrior = patchSets.descendingMap().tailMap(ps.getId().get(), false).values();
            for (PatchSet priorPs : allPrior) {
                List<PatchSetApproval> priorApprovals = all.get(priorPs.getId());
                if (priorApprovals.isEmpty()) {
                    continue;
                }

                ChangeKind kind = changeKindCache.getChangeKind(project.getProject().getNameKey(), rw,
                        repo.getConfig(), ObjectId.fromString(priorPs.getRevision().get()),
                        ObjectId.fromString(ps.getRevision().get()));

                for (PatchSetApproval psa : priorApprovals) {
                    if (wontCopy.contains(psa.getLabel(), psa.getAccountId())) {
                        continue;
                    }
                    if (byUser.contains(psa.getLabel(), psa.getAccountId())) {
                        continue;
                    }
                    if (!canCopy(project, psa, ps.getId(), kind)) {
                        wontCopy.put(psa.getLabel(), psa.getAccountId(), psa);
                        continue;
                    }
                    byUser.put(psa.getLabel(), psa.getAccountId(), copy(psa, ps.getId()));
                }
            }
            return labelNormalizer.normalize(ctl, byUser.values()).getNormalized();
        }
    } catch (IOException e) {
        throw new OrmException(e);
    }
}

From source file:com.facebook.buck.android.ResourceFilters.java

/**
 * Takes a list of image files (as paths), and a target density (mdpi, hdpi, xhdpi), and
 * returns a list of files which can be safely left out when building an APK for phones with that
 * screen density. That APK will run on other screens as well but look worse due to scaling.
 * <p>
 * Each combination of non-density qualifiers is processed separately. For example, if we have
 * {@code drawable-hdpi, drawable-mdpi, drawable-xhdpi, drawable-hdpi-ro}, for a target of {@code
 * mdpi}, we'll be keeping {@code drawable-mdpi, drawable-hdpi-ro}.
 * @param candidates list of paths to image files
 * @param targetDensities densities we want to keep
 * @param canDownscale do we have access to an image scaler
 * @return set of files to remove
 */
@VisibleForTesting
static Set<Path> filterByDensity(Collection<Path> candidates, Set<ResourceFilters.Density> targetDensities,
        boolean canDownscale) {
    ImmutableSet.Builder<Path> removals = ImmutableSet.builder();

    Table<String, Density, Path> imageValues = HashBasedTable.create();

    // Create mappings for drawables. If candidate == "<base>/drawable-<dpi>-<other>/<filename>",
    // then we'll record a mapping of the form ("<base>/<filename>/<other>", "<dpi>") -> candidate.
    // For example:
    //                                    mdpi                               hdpi
    //                       --------------------------------------------------------------------
    // key: res/some.png/    |  res/drawable-mdpi/some.png          res/drawable-hdpi/some.png
    // key: res/some.png/fr  |                                        res/drawable-fr-hdpi/some.png
    for (Path candidate : candidates) {
        Qualifiers qualifiers = Qualifiers.from(candidate.getParent());

        String filename = candidate.getFileName().toString();
        Density density = qualifiers.density;
        String resDirectory = candidate.getParent().getParent().toString();
        String key = String.format("%s/%s/%s", resDirectory, filename, qualifiers.others);
        imageValues.put(key, density, candidate);
    }

    for (String key : imageValues.rowKeySet()) {
        Map<Density, Path> options = imageValues.row(key);
        Set<Density> available = options.keySet();

        // This is to make sure we preserve the existing structure of drawable/ files.
        Set<Density> targets = targetDensities;
        if (available.contains(Density.NO_QUALIFIER) && !available.contains(Density.MDPI)) {
            targets = Sets.newHashSet(Iterables.transform(targetDensities,
                    input -> (input == Density.MDPI) ? Density.NO_QUALIFIER : input));
        }

        // We intend to keep all available targeted densities.
        Set<Density> toKeep = Sets.newHashSet(Sets.intersection(available, targets));

        // Make sure we have a decent fit for the largest target density.
        Density largestTarget = Density.ORDERING.max(targets);
        if (!available.contains(largestTarget)) {
            Density fallback = null;
            // Downscaling nine-patch drawables would require extra logic, not doing that yet.
            if (canDownscale && !options.values().iterator().next().toString().endsWith(".9.png")) {
                // Highest possible quality, because we'll downscale it.
                fallback = Density.ORDERING.max(available);
            } else {
                // We want to minimize size, so we'll go for the smallest available density that's
                // still larger than the missing one and, missing that, for the largest available.
                for (Density candidate : Density.ORDERING.reverse().sortedCopy(available)) {
                    if (fallback == null || Density.ORDERING.compare(candidate, largestTarget) > 0) {
                        fallback = candidate;
                    }
                }
            }
            toKeep.add(fallback);
        }

        // Mark remaining densities for removal.
        for (Density density : Sets.difference(available, toKeep)) {
            removals.add(options.get(density));
        }
    }

    return removals.build();
}

From source file:org.matsim.contrib.dvrp.router.LeastCostPathCalculatorWithCache.java

@Override
public Path calcLeastCostPath(Node fromNode, Node toNode, double starttime, Person person, Vehicle vehicle) {
    Table<Id<Node>, Id<Node>, Path> spCacheSlice = pathCache[timeDiscretizer.getIdx(starttime)];
    Path path = spCacheSlice.get(fromNode.getId(), toNode.getId());

    if (path == null) {
        cacheMisses++;
        path = calculator.calcLeastCostPath(fromNode, toNode, starttime, person, vehicle);
        spCacheSlice.put(fromNode.getId(), toNode.getId(), path);
    } else {
        cacheHits++;
    }

    return path;
}

From source file:fr.ymanvieu.trading.rate.HistoricalService.java

private Table<String, String, List<Quote>> sort(List<Quote> quotes) {
    Table<String, String, List<Quote>> sortedQuotes = HashBasedTable.create();

    for (Quote q : quotes) {
        List<Quote> res = sortedQuotes.get(q.getCode(), q.getCurrency());

        if (res == null) {
            res = new ArrayList<>();
            sortedQuotes.put(q.getCode(), q.getCurrency(), res);
        }

        res.add(q);
    }

    return sortedQuotes;
}