Example usage for com.google.common.collect Multimap get

Introduction

This page collects example usages of com.google.common.collect.Multimap.get from several open-source projects.

Prototype

Collection<V> get(@Nullable K key);

Document

Returns a view collection of the values associated with key in this multimap, if any.
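
Two details of this method are easy to miss: get never returns null (an absent key yields an empty collection), and changes made through the returned collection write back into the multimap. The following is a minimal sketch of both points; the names (scores, alice, bob) are illustrative only and do not come from the examples below.

// assumes the usual imports: com.google.common.collect.HashMultimap,
// com.google.common.collect.Multimap and java.util.Collection
Multimap<String, Integer> scores = HashMultimap.create();
scores.put("alice", 10);
scores.put("alice", 20);

// get never returns null; an absent key yields an empty view
Collection<Integer> alice = scores.get("alice"); // [10, 20]
Collection<Integer> bob = scores.get("bob");     // empty, not null

// the view is live: removing from it also removes from the multimap
alice.remove(10);                                // scores now maps "alice" -> [20]

// adding through the view of an absent key inserts a new entry
bob.add(5);                                      // scores now also maps "bob" -> [5]

Because of this never-null guarantee, the examples below can iterate over or call isEmpty() on the result of get without any null check.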

Usage

From source file:com.torodb.torod.db.backends.query.processors.InProcessor.java

@Nullable
private static ProcessedQueryCriteria getNumericQuery(InQueryCriteria criteria,
        Multimap<ScalarType, ScalarValue<?>> byTypeValues) {
    ImmutableList.Builder<ScalarValue<?>> newInBuilder = ImmutableList.builder();

    for (ScalarValue<?> value : byTypeValues.values()) {
        newInBuilder.add(value);
    }

    ImmutableList<ScalarValue<?>> newIn = newInBuilder.build();

    if (newIn.isEmpty()) {
        return null;
    }

    DisjunctionBuilder structureBuilder = new DisjunctionBuilder();

    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), ScalarType.DOUBLE));
    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), ScalarType.INTEGER));
    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), ScalarType.LONG));

    newInBuilder.addAll(byTypeValues.get(ScalarType.DOUBLE));
    newInBuilder.addAll(byTypeValues.get(ScalarType.INTEGER));
    newInBuilder.addAll(byTypeValues.get(ScalarType.LONG));

    return new ProcessedQueryCriteria(structureBuilder.build(),
            new InQueryCriteria(criteria.getAttributeReference(), newIn));
}

From source file:es.usc.citius.composit.core.composition.search.CompositSearch.java

private static <E> Set<Set<Operation<E>>> combine(Multimap<Set<E>, Operation<E>> matchMap) {
    // Get the groups
    Set<Set<E>> sets = matchMap.asMap().keySet();
    log.debug("\t\t> Performing set input cover of {}", sets);
    SetCoverIterator<E> sc = new SetCoverIterator<E>(sets);
    //sc.useParallelization(true);
    Set<Set<Operation<E>>> coveringSets = new HashSet<Set<Operation<E>>>();
    while (sc.hasNext()) {
        Set<Set<E>> group = sc.next();
        log.debug("\t\t\t+ Selected cover input group: {}", group);
        // Get the services associated to each E group
        List<Set<Operation<E>>> solution = new ArrayList<Set<Operation<E>>>();
        for (Set<E> inputsMatched : group) {
            Collection<Operation<E>> ops = matchMap.get(inputsMatched);
            solution.add(new HashSet<Operation<E>>(ops));
        }
        log.debug("\t\t\t\t- Operation groups: {}", solution);
        // Generate cartesian product to decompose equivalent functional services
        Set<List<Operation<E>>> cartesian = Sets.cartesianProduct(solution);
        for (List<Operation<E>> cartesianResult : cartesian) {
            coveringSets.add(new HashSet<Operation<E>>(cartesianResult));
        }
        log.debug("\t\t\t\t- Cartesian product decomposition: {}", cartesian);
    }
    log.debug("\t\t\t- Cover sets before cartesian product decomposition {}", coveringSets);
    return coveringSets;
}

From source file:org.jboss.errai.idea.plugin.ui.TemplateUtil.java

public static DataFieldExistence dataFieldExistenceCheck(PsiAnnotation annotation, TemplateMetaData metaData) {
    final Multimap<String, TemplateDataField> inScopeDataFields = metaData.getAllDataFieldsInTemplate(false);
    final Map<String, ConsolidateDataFieldElementResult> dataFields = metaData.getConsolidatedDataFields();

    final AnnotationValueElement annoValueEl = Util.getValueStringFromAnnotationWithDefault(annotation);
    final String annoValue = annoValueEl.getValue();

    final Collection<TemplateDataField> result = inScopeDataFields.get(annoValue);
    if (result.isEmpty()) {
        if (dataFields.containsKey(annoValue)) {
            return DataFieldExistence.OUT_OF_SCOPE;
        } else {
            return DataFieldExistence.DOES_NOT_EXIST;
        }
    } else {
        return DataFieldExistence.EXISTS;
    }
}

From source file:grakn.core.graql.reasoner.plan.GraqlTraversalPlanner.java

/**
 *
 * @param atoms list of current atoms of interest
 * @param queryPattern corresponding pattern
 * @return an optimally ordered list of provided atoms
 */
private static ImmutableList<Atom> planFromTraversal(List<Atom> atoms, Conjunction<?> queryPattern,
        TransactionOLTP tx) {
    Multimap<VarProperty, Atom> propertyMap = HashMultimap.create();
    atoms.stream().filter(atom -> !(atom instanceof OntologicalAtom))
            .forEach(atom -> atom.getVarProperties().forEach(property -> propertyMap.put(property, atom)));
    Set<VarProperty> properties = propertyMap.keySet();

    GraqlTraversal graqlTraversal = TraversalPlanner.createTraversal(queryPattern, tx);
    ImmutableList<Fragment> fragments = Iterables.getOnlyElement(graqlTraversal.fragments());

    List<Atom> atomList = new ArrayList<>();

    atoms.stream().filter(atom -> atom instanceof OntologicalAtom).forEach(atomList::add);

    fragments.stream().map(Fragment::varProperty).filter(Objects::nonNull).filter(properties::contains)
            .distinct().flatMap(property -> propertyMap.get(property).stream()).distinct()
            .forEach(atomList::add);

    //add any unlinked items (disconnected and indexed for instance)
    propertyMap.values().stream().filter(at -> !atomList.contains(at)).forEach(atomList::add);
    return ImmutableList.copyOf(atomList);
}

From source file:com.giaybac.traprange.MAIN.java

private static void extractTables(String[] args) {
    try {
        List<Integer> pages = getPages(args);
        List<Integer> exceptPages = getExceptPages(args);
        List<Integer[]> exceptLines = getExceptLines(args);
        String in = getIn(args);
        String out = getOut(args);

        PDFTableExtractor extractor = (new PDFTableExtractor()).setSource(in);
        //page
        for (Integer page : pages) {
            extractor.addPage(page);
        }
        //except page
        for (Integer exceptPage : exceptPages) {
            extractor.exceptPage(exceptPage);
        }
        //except lines
        List<Integer> exceptLineIdxs = new ArrayList<>();
        Multimap<Integer, Integer> exceptLineInPages = LinkedListMultimap.create();
        for (Integer[] exceptLine : exceptLines) {
            if (exceptLine.length == 1) {
                exceptLineIdxs.add(exceptLine[0]);
            } else if (exceptLine.length == 2) {
                int lineIdx = exceptLine[0];
                int pageIdx = exceptLine[1];
                exceptLineInPages.put(pageIdx, lineIdx);
            }
        }
        if (!exceptLineIdxs.isEmpty()) {
            extractor.exceptLine(Ints.toArray(exceptLineIdxs));
        }
        if (!exceptLineInPages.isEmpty()) {
            for (int pageIdx : exceptLineInPages.keySet()) {
                extractor.exceptLine(pageIdx, Ints.toArray(exceptLineInPages.get(pageIdx)));
            }
        }
        //begin parsing pdf file
        List<Table> tables = extractor.extract();

        Writer writer = new OutputStreamWriter(new FileOutputStream(out), "UTF-8");
        try {
            for (Table table : tables) {
                writer.write("Page: " + (table.getPageIdx() + 1) + "\n");
                writer.write(table.toHtml());
            }
        } finally {
            try {
                writer.close();
            } catch (Exception e) {
            }
        }
    } catch (Exception e) {
        logger.error(null, e);
    }
}

From source file:com.android.tools.idea.templates.GradleFileMerger.java

private static void mergeDependencies(@NotNull PsiElement fromRoot, @NotNull PsiElement toRoot,
        @NotNull Project project) {
    Multimap<String, GradleCoordinate> dependencies = LinkedListMultimap.create();
    List<String> unparseableDependencies = new ArrayList<String>();

    // Load existing dependencies into the map for the existing build.gradle
    pullDependenciesIntoMap(toRoot, dependencies, null);

    // Load dependencies into the map for the new build.gradle
    pullDependenciesIntoMap(fromRoot, dependencies, unparseableDependencies);

    GroovyPsiElementFactory factory = GroovyPsiElementFactory.getInstance(project);

    RepositoryUrlManager urlManager = RepositoryUrlManager.get();

    for (String key : dependencies.keySet()) {
        GradleCoordinate highest = Collections.max(dependencies.get(key), COMPARE_PLUS_LOWER);

        // For test consistency, don't depend on installed SDK state while testing
        if (!ApplicationManager.getApplication().isUnitTestMode()
                || Boolean.getBoolean("force.gradlemerger.repository.check")) {
            // If this coordinate points to an artifact in one of our repositories, check to see if there is a static version
            // that we can add instead of a plus revision.
            if (RepositoryUrlManager.supports(highest.getArtifactId())) {
                String libraryCoordinate = urlManager.getLibraryCoordinate(highest.getArtifactId(), null,
                        false /* No previews */);
                GradleCoordinate available = GradleCoordinate.parseCoordinateString(libraryCoordinate);

                if (available != null) {
                    File archiveFile = urlManager.getArchiveForCoordinate(available);
                    if (archiveFile != null && archiveFile.exists()
                            && COMPARE_PLUS_LOWER.compare(available, highest) >= 0) {
                        highest = available;
                    }
                }
            }
        }
        PsiElement dependencyElement = factory
                .createStatementFromText(String.format(COMPILE_FORMAT, highest.toString()));
        toRoot.addBefore(dependencyElement, toRoot.getLastChild());
    }
    for (String unparseableDependency : unparseableDependencies) {
        PsiElement dependencyElement = factory.createStatementFromText(unparseableDependency);
        toRoot.addBefore(dependencyElement, toRoot.getLastChild());
    }
}

From source file:com.gradleware.tooling.toolingmodel.repository.internal.DefaultOmniBuildInvocationsContainerBuilder.java

private static ImmutableSortedMap<Path, OmniBuildInvocations> buildBuildInvocationsMapping(
        GradleProject project, Multimap<Path, OmniProjectTask> projectTasks,
        Multimap<Path, OmniTaskSelector> taskSelectors) {
    Preconditions.checkState(taskSelectors.keySet().containsAll(projectTasks.keySet()),
            "Task selectors are always configured for all projects");

    // create mappings for all projects which contain tasks selectors (which covers at least those projects that contain project tasks)
    ImmutableSortedMap.Builder<Path, OmniBuildInvocations> mapping = ImmutableSortedMap
            .orderedBy(Path.Comparator.INSTANCE);
    for (Path projectPath : taskSelectors.keySet()) {
        ImmutableList<OmniProjectTask> projectTasksOfProject = ImmutableSortedSet
                .orderedBy(TaskComparator.INSTANCE).addAll(projectTasks.get(projectPath)).build().asList();
        ImmutableList<OmniTaskSelector> taskSelectorsOfProject = ImmutableSortedSet
                .orderedBy(TaskSelectorComparator.INSTANCE).addAll(taskSelectors.get(projectPath)).build()
                .asList();
        mapping.put(projectPath,
                DefaultOmniBuildInvocations.from(projectTasksOfProject, taskSelectorsOfProject));
    }

    // create additional mappings for all those projects which do not contain any task selectors
    // this is the case if a project does not contain any tasks nor does any of its child projects
    // these additional mappings ensure the caller never gets back null for any project in the hierarchy
    Set<Path> projectPaths = Sets.newLinkedHashSet();
    collectProjectPathsRecursively(project, projectPaths);
    projectPaths.removeAll(taskSelectors.keySet());
    for (Path projectPath : projectPaths) {
        mapping.put(projectPath, DefaultOmniBuildInvocations.from(ImmutableList.<OmniProjectTask>of(),
                ImmutableList.<OmniTaskSelector>of()));
    }

    return mapping.build();
}

From source file:com.google.gerrit.httpd.restapi.RestApiServlet.java

private static void enablePrettyPrint(GsonBuilder gb, Multimap<String, String> config,
        @Nullable HttpServletRequest req) {
    String pp = Iterables.getFirst(config.get("pp"), null);
    if (pp == null) {
        pp = Iterables.getFirst(config.get("prettyPrint"), null);
        if (pp == null && req != null) {
            pp = acceptsJson(req) ? "0" : "1";
        }
    }
    if ("1".equals(pp) || "true".equals(pp)) {
        gb.setPrettyPrinting();
    }
}

From source file:org.jboss.weld.annotated.enhanced.jlr.EnhancedAnnotatedTypeImpl.java

private static boolean isOverridden(EnhancedAnnotatedMethod<?, ?> method,
        Multimap<MethodSignature, Package> seenMethods) {
    if (method.isPrivate()) {
        return false;
    } else if (method.isPackagePrivate() && seenMethods.containsKey(method.getSignature())) {
        return seenMethods.get(method.getSignature()).contains(method.getPackage());
    } else {
        return seenMethods.containsKey(method.getSignature());
    }
}

From source file:net.sourcedestination.sai.comparison.matching.MatchingGenerator.java

/** given a matching of nodes, extends the matching to pair up all edges which
 * have isomorphically matched incident nodes. In the case of a multigraph, 
 * edges are matched arbitrarily.
 * 
 * @param nodeMatching
 * @param fscc
 * @return
 */
public static GraphMatching induceEdgeMatchingUndirected(GraphMatching nodeMatching,
        FeatureSetCompatibilityChecker fscc) {
    final Graph g1 = nodeMatching.getGraph1();
    final Graph g2 = nodeMatching.getGraph2();
    BiMap<Integer, Integer> edgeMatch = HashBiMap.create();

    Multimap<Set<Integer>, Integer> g2Edges = HashMultimap.create();
    g2.getEdgeIDs().forEach(
            g2e -> g2Edges.put(Sets.newHashSet(g2.getEdgeSourceNodeID(g2e), g2.getEdgeTargetNodeID(g2e)), g2e));

    g1.getEdgeIDs().forEach(eid -> {
        int g1n1 = g1.getEdgeSourceNodeID(eid);
        int g1n2 = g1.getEdgeTargetNodeID(eid);
        int g2n1 = nodeMatching.getMatchedNodeInGraph2(g1n1);
        int g2n2 = nodeMatching.getMatchedNodeInGraph2(g1n2);
        if (g2n1 == -1 || g2n2 == -1)
            return; //skip edges with unmapped nodes in graph 2      

        if (g2Edges.get(Sets.newHashSet(g2n1, g2n2)).size() == 0)
            return; //skip if it can't be matched to a graph 2 edge

        int g2MatchedEdge = -1; // make sure the edges are compatible
        for (int g2e : g2Edges.get(Sets.newHashSet(g2n1, g2n2)))
            if (fscc.apply(g1.getEdgeFeatures(eid).collect(toSet()), g2.getEdgeFeatures(g2e).collect(toSet())))
                g2MatchedEdge = g2e;

        if (g2MatchedEdge != -1) //if we found a match, record it
            edgeMatch.put(eid, g2MatchedEdge);
    });
    return includeEdgeMatching(nodeMatching, edgeMatch);
}