Example usage for com.google.common.collect Multimap asMap

List of usage examples for com.google.common.collect Multimap asMap

Introduction

In this page you can find the example usage for com.google.common.collect Multimap asMap.

Prototype

Map<K, Collection<V>> asMap();

Source Link

Document

Returns a view of this multimap as a Map from each distinct key to the nonempty collection of that key's associated values.

Usage

From source file:edu.harvard.med.screensaver.db.datafetcher.TupleDataFetcher.java

/**
 * Fetches tuples of property values for the given keys, issuing one query per
 * group of properties that share the same relationship path (i.e. the same
 * entity type), then collating the per-query results into {@link Tuple}s.
 *
 * @param keys if null, fetches all entities for the root entity type (subject
 *          to normal column criteria)
 * @return a map from key to the tuple of fetched property values for that key
 */
protected Map<K, Tuple<K>> doFetchData(Set<K> keys) {
    // collate properties into groups of PropertyPaths having same RelationshipPath;
    // this will allow us to execute one query for each group of properties that are from the same entity type
    Multimap<RelationshipPath<E>, PropertyPath<E>> pathGroups = Multimaps.index(getProperties(),
            new Function<PropertyPath<E>, RelationshipPath<E>>() {
                public RelationshipPath<E> apply(PropertyPath<E> p) {
                    return p.getAncestryPath();
                }
            });

    // keys may be null (meaning "fetch everything"), so guard the size-based
    // presizing and logging; the previous code NPE'd on keys.size() in that case.
    int expectedSize = keys == null ? 16 : keys.size();
    Map<K, Tuple<K>> tuples = Maps.newHashMapWithExpectedSize(expectedSize);
    for (Collection<PropertyPath<E>> propertyPaths : pathGroups.asMap().values()) {
        List<PropertyPath<E>> orderedPropertyPaths = Lists.newArrayList(propertyPaths);
        if (log.isDebugEnabled()) {
            log.debug("fetching " + (keys == null ? "all" : keys.size())
                    + " values for properties " + orderedPropertyPaths);
        }
        List<Object[]> result = _dao.runQuery(buildQueryForProperty(orderedPropertyPaths, keys));
        packageResultIntoTuples(tuples, orderedPropertyPaths, result);
    }
    return tuples;
}

From source file:org.apache.drill.common.scanner.persistence.ScanResult.java

/**
 * merges this and other together into a new result object
 * @param other/* w ww  .j  a  va 2s.com*/
 * @return the resulting merge
 */
public ScanResult merge(ScanResult other) {
    final Multimap<String, ChildClassDescriptor> newImpls = HashMultimap.create();
    for (Collection<ParentClassDescriptor> impls : asList(implementations, other.implementations)) {
        for (ParentClassDescriptor c : impls) {
            newImpls.putAll(c.getName(), c.getChildren());
        }
    }
    List<ParentClassDescriptor> newImplementations = new ArrayList<>();
    for (Entry<String, Collection<ChildClassDescriptor>> entry : newImpls.asMap().entrySet()) {
        newImplementations.add(new ParentClassDescriptor(entry.getKey(), new ArrayList<>(entry.getValue())));
    }

    return new ScanResult(merge(scannedPackages, other.scannedPackages),
            merge(scannedClasses, other.scannedClasses), merge(scannedAnnotations, other.scannedAnnotations),
            merge(annotatedClasses, other.annotatedClasses), newImplementations);
}

From source file:org.springframework.ide.eclipse.boot.dash.views.DeleteElementsAction.java

@Override
public void run() {
    // Deletion is implemented per BootDashModel, so first group the selected
    // elements by the model that owns them. Only DeletionCapabableModel
    // instances can remove elements; anything else is silently skipped.
    Multimap<BootDashModel, BootDashElement> elementsByModel = HashMultimap.create();
    for (BootDashElement element : getSelectedElements()) {
        BootDashModel owner = element.getBootDashModel();
        if (owner instanceof DeletionCapabableModel) {
            elementsByModel.put(owner, element);
        }
    }
    // For each model: confirm with the user, then delete in a background Job.
    for (final Entry<BootDashModel, Collection<BootDashElement>> group : elementsByModel.asMap().entrySet()) {
        BootDashModel owner = group.getKey();
        // Cast is safe: only DeletionCapabableModel instances were added above.
        final DeletionCapabableModel deletionModel = (DeletionCapabableModel) owner;
        boolean confirmed = ui.confirmOperation("Deleting Elements",
                deletionModel.getDeletionConfirmationMessage(group.getValue()));
        if (confirmed) {
            Job deletionJob = new Job("Deleting Elements from " + owner.getRunTarget().getName()) {
                @Override
                protected IStatus run(IProgressMonitor monitor) {
                    deletionModel.delete(group.getValue(), ui);
                    return Status.OK_STATUS;
                }
            };
            deletionJob.schedule();
        }
    }
}

From source file:com.isotrol.impe3.nr.api.Node.java

/** Snapshots the given highlight multimap into a fresh mutable map. */
private void setHighlight(Multimap<String, String> highlight) {
    Map<String, Collection<String>> view = highlight.asMap();
    this.highlightMap = Maps.newHashMap(view);
}

From source file:com.isotrol.impe3.nr.api.Node.java

/** Snapshots the given properties multimap into a fresh mutable map. */
private void setProperties(Multimap<String, String> properties) {
    Map<String, Collection<String>> view = properties.asMap();
    this.propertiesMap = Maps.newHashMap(view);
}

From source file:org.eclipse.che.workspace.infrastructure.kubernetes.provision.InstallerServersPortProvisioner.java

/**
 * Groups the installer's server references by the port each server listens on.
 *
 * @param installer the installer whose server configs are inspected
 * @return a map view from port number to the names of the servers on that port
 */
@VisibleForTesting
Map<Integer, Collection<String>> getServersRefsGroupedByPorts(InstallerImpl installer) {
    Multimap<Integer, String> serversByPort = ArrayListMultimap.create();

    for (Entry<String, ? extends ServerConfig> server : installer.getServers().entrySet()) {
        Pair<Integer, String> portProtocol = getPortProtocol(server.getValue().getPort());
        serversByPort.put(portProtocol.first, server.getKey());
    }

    // asMap() exposes the multimap as Map<port, collection of server names>.
    return serversByPort.asMap();
}

From source file:eu.itesla_project.ucte.network.ext.UcteNetworkExt.java

/**
 * Builds an undirected pseudograph whose vertices are the node codes of
 * {@code network} and whose edges link nodes considered to be in the same
 * substation: nodes sharing a geographical spot, nodes connected by a
 * transformer, and nodes joined by a busbar coupler or by a line whose
 * impedance magnitude is below {@code lineMinZ}.
 *
 * @param network the UCTE network to analyse
 * @return the substation connectivity graph
 */
private UndirectedGraph<UcteNodeCode, Object> createSubstationGraph(UcteNetwork network) {
    UndirectedGraph<UcteNodeCode, Object> graph = new Pseudograph<>(Object.class);
    // Every node becomes a vertex, even if it ends up with no edges.
    for (UcteNode node : network.getNodes()) {
        graph.addVertex(node.getCode());
    }

    // in the same substation...
    // ...nodes with same geographical spot
    Multimap<String, UcteNode> nodesByGeographicalSpot = Multimaps.index(network.getNodes(),
            new Function<UcteNode, String>() {
                @Override
                public String apply(UcteNode node) {
                    return node.getCode().getGeographicalSpot();
                }
            });
    for (Map.Entry<String, Collection<UcteNode>> entry : nodesByGeographicalSpot.asMap().entrySet()) {
        for (UcteNode n1 : entry.getValue()) {
            for (UcteNode n2 : entry.getValue()) {
                // NOTE(review): each unordered pair is visited twice, (n1,n2) and
                // (n2,n1), and a Pseudograph permits parallel edges — presumably
                // harmless for connectivity analysis, but worth confirming.
                if (n1 != n2) {
                    graph.addEdge(n1.getCode(), n2.getCode());
                }
            }
        }
    }

    // ...nodes connected by a transformer
    for (UcteTransformer tfo : network.getTransformers()) {
        UcteNodeCode nodeCode1 = tfo.getId().getNodeCode1();
        UcteNodeCode nodeCode2 = tfo.getId().getNodeCode2();
        graph.addEdge(nodeCode1, nodeCode2);
    }

    // ...nodes connected by a coupler or by a low impedance line
    for (UcteLine l : network.getLines()) {
        UcteNodeCode nodeCode1 = l.getId().getNodeCode1();
        UcteNodeCode nodeCode2 = l.getId().getNodeCode2();
        if (l.getStatus() == UcteElementStatus.BUSBAR_COUPLER_IN_OPERATION
                || l.getStatus() == UcteElementStatus.BUSBAR_COUPLER_OUT_OF_OPERATION) {
            // Busbar couplers always join their two nodes, in or out of operation.
            graph.addEdge(nodeCode1, nodeCode2);
        } else {
            // Ordinary lines only count when their impedance magnitude |R + jX|
            // is below the configured threshold.
            double z = Math.hypot(l.getResistance(), l.getReactance());
            if (z < lineMinZ) {
                graph.addEdge(nodeCode1, nodeCode2);
            }
        }
    }

    return graph;
}

From source file:com.hortonworks.streamline.streams.layout.storm.StormTopologyFluxGenerator.java

/**
 * Translates a RulesProcessor into flux bolt components. Windowed rules are
 * split out into one synthetic "WindowedRulesProcessor" bolt per distinct
 * window configuration; the remaining (non-windowed) rules stay on the
 * original processor. If no non-windowed rules remain, the original
 * processor's flux streams are removed instead.
 */
@Override
public void visit(final RulesProcessor rulesProcessor) {
    rulesProcessor.getConfig().setAny("outputStreams", rulesProcessor.getOutputStreams());
    // Partition the rules by whether they declare a window.
    List<Rule> rulesWithWindow = new ArrayList<>();
    List<Rule> rulesWithoutWindow = new ArrayList<>();
    for (Rule rule : rulesProcessor.getRules()) {
        if (rule.getWindow() != null) {
            rulesWithWindow.add(rule);
        } else {
            rulesWithoutWindow.add(rule);
        }
    }
    // handle windowed rules with WindowRuleBoltFluxComponent
    if (!rulesWithWindow.isEmpty()) {
        // Group windowed rules by their window config so each unique window
        // gets exactly one bolt.
        Multimap<Window, Rule> windowedRules = ArrayListMultimap.create();
        for (Rule rule : rulesWithWindow) {
            windowedRules.put(rule.getWindow(), rule);
        }
        int windowedRulesProcessorId = 0;
        // create windowed bolt per unique window configuration
        for (Map.Entry<Window, Collection<Rule>> entry : windowedRules.asMap().entrySet()) {
            // Clone the original processor and narrow it to this window's rules;
            // the derived id keeps it distinct from the parent and its siblings.
            RulesProcessor windowedRulesProcessor = new RulesProcessor(rulesProcessor);
            windowedRulesProcessor.setRules(new ArrayList<>(entry.getValue()));
            windowedRulesProcessor.setId(rulesProcessor.getId() + "." + ++windowedRulesProcessorId);
            windowedRulesProcessor.setName("WindowedRulesProcessor");
            windowedRulesProcessor.getConfig().setAny(RulesProcessor.CONFIG_KEY_RULES,
                    Collections2.transform(entry.getValue(), new Function<Rule, Long>() {
                        @Override
                        public Long apply(Rule input) {
                            return input.getId();
                        }
                    }));
            LOG.debug("Rules processor with window {}", windowedRulesProcessor);
            keysAndComponents.add(makeEntry(StormTopologyLayoutConstants.YAML_KEY_BOLTS, getYamlComponents(
                    fluxComponentFactory.getFluxComponent(windowedRulesProcessor), windowedRulesProcessor)));
            // Wire the windowed bolt with the appropriate edges
            wireWindowedRulesProcessor(windowedRulesProcessor, topologyDag.getEdgesTo(rulesProcessor),
                    topologyDag.getEdgesFrom(rulesProcessor));
            mayBeUpdateTopologyConfig(entry.getKey());
        }
    }
    if (rulesWithoutWindow.isEmpty()) {
        // Everything was windowed: the original processor emits nothing, so
        // drop its flux streams entirely.
        removeFluxStreamsTo(getFluxId(rulesProcessor));
    } else {
        // Keep the original processor, restricted to the non-windowed rules.
        rulesProcessor.setRules(rulesWithoutWindow);
        rulesProcessor.getConfig().setAny(RulesProcessor.CONFIG_KEY_RULES,
                Collections2.transform(rulesWithoutWindow, new Function<Rule, Long>() {
                    @Override
                    public Long apply(Rule input) {
                        return input.getId();
                    }
                }));
        keysAndComponents.add(makeEntry(StormTopologyLayoutConstants.YAML_KEY_BOLTS,
                getYamlComponents(fluxComponentFactory.getFluxComponent(rulesProcessor), rulesProcessor)));
    }
}

From source file:com.b2international.snowowl.datastore.server.snomed.SnomedCDOConflictProcessor.java

/**
 * Scans the new-revision feature map for any (feature, id) pair that still
 * references the detached object {@code id}, and reports the first such
 * dangling reference as a conflict.
 *
 * @param newRevisionIdToFeatureIdMap new revision ids mapped to their (feature, referenced id) pairs
 * @param id the id of the detached object to search for
 * @return the conflict describing the dangling reference, or {@code null} if none is found
 */
private Conflict checkDetachedReferences(
        Multimap<CDOID, Pair<EStructuralFeature, CDOID>> newRevisionIdToFeatureIdMap, CDOID id) {
    for (Entry<CDOID, Collection<Pair<EStructuralFeature, CDOID>>> revision : newRevisionIdToFeatureIdMap
            .asMap().entrySet()) {
        for (Pair<EStructuralFeature, CDOID> reference : revision.getValue()) {
            if (!reference.getB().equals(id)) {
                continue;
            }
            String featureName = reference.getA().getName();
            // The direction of the reported conflict depends on whether we are rebasing.
            if (isRebase) {
                return new AddedInTargetAndDetachedInSourceConflict(id, revision.getKey(), featureName);
            }
            return new AddedInSourceAndDetachedInTargetConflict(revision.getKey(), id, featureName);
        }
    }
    return null;
}

From source file:org.eclipse.xtext.serializer.sequencer.ContextFinder.java

protected Iterable<ISerializationContext> findContextsByContainer(EObject sem,
        Iterable<ISerializationContext> contextCandidates) {
    if (sem.eResource() != null && sem.eResource().getContents().contains(sem))
        return Collections.singleton(getRootContext(sem));
    EReference ref = sem.eContainmentFeature();
    if (ref == null || (contextCandidates != null && Iterables.size(contextCandidates) < 2))
        return contextCandidates;
    Multimap<IConstraint, ISerializationContext> containerConstraints = getConstraints(sem.eContainer());
    int refID = sem.eContainer().eClass().getFeatureID(ref);
    Set<ISerializationContext> childContexts = Sets.newLinkedHashSet();
    for (Entry<IConstraint, Collection<ISerializationContext>> e : Lists
            .newArrayList(containerConstraints.asMap().entrySet())) {
        IConstraint constraint = e.getKey();
        Collection<ISerializationContext> contexts = e.getValue();
        if (constraint.getFeatures()[refID] == null)
            containerConstraints.removeAll(constraint);
        else {//w ww  .  j a  v  a2s.co  m
            childContexts.addAll(createContextsForFeatures(contexts, constraint.getFeatures()[refID], sem));
        }
    }
    Set<ISerializationContext> result;
    if (contextCandidates != null) {
        result = Sets.newLinkedHashSet(contextCandidates);
        result.retainAll(childContexts);
    } else
        result = childContexts;
    if (result.size() < 2)
        return result;
    Iterable<ISerializationContext> filteredContexts = findContextsByContainer(sem.eContainer(),
            containerConstraints.values());
    childContexts = Sets.newLinkedHashSet();
    for (Entry<IConstraint, Collection<ISerializationContext>> e : Lists
            .newArrayList(containerConstraints.asMap().entrySet()))
        if (intersect(filteredContexts, e.getValue()))
            childContexts.addAll(createContextsForFeatures(e.getValue(), e.getKey().getFeatures()[refID], sem));
    result.retainAll(childContexts);
    return result;
}