Example usage for com.google.common.collect Multimap putAll

List of usage examples for com.google.common.collect Multimap putAll

Introduction

On this page you can find example usages of com.google.common.collect Multimap putAll.

Prototype

boolean putAll(@Nullable K key, Iterable<? extends V> values);

Document

Stores a key-value pair in this multimap for each element of values, all using the same key, key.
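
Below is a minimal, self-contained sketch of the call (the class and variable names are illustrative, not taken from the examples that follow). putAll returns true if the multimap changed as a result of the call, and false otherwise, for example when the supplied Iterable is empty.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;
import java.util.Collections;

public class MultimapPutAllSketch {
    public static void main(String[] args) {
        Multimap<String, Integer> scores = HashMultimap.create();

        // Stores one entry per value, all under the key "alice".
        boolean changed = scores.putAll("alice", Arrays.asList(90, 85, 90));
        System.out.println(changed);             // true - the multimap was modified
        System.out.println(scores.get("alice")); // two entries; the duplicate 90 is dropped
                                                 // because HashMultimap is a SetMultimap

        // An empty Iterable leaves the multimap unchanged, so putAll returns false.
        System.out.println(scores.putAll("bob", Collections.<Integer>emptyList())); // false
    }
}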

Usage

From source file:org.apache.drill.common.scanner.persistence.ScanResult.java

/**
 * Merges this and other together into a new result object.
 * @param other the scan result to merge with this one
 * @return the merged result
 */
public ScanResult merge(ScanResult other) {
    final Multimap<String, ChildClassDescriptor> newImpls = HashMultimap.create();
    for (Collection<ParentClassDescriptor> impls : asList(implementations, other.implementations)) {
        for (ParentClassDescriptor c : impls) {
            newImpls.putAll(c.getName(), c.getChildren());
        }
    }
    List<ParentClassDescriptor> newImplementations = new ArrayList<>();
    for (Entry<String, Collection<ChildClassDescriptor>> entry : newImpls.asMap().entrySet()) {
        newImplementations.add(new ParentClassDescriptor(entry.getKey(), new ArrayList<>(entry.getValue())));
    }

    return new ScanResult(merge(scannedPackages, other.scannedPackages),
            merge(scannedClasses, other.scannedClasses), merge(scannedAnnotations, other.scannedAnnotations),
            merge(annotatedClasses, other.annotatedClasses), newImplementations);
}

From source file:org.terasology.web.servlet.ModuleServlet.java

@GET
@Path("show")
@Produces(MediaType.TEXT_HTML)
public Viewable show() {
    logger.info("Requested module list as HTML");

    Set<Name> names = model.getModuleIds();

    // the key needs to be a String so that FreeMarker can use it for lookups
    Multimap<String, Module> map = TreeMultimap.create(String.CASE_INSENSITIVE_ORDER, versionComparator);

    for (Name name : names) {
        map.putAll(name.toString(), model.getModuleVersions(name));
    }

    ImmutableMap<Object, Object> dataModel = ImmutableMap.builder().put("items", map.asMap())
            .put("version", VersionInfo.getVersion()).build();
    return new Viewable("/module-list.ftl", dataModel);
}

From source file:com.dnastack.bob.service.impl.BeaconResponseServiceImpl.java

private Multimap<Beacon, Beacon> setUpChildrenMultimap(Collection<Beacon> beacons) {
    Multimap<Beacon, Beacon> children = HashMultimap.create();
    for (Beacon b : beacons) {
        if (b.isAggregator()) {
            children.putAll(b, beaconDao.findDescendants(b, false, true, false, false));
        } else {
            children.put(b, b);
        }
    }
    return children;
}

From source file:rabbit.data.internal.xml.access.AbstractAccessor.java

/**
 * Gets the data from the XML files.
 * 
 * @param start The start date of the data to get.
 * @param end The end date of the data to get.
 * @return The data between the dates, inclusive.
 */
private Multimap<WorkspaceStorage, S> getXmlData(LocalDate start, LocalDate end) {

    XMLGregorianCalendar startDate = toXmlDate(start);
    XMLGregorianCalendar endDate = toXmlDate(end);
    XmlPlugin plugin = XmlPlugin.getDefault();

    IPath[] storagePaths = plugin.getStoragePaths();
    Multimap<WorkspaceStorage, S> data = LinkedListMultimap.create(storagePaths.length);
    Multimap<WorkspaceStorage, File> files = LinkedListMultimap.create(storagePaths.length);

    for (IPath storagePath : storagePaths) {
        List<File> fileList = getDataStore().getDataFiles(start, end, storagePath);
        IPath workspacePath = plugin.getWorkspacePath(storagePath);
        files.putAll(new WorkspaceStorage(storagePath, workspacePath), fileList);
    }

    for (Map.Entry<WorkspaceStorage, File> entry : files.entries()) {
        for (S list : getCategories(getDataStore().read(entry.getValue()))) {

            XMLGregorianCalendar date = list.getDate();
            if (date == null) {
                continue; // Ignore invalid data.
            }
            if (startDate.compare(date) <= 0 && date.compare(endDate) <= 0) {
                data.put(entry.getKey(), list);
            }
        }
    }
    return data;
}

From source file:org.eclipse.xtext.ui.refactoring.impl.AbstractReferenceUpdater.java

protected void createClusteredReferenceUpdates(ElementRenameArguments elementRenameArguments,
        Multimap<URI, IReferenceDescription> resource2references, ResourceSet resourceSet,
        IRefactoringUpdateAcceptor updateAcceptor, StatusWrapper status, IProgressMonitor monitor) {
    SubMonitor progress = SubMonitor.convert(monitor, resource2references.keySet().size() + 1);
    if (loadTargetResources(resourceSet, elementRenameArguments, status, progress.newChild(1))) {
        if (getClusterSize() > 0) {
            Set<Resource> targetResources = newHashSet(resourceSet.getResources());
            Multimap<URI, IReferenceDescription> cluster = HashMultimap.create();
            SubMonitor clusterMonitor = progress.newChild(1);
            for (URI referringResourceURI : resource2references.keySet()) {
                cluster.putAll(referringResourceURI, resource2references.get(referringResourceURI));
                if (cluster.keySet().size() == getClusterSize()) {
                    unloadNonTargetResources(resourceSet, targetResources);
                    createReferenceUpdatesForCluster(elementRenameArguments, cluster, resourceSet,
                            updateAcceptor, status, clusterMonitor);
                    cluster.clear();
                }
            }
            if (!cluster.isEmpty()) {
                unloadNonTargetResources(resourceSet, targetResources);
                createReferenceUpdatesForCluster(elementRenameArguments, cluster, resourceSet, updateAcceptor,
                        status, clusterMonitor);
            }
        } else {
            createReferenceUpdatesForCluster(elementRenameArguments, resource2references, resourceSet,
                    updateAcceptor, status, progress.newChild(resource2references.keySet().size()));
        }
    }
}

From source file:com.griddynamics.jagger.master.Master.java

@Override
public void run() {
    validateConfiguration();

    if (!keyValueStorage.isAvailable()) {
        keyValueStorage.initialize();
    }

    String sessionId = sessionIdProvider.getSessionId();
    String sessionComment = sessionIdProvider.getSessionComment();

    Multimap<NodeType, NodeId> allNodes = HashMultimap.create();
    allNodes.putAll(NodeType.MASTER, coordinator.getAvailableNodes(NodeType.MASTER));

    Map<NodeType, CountDownLatch> countDownLatchMap = Maps.newHashMap();
    CountDownLatch agentCountDownLatch = new CountDownLatch(
            conditions.isMonitoringEnable() ? conditions.getMinAgentsCount() : 0);
    CountDownLatch kernelCountDownLatch = new CountDownLatch(conditions.getMinKernelsCount());
    countDownLatchMap.put(NodeType.AGENT, agentCountDownLatch);
    countDownLatchMap.put(NodeType.KERNEL, kernelCountDownLatch);

    new StartWorkConditions(allNodes, countDownLatchMap);

    try {
        agentCountDownLatch.await(timeoutConfiguration.getNodeAwaitTime(), TimeUnit.MILLISECONDS);
        kernelCountDownLatch.await(timeoutConfiguration.getNodeAwaitTime(), TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        log.warn("CountDownLatch await interrupted", e);
    }

    if (configuration.getMonitoringConfiguration() != null) {
        dynamicPlotGroups.setJmxMetricGroups(configuration.getMonitoringConfiguration()
                .getMonitoringSutConfiguration().getJmxMetricGroups());
        Map<ManageAgent.ActionProp, Serializable> agentStartManagementProps = Maps.newHashMap();

        agentStartManagementProps.put(ManageAgent.ActionProp.SET_JMX_METRICS,
                dynamicPlotGroups.getJmxMetrics());
        processAgentManagement(sessionId, agentStartManagementProps);
    }

    for (SessionExecutionListener listener : configuration.getSessionExecutionListeners()) {
        listener.onSessionStarted(sessionId, allNodes);
    }

    Thread shutdownHook = new Thread(String.format("Shutdown hook for %s", getClass().toString())) {
        @Override
        public void run() {
            terminateConfiguration();
        }
    };
    terminateConfigurationLatch = new CountDownLatch(1);
    Runtime.getRuntime().addShutdownHook(shutdownHook);
    try {
        log.info("Configuration launched!!");

        SessionExecutionStatus status = runConfiguration(allNodes);

        log.info("Configuration work finished!!");

        for (SessionExecutionListener listener : configuration.getSessionExecutionListeners()) {
            if (listener instanceof SessionListener) {
                ((SessionListener) listener).onSessionExecuted(sessionId, sessionComment, status);
            } else {
                listener.onSessionExecuted(sessionId, sessionComment);
            }
        }

        log.info("Going to generate report");
        if (configuration.getReport() != null) {
            configuration.getReport().renderReport(true);
        } else {
            reportingService.renderReport(true);
        }

        log.info("Report generated");

        log.info("Going to stop all agents");
        processAgentManagement(sessionId, agentStopManagementProps);
        log.info("Agents stopped");
    } finally {
        try {
            Runtime.getRuntime().removeShutdownHook(shutdownHook);
        } catch (Exception e) {
        }
        terminateConfigurationLatch.countDown();
    }
}

From source file:edu.cmu.lti.oaqa.baseqa.passage.rerank.scorers.LuceneInMemoryPassageScorer.java

@Override
public void prepare(JCas jcas) throws AnalysisEngineProcessException {
    uri2conf2score = HashBasedTable.create();
    uri2conf2rank = HashBasedTable.create();
    // index the ranked passages in an in-memory Lucene index
    List<Passage> passages = TypeUtil.getRankedPassages(jcas);
    RAMDirectory index = new RAMDirectory();
    try (IndexWriter writer = new IndexWriter(index, new IndexWriterConfig(analyzer))) {
        for (Passage passage : passages) {
            Document doc = new Document();
            doc.add(new StringField("uri", TypeUtil.getUriOffsets(passage, ":"), Field.Store.YES));
            doc.add(new TextField("text", passage.getText(), Field.Store.NO));
            writer.addDocument(doc);
        }
        writer.close();
        reader = DirectoryReader.open(index);
        searcher = new IndexSearcher(reader);
    } catch (IOException e) {
        throw new AnalysisEngineProcessException(e);
    }
    // queries
    List<String> tokens = TypeUtil.getOrderedTokens(jcas).stream().map(Token::getCoveredText)
            .map(QueryParser::escape).filter(name -> !name.isEmpty() && !stoplist.contains(name.toLowerCase()))
            .collect(toList());
    Multimap<String, String> ctype2names = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cnames = TypeUtil.getConceptNames(concept).stream()
                .map(LuceneInMemoryPassageScorer::normalizeQuoteName).distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2names.put(ctype, cnames));
    }
    Multimap<String, String> ctypepre2names = HashMultimap.create();
    ctype2names.asMap().entrySet().forEach(e -> ctypepre2names.putAll(e.getKey().split(":")[0], e.getValue()));
    Multimap<String, String> ctype2mentions = HashMultimap.create();
    for (Concept concept : TypeUtil.getConcepts(jcas)) {
        Set<String> ctypes = TypeUtil.getConceptTypes(concept).stream().map(ConceptType::getAbbreviation)
                .collect(toSet());
        String cmentions = TypeUtil.getConceptMentions(concept).stream().map(ConceptMention::getMatchedName)
                .map(LuceneInMemoryPassageScorer::normalizeQuoteName).distinct().collect(joining(" "));
        ctypes.stream().filter(t -> !FORBIDDEN_CTYPES.contains(t))
                .forEach(ctype -> ctype2mentions.put(ctype, cmentions));
    }
    Multimap<String, String> ctypepre2mentions = HashMultimap.create();
    ctype2mentions.asMap().entrySet()
            .forEach(e -> ctypepre2mentions.putAll(e.getKey().split(":")[0], e.getValue()));
    LOG.debug("Query strings");
    ExecutorService service = Executors.newCachedThreadPool();
    // execute against all tokens
    service.submit(() -> {
        String concatTokens = String.join(" ", tokens);
        LOG.debug(" - Concatenated tokens: {}", concatTokens);
        search(concatTokens, "tokens_concatenated@all");
    });
    // execute against concatenated concept names
    service.submit(() -> {
        String concatCnames = String.join(" ", ctype2names.values());
        LOG.debug(" - Concatenated concept names: {}", concatCnames);
        search(concatCnames, "cnames_concatenated@all");
    });
    // execute against concatenated concept mentions
    service.submit(() -> {
        String concatCmentions = String.join(" ", ctype2mentions.values());
        LOG.debug(" - Concatenated concept mentions: {}", concatCmentions);
        search(concatCmentions, "cmentions_concatenated@all");
    });
    // execute against concept names for each concept
    service.submit(() -> {
        for (String cnames : ImmutableSet.copyOf(ctype2names.values())) {
            LOG.debug(" - Concatenated concept names: {}", cnames);
            search(cnames, "cnames_individual@all");
        }
    });
    // execute against concept names for each concept type
    service.submit(() -> {
        for (String ctype : ctype2names.keySet()) {
            String concatCnames = String.join(" ", ctype2names.get(ctype));
            LOG.debug(" - Concatenated concept names for {}: {}", ctype, concatCnames);
            search(concatCnames, "cnames@" + ctype + "@all");
        }
    });
    // execute against concept names for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2names.keySet()) {
            String concatCnames = String.join(" ", ctypepre2names.get(ctypepre));
            LOG.debug(" - Concatenated concept names for {}: {}", ctypepre, concatCnames);
            search(concatCnames, "cnames@" + ctypepre + "@all");
        }
    });
    // execute against concept mentions for each concept
    service.submit(() -> {
        for (String cmentions : ImmutableSet.copyOf(ctype2mentions.values())) {
            LOG.debug(" - Concatenated concept mentions: {}", cmentions);
            search(cmentions, "cmentions_individual@all");
        }
    });
    // execute against concept mentions for each concept type
    service.submit(() -> {
        for (String ctype : ctype2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctype2mentions.get(ctype));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctype, concatCmentions);
            search(concatCmentions, "cmentions@" + ctype + "@all");
        }
    });
    // execute against concept mentions for each concept type prefix
    service.submit(() -> {
        for (String ctypepre : ctypepre2mentions.keySet()) {
            String concatCmentions = String.join(" ", ctypepre2mentions.get(ctypepre));
            LOG.debug(" - Concatenated concept mentions for {}: {}", ctypepre, concatCmentions);
            search(concatCmentions, "cmentions@" + ctypepre + "@all");
        }
    });
    service.shutdown();
    try {
        service.awaitTermination(1, TimeUnit.MINUTES);
    } catch (InterruptedException e) {
        throw new AnalysisEngineProcessException(e);
    }
    confs = uri2conf2score.columnKeySet();
}

From source file:org.eclipse.m2e.core.internal.project.registry.BasicProjectRegistry.java

public Map<ArtifactKey, Collection<IFile>> getWorkspaceArtifacts(String groupId, String artifactId) {
    Multimap<ArtifactKey, IFile> artifacts = HashMultimap.create();
    for (Map.Entry<ArtifactKey, Set<IFile>> entry : workspaceArtifacts.entrySet()) {
        ArtifactKey workspaceKey = entry.getKey();
        if (groupId.equals(workspaceKey.getGroupId()) && artifactId.equals(workspaceKey.getArtifactId())) {
            artifacts.putAll(workspaceKey, entry.getValue());
        }
    }
    return artifacts.asMap();
}

From source file:org.splevo.ui.handler.vpm.MergeVariationPointsHandler.java

private Multimap<String, SoftwareElement> collectSoftwareElementsToMove(Set<VariationPoint> vpsToMerge,
        VariationPoint survivingVP) {
    Multimap<String, SoftwareElement> variantSoftwareElements = LinkedListMultimap.create();
    for (VariationPoint vp : vpsToMerge) {

        // skip the surviving vp to not modify the emf elements
        if (survivingVP == vp) {
            continue;
        }

        for (Variant variant : vp.getVariants()) {
            variantSoftwareElements.putAll(variant.getId(), variant.getImplementingElements());
        }
    }
    return variantSoftwareElements;
}

From source file:com.cloudant.sync.replication.ChangesResultWrapper.java

public Multimap<String, String> openRevisions(int start, int end) {
    Preconditions.checkArgument(start >= 0, "Start position must be greater or equal to zero.");
    Preconditions.checkArgument(end > start, "End position must be greater than start.");
    Preconditions.checkArgument(end <= this.size(), "End position must be smaller than changes feed size.");

    Multimap<String, String> openRevisions = HashMultimap.create();
    for (int i = start; i < end; i++) {
        ChangesResult.Row row = this.getResults().get(i);
        List<ChangesResult.Row.Rev> revisions = row.getChanges();
        Set<String> openRevs = new HashSet<String>(revisions.size());
        for (ChangesResult.Row.Rev rev : revisions) {
            openRevs.add(rev.getRev());
        }
        openRevisions.putAll(row.getId(), openRevs);
    }
    return openRevisions;
}