Example usage for com.google.common.collect Multimaps asMap

Introduction

This page collects example usages of com.google.common.collect.Multimaps.asMap from open source projects.

Prototype

@Beta
public static <K, V> Map<K, Collection<V>> asMap(Multimap<K, V> multimap) 

Document

Returns multimap.asMap() (see Multimap#asMap), a live Map view of the multimap whose values are the multimap's value collections.
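
A minimal, self-contained sketch of the method's behavior may help before the project examples below (the class name and sample data here are illustrative, not taken from any of those projects). Note that asMap is overloaded: the generic Multimap overload shown in the prototype returns Map<K, Collection<V>>, while the SetMultimap and ListMultimap overloads used in this sketch and in several of the examples refine the value type.

import java.util.Map;
import java.util.Set;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimaps;
import com.google.common.collect.SetMultimap;

public class MultimapsAsMapDemo {
    public static void main(String[] args) {
        // Illustrative sample data.
        SetMultimap<String, Integer> multimap = HashMultimap.create();
        multimap.put("a", 1);
        multimap.put("a", 2);
        multimap.put("b", 3);

        // The SetMultimap overload refines the value type to Set<Integer>.
        Map<String, Set<Integer>> view = Multimaps.asMap(multimap);
        System.out.println(view); // {a=[1, 2], b=[3]} (iteration order may vary)

        // The result is a live view, not a copy: mutating a value collection
        // writes through to the underlying multimap.
        view.get("a").remove(1);
        System.out.println(multimap.containsEntry("a", 1)); // false
    }
}

The live-view behavior is worth keeping in mind when reading the examples: it is likely why the NeighbourResolutionManager example below wraps the result in ImmutableMap.copyOf before returning it.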

Usage

From source file:org.cinchapi.concourse.server.storage.db.SecondaryRecord.java
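In this example, matching records are accumulated in a TreeMultimap per operator, and Multimaps.asMap(data) returns the result as a Map<PrimaryKey, Set<Value>> view without copying.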

/**
 * Explore this record and return a mapping from PrimaryKey to the Values
 * that cause the corresponding records to satisfy {@code operator} in
 * relation to the specified {@code values} (and at the specified
 * {@code timestamp} if {@code historical} is {@code true}).
 *
 * @param historical - if {@code true} query the history, otherwise query
 *            the current state
 * @param timestamp - this value is ignored if {@code historical} is
 *            {@code false}, otherwise this value is the historical
 *            timestamp at which to query the field
 * @param operator
 * @param values
 * @return the relevant data that causes the matching records to satisfy the
 *         criteria
 */
private Map<PrimaryKey, Set<Value>> explore(boolean historical, long timestamp, Operator operator,
        Value... values) { /* Authorized */
    read.lock();
    try {
        SetMultimap<PrimaryKey, Value> data = TreeMultimap.create();
        Value value = values[0];
        if (operator == Operator.EQUALS) {
            for (PrimaryKey record : historical ? get(value, timestamp) : get(value)) {
                data.put(record, value);
            }
        } else if (operator == Operator.NOT_EQUALS) {
            for (Value stored : historical ? history.keySet() : present.keySet()) {
                if (!value.equals(stored)) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else if (operator == Operator.GREATER_THAN) {
            for (Value stored : historical ? history.keySet()
                    : ((NavigableSet<Value>) present.keySet()).tailSet(value, false)) {
                if (!historical || stored.compareTo(value) > 0) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else if (operator == Operator.GREATER_THAN_OR_EQUALS) {
            for (Value stored : historical ? history.keySet()
                    : ((NavigableSet<Value>) present.keySet()).tailSet(value, true)) {
                if (!historical || stored.compareTo(value) >= 0) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else if (operator == Operator.LESS_THAN) {
            for (Value stored : historical ? history.keySet()
                    : ((NavigableSet<Value>) present.keySet()).headSet(value, false)) {
                if (!historical || stored.compareTo(value) < 0) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else if (operator == Operator.LESS_THAN_OR_EQUALS) {
            for (Value stored : historical ? history.keySet()
                    : ((NavigableSet<Value>) present.keySet()).headSet(value, true)) {
                if (!historical || stored.compareTo(value) <= 0) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else if (operator == Operator.BETWEEN) {
            Preconditions.checkArgument(values.length > 1);
            Value value2 = values[1];
            for (Value stored : historical ? history.keySet()
                    : ((NavigableSet<Value>) present.keySet()).subSet(value, true, value2, false)) {
                if (!historical || (stored.compareTo(value) >= 0 && stored.compareTo(value2) < 0)) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else if (operator == Operator.REGEX) {
            Pattern p = Pattern.compile(value.getObject().toString());
            for (Value stored : historical ? history.keySet() : present.keySet()) {
                Matcher m = p.matcher(stored.getObject().toString());
                if (m.matches()) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else if (operator == Operator.NOT_REGEX) {
            Pattern p = Pattern.compile(value.getObject().toString());
            for (Value stored : historical ? history.keySet() : present.keySet()) {
                Matcher m = p.matcher(stored.getObject().toString());
                if (!m.matches()) {
                    for (PrimaryKey record : historical ? get(stored, timestamp) : get(stored)) {
                        data.put(record, stored);
                    }
                }
            }
        } else {
            throw new UnsupportedOperationException();
        }
        return Multimaps.asMap(data);
    } finally {
        read.unlock();
    }
}

From source file:org.onosproject.net.neighbour.impl.NeighbourResolutionManager.java
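Here asMap converts the handler registration multimap into a Map so that ImmutableMap.copyOf can take a consistent snapshot inside the synchronized block.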

@Override
public Map<ConnectPoint, Collection<NeighbourHandlerRegistration>> getHandlerRegistrations() {
    synchronized (packetHandlers) {
        return ImmutableMap.copyOf(Multimaps.asMap(packetHandlers));
    }
}

From source file:com.facebook.presto.raptor.RaptorMetadata.java
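This example builds an ImmutableListMultimap of column metadata and returns Multimaps.asMap over it, producing the required Map<SchemaTableName, List<ColumnMetadata>>.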

@Override
public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session,
        SchemaTablePrefix prefix) {
    requireNonNull(prefix, "prefix is null");

    ImmutableListMultimap.Builder<SchemaTableName, ColumnMetadata> columns = ImmutableListMultimap.builder();
    for (TableColumn tableColumn : dao.listTableColumns(prefix.getSchemaName(), prefix.getTableName())) {
        if (tableColumn.getColumnName().equals(SAMPLE_WEIGHT_COLUMN_NAME)) {
            continue;
        }
        ColumnMetadata columnMetadata = new ColumnMetadata(tableColumn.getColumnName(),
                tableColumn.getDataType(), false);
        columns.put(tableColumn.getTable(), columnMetadata);
    }
    return Multimaps.asMap(columns.build());
}

From source file:org.glowroot.central.repo.GaugeValueDao.java
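Here asMap turns the rollup-capture-time multimap into Map.Entry<Long, Set<String>> pairs for iterating the "needs rollup" inserts.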

public void store(String agentId, List<GaugeValue> gaugeValues) throws Exception {
    if (gaugeValues.isEmpty()) {
        return;
    }
    List<String> agentRollupIds = agentDao.readAgentRollupIds(agentId);
    int ttl = getTTLs().get(0);
    long maxCaptureTime = 0;
    List<ResultSetFuture> futures = Lists.newArrayList();
    for (GaugeValue gaugeValue : gaugeValues) {
        BoundStatement boundStatement = insertValuePS.get(0).bind();
        String gaugeName = gaugeValue.getGaugeName();
        long captureTime = gaugeValue.getCaptureTime();
        maxCaptureTime = Math.max(captureTime, maxCaptureTime);
        int adjustedTTL = AggregateDao.getAdjustedTTL(ttl, captureTime, clock);
        int i = 0;
        boundStatement.setString(i++, agentId);
        boundStatement.setString(i++, gaugeName);
        boundStatement.setTimestamp(i++, new Date(captureTime));
        boundStatement.setDouble(i++, gaugeValue.getValue());
        boundStatement.setLong(i++, gaugeValue.getWeight());
        boundStatement.setInt(i++, adjustedTTL);
        futures.add(session.executeAsync(boundStatement));
        for (String agentRollupId : agentRollupIds) {
            futures.addAll(gaugeNameDao.store(agentRollupId, gaugeName));
        }
    }

    // wait for success before inserting "needs rollup" records
    MoreFutures.waitForAll(futures);
    futures.clear();

    // insert into gauge_needs_rollup_1
    SetMultimap<Long, String> rollupCaptureTimes = getRollupCaptureTimes(gaugeValues);
    for (Entry<Long, Set<String>> entry : Multimaps.asMap(rollupCaptureTimes).entrySet()) {
        BoundStatement boundStatement = insertNeedsRollup.get(0).bind();
        Long captureTime = entry.getKey();
        int adjustedTTL = AggregateDao.getAdjustedTTL(ttl, captureTime, clock);
        int needsRollupAdjustedTTL = AggregateDao.getNeedsRollupAdjustedTTL(adjustedTTL,
                configRepository.getRollupConfigs());
        int i = 0;
        boundStatement.setString(i++, agentId);
        boundStatement.setTimestamp(i++, new Date(captureTime));
        boundStatement.setUUID(i++, UUIDs.timeBased());
        boundStatement.setSet(i++, entry.getValue());
        boundStatement.setInt(i++, needsRollupAdjustedTTL);
        futures.add(session.executeAsync(boundStatement));
    }
    MoreFutures.waitForAll(futures);
}

From source file:org.glowroot.central.repo.GaugeValueDaoImpl.java
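A later variant of the previous example; asMap again drives the per-capture-time iteration over the grouped gauge names.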

public void store(String agentId, List<String> agentRollupIdsForMeta, List<GaugeValue> gaugeValues)
        throws Exception {
    if (gaugeValues.isEmpty()) {
        return;
    }
    int ttl = getTTLs().get(0);
    long maxCaptureTime = 0;
    List<Future<?>> futures = new ArrayList<>();
    for (GaugeValue gaugeValue : gaugeValues) {
        BoundStatement boundStatement = insertValuePS.get(0).bind();
        String gaugeName = gaugeValue.getGaugeName();
        long captureTime = gaugeValue.getCaptureTime();
        maxCaptureTime = Math.max(captureTime, maxCaptureTime);
        int adjustedTTL = Common.getAdjustedTTL(ttl, captureTime, clock);
        int i = 0;
        boundStatement.setString(i++, agentId);
        boundStatement.setString(i++, gaugeName);
        boundStatement.setTimestamp(i++, new Date(captureTime));
        boundStatement.setDouble(i++, gaugeValue.getValue());
        boundStatement.setLong(i++, gaugeValue.getWeight());
        boundStatement.setInt(i++, adjustedTTL);
        futures.add(session.writeAsync(boundStatement));
        for (String agentRollupIdForMeta : agentRollupIdsForMeta) {
            futures.addAll(gaugeNameDao.insert(agentRollupIdForMeta, captureTime, gaugeName));
        }
    }

    // wait for success before inserting "needs rollup" records
    MoreFutures.waitForAll(futures);
    futures.clear();

    // insert into gauge_needs_rollup_1
    Map<NeedsRollupKey, ImmutableSet<String>> updatesForNeedsRollupCache1 = new HashMap<>();
    SetMultimap<Long, String> rollupCaptureTimes = getRollupCaptureTimes(gaugeValues);
    for (Map.Entry<Long, Set<String>> entry : Multimaps.asMap(rollupCaptureTimes).entrySet()) {
        Long captureTime = entry.getKey();
        Set<String> gaugeNames = entry.getValue();
        NeedsRollupKey needsRollupKey = ImmutableNeedsRollupKey.of(agentId, captureTime);
        ImmutableSet<String> needsRollupGaugeNames = needsRollupCache1.get(needsRollupKey);
        if (needsRollupGaugeNames == null) {
            // first insert for this key
            updatesForNeedsRollupCache1.put(needsRollupKey, ImmutableSet.copyOf(gaugeNames));
        } else if (needsRollupGaugeNames.containsAll(gaugeNames)) {
            // capture current time after getting data from cache to prevent race condition with
            // reading the data in Common.getNeedsRollupList()
            if (!Common.isOldEnoughToRollup(captureTime, clock.currentTimeMillis(),
                    configRepository.getRollupConfigs().get(0).intervalMillis())) {
                // completely covered by prior inserts that haven't been rolled up yet so no
                // need to re-insert same data
                continue;
            }
        } else {
            // merging may help prevent a few subsequent inserts
            Set<String> combined = new HashSet<>(needsRollupGaugeNames);
            combined.addAll(gaugeNames);
            updatesForNeedsRollupCache1.put(needsRollupKey, ImmutableSet.copyOf(combined));
        }
        BoundStatement boundStatement = insertNeedsRollup.get(0).bind();
        int adjustedTTL = Common.getAdjustedTTL(ttl, captureTime, clock);
        int needsRollupAdjustedTTL = Common.getNeedsRollupAdjustedTTL(adjustedTTL,
                configRepository.getRollupConfigs());
        int i = 0;
        boundStatement.setString(i++, agentId);
        boundStatement.setTimestamp(i++, new Date(captureTime));
        boundStatement.setUUID(i++, UUIDs.timeBased());
        boundStatement.setSet(i++, gaugeNames);
        boundStatement.setInt(i++, needsRollupAdjustedTTL);
        futures.add(session.writeAsync(boundStatement));
    }
    MoreFutures.waitForAll(futures);

    // update the cache now that the above inserts were successful
    needsRollupCache1.putAll(updatesForNeedsRollupCache1);
}

From source file:org.ambraproject.wombat.controller.BrowseController.java
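After the configured article types have been consumed, asMap is used to iterate any remaining groups in the LinkedListMultimap so they can be appended (with a warning) at the end.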

private List<TypedArticleGroup> buildArticleGroups(Site site, String issueId, List<Map<String, ?>> articles)
        throws IOException {
    // Articles grouped by their type. Order within the value lists is significant.
    ArticleType.Dictionary typeDictionary = ArticleType.getDictionary(site.getTheme());
    ListMultimap<ArticleType, Map<String, Object>> groupedArticles = LinkedListMultimap.create();
    for (Map<String, ?> article : articles) {
        if (!article.containsKey("revisionNumber"))
            continue; // Omit unpublished articles

        Map<String, Object> populatedArticle = new HashMap<>(article);

        Map<String, ?> ingestion = (Map<String, ?>) article.get("ingestion");
        ArticleType articleType = typeDictionary.lookUp((String) ingestion.get("articleType"));

        populateRelatedArticles(populatedArticle);

        populateAuthors(populatedArticle, site);

        groupedArticles.put(articleType, populatedArticle);
    }

    // The article types supported by this site, in the order in which they are supposed to appear.
    ImmutableList<ArticleType> articleTypes = typeDictionary.getSequence();

    // Produce a correctly ordered list of TypedArticleGroup, populated with the article groups.
    List<TypedArticleGroup> articleGroups = new ArrayList<>(articleTypes.size());
    for (ArticleType articleType : articleTypes) {
        List<Map<String, Object>> articlesOfType = groupedArticles.removeAll(articleType);
        if (!articlesOfType.isEmpty()) {
            articleGroups.add(new TypedArticleGroup(articleType, articlesOfType));
        }
    }

    // If any article groups were not matched, append them to the end.
    for (Map.Entry<ArticleType, List<Map<String, Object>>> entry : Multimaps.asMap(groupedArticles)
            .entrySet()) {
        ArticleType type = entry.getKey();
        TypedArticleGroup group = new TypedArticleGroup(type, entry.getValue());
        articleGroups.add(group);

        log.warn(String.format("Issue %s has articles of type \"%s\", which is not configured for %s: %s",
                issueId, type.getName(), site.getKey(),
                Lists.transform(group.articles, article -> article.get("doi"))));
    }

    return articleGroups;
}

From source file:de.bund.bfr.knime.gis.views.canvas.CanvasUtils.java
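A compact example: property values are collected into a LinkedHashMultimap and the asMap view is returned directly as the Map<String, Set<String>> result.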

public static Map<String, Set<String>> getPossibleValues(Collection<? extends Element> elements) {
    SetMultimap<String, String> values = LinkedHashMultimap.create();

    for (Element e : elements) {
        e.getProperties().forEach((property, value) -> {
            if (value instanceof Boolean) {
                values.putAll(property, Arrays.asList(Boolean.FALSE.toString(), Boolean.TRUE.toString()));
            } else if (value != null) {
                values.put(property, value.toString());
            }
        });
    }

    return Multimaps.asMap(values);
}

From source file:dagger.internal.codegen.ComponentModelBuilder.java
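Here component methods are grouped by signature with Multimaps.index, and asMap(...).values() yields one List per distinct signature so only one method is implemented for each.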

protected void addInterfaceMethods() {
    /* Each component method may have been declared by several supertypes. We want to implement only
     * one method for each distinct signature.*/
    ImmutableListMultimap<MethodSignature, ComponentMethodDescriptor> componentMethodsBySignature = Multimaps
            .index(graph.componentDescriptor().entryPointMethods(), this::getMethodSignature);
    for (List<ComponentMethodDescriptor> methodsWithSameSignature : Multimaps.asMap(componentMethodsBySignature)
            .values()) {
        ComponentMethodDescriptor anyOneMethod = methodsWithSameSignature.stream().findAny().get();
        generatedComponentModel.addMethod(COMPONENT_METHOD,
                bindingExpressions.getComponentMethod(anyOneMethod));
    }
}

From source file:io.prestosql.plugin.raptor.legacy.RaptorMetadata.java
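Essentially the same pattern as the Presto example above, ported to the io.prestosql package.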

@Override
public Map<SchemaTableName, List<ColumnMetadata>> listTableColumns(ConnectorSession session,
        SchemaTablePrefix prefix) {
    requireNonNull(prefix, "prefix is null");

    ImmutableListMultimap.Builder<SchemaTableName, ColumnMetadata> columns = ImmutableListMultimap.builder();
    for (TableColumn tableColumn : dao.listTableColumns(prefix.getSchema().orElse(null),
            prefix.getTable().orElse(null))) {
        ColumnMetadata columnMetadata = new ColumnMetadata(tableColumn.getColumnName(),
                tableColumn.getDataType());
        columns.put(tableColumn.getTable(), columnMetadata);
    }
    return Multimaps.asMap(columns.build());
}

From source file:com.google.template.soy.types.proto.SoyProtoTypeImpl.java
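Extensions are grouped by Soy field name in a HashMultimap; iterating Multimaps.asMap(extensionsBySoyName) gives each name together with its full set of candidate fields so ambiguities can be detected.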

public SoyProtoTypeImpl(SoyTypeRegistry typeRegistry, Descriptor descriptor, Set<FieldDescriptor> extensions) {
    this.typeRegistry = typeRegistry;
    this.typeDescriptor = descriptor;
    ImmutableMap.Builder<String, Field> builder = ImmutableMap.builder();
    for (FieldDescriptor fieldDescriptor : descriptor.getFields()) {
        if (Protos.shouldJsIgnoreField(fieldDescriptor)) {
            continue;
        }
        NormalField field = new NormalField(fieldDescriptor);
        builder.put(field.getName(), field);
    }
    SetMultimap<String, ExtensionField> extensionsBySoyName = HashMultimap.create();
    for (FieldDescriptor extension : extensions) {
        ExtensionField field = new ExtensionField(extension);
        extensionsBySoyName.put(field.getName(), field);
    }
    for (Map.Entry<String, Set<ExtensionField>> group : Multimaps.asMap(extensionsBySoyName).entrySet()) {
        String fieldName = group.getKey();
        Set<ExtensionField> ambiguousFields = group.getValue();
        if (ambiguousFields.size() == 1) {
            builder.put(fieldName, Iterables.getOnlyElement(ambiguousFields));
        } else {
            AmbiguousFieldSet value = new AmbiguousFieldSet(fieldName, ambiguousFields);

            logger.severe("Proto " + descriptor.getFullName() + " has multiple extensions with the name \""
                    + fieldName + "\": " + value.fullFieldNames
                    + "; this field will not be accessible from soy");
            builder.put(fieldName, value);
        }
    }
    fields = builder.build();
}