Example usage for com.google.common.collect Multimap values

List of usage examples for com.google.common.collect Multimap values

Introduction

On this page you can find example usage for com.google.common.collect Multimap values().

Prototype

Collection<V> values();

Document

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).
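
A minimal runnable sketch of this behaviour (the class and data below are invented for illustration, not taken from the examples on this page):

import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class MultimapValuesDemo {
    public static void main(String[] args) {
        // LinkedListMultimap keeps entry insertion order, so the output below is deterministic
        Multimap<String, Integer> multimap = LinkedListMultimap.create();
        multimap.put("a", 1);
        multimap.put("a", 1); // duplicate key-value pair is kept
        multimap.put("b", 2);

        Collection<Integer> values = multimap.values();
        System.out.println(values);                            // [1, 1, 2]
        System.out.println(values.size() == multimap.size());  // true

        multimap.put("c", 3);
        System.out.println(values.size()); // 4 -- values() is a live view
    }
}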

Usage

From source file:com.torodb.torod.db.backends.query.processors.InProcessor.java

@Nullable
private static ProcessedQueryCriteria getNumericQuery(InQueryCriteria criteria,
        Multimap<ScalarType, ScalarValue<?>> byTypeValues) {
    ImmutableList.Builder<ScalarValue<?>> newInBuilder = ImmutableList.builder();

    for (ScalarValue<?> value : byTypeValues.values()) {
        newInBuilder.add(value);
    }

    ImmutableList<ScalarValue<?>> newIn = newInBuilder.build();

    if (newIn.isEmpty()) {
        return null;
    }

    DisjunctionBuilder structureBuilder = new DisjunctionBuilder();

    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), ScalarType.DOUBLE));
    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), ScalarType.INTEGER));
    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), ScalarType.LONG));

    newInBuilder.addAll(byTypeValues.get(ScalarType.DOUBLE));
    newInBuilder.addAll(byTypeValues.get(ScalarType.INTEGER));
    newInBuilder.addAll(byTypeValues.get(ScalarType.LONG));

    return new ProcessedQueryCriteria(structureBuilder.build(),
            new InQueryCriteria(criteria.getAttributeReference(), newIn));
}
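
The snippet above reads every value out of a type-keyed multimap via values(). A self-contained sketch of the same pattern, using plain JDK number types rather than the torodb ScalarValue classes:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;

public class ByTypeValuesSketch {
    public static void main(String[] args) {
        // Analogous to byTypeValues above: values grouped under a type key
        Multimap<Class<?>, Number> byType = HashMultimap.create();
        for (Number n : Arrays.<Number>asList(1, 2L, 3.0)) {
            byType.put(n.getClass(), n);
        }

        // values() flattens every group back into one collection
        for (Number n : byType.values()) {
            System.out.println(n.getClass().getSimpleName() + ": " + n);
        }
    }
}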

From source file:org.opendaylight.controller.config.yang.store.impl.ExtenderYangTracker.java

private static Collection<InputStream> fromUrlsToInputStreams(Multimap<Bundle, URL> multimap) {
    return Collections2.transform(multimap.values(), new Function<URL, InputStream>() {

        @Override
        public InputStream apply(URL url) {
            try {
                return url.openStream();
            } catch (IOException e) {
                logger.warn("Unable to open stream from {}", url);
                throw new IllegalStateException("Unable to open stream from " + url, e);
            }
        }
    });
}
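
Collections2.transform wraps values() in a lazy view, so the conversion function runs as the result is iterated. A standalone sketch of the same idea, with plain strings in place of bundles and URL streams:

import com.google.common.base.Function;
import com.google.common.collect.Collections2;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class TransformValuesSketch {
    public static void main(String[] args) {
        Multimap<String, String> bundleToUrls = HashMultimap.create();
        bundleToUrls.put("bundle-a", "yang/a.yang");
        bundleToUrls.put("bundle-b", "yang/b.yang");

        // A view, not a copy: apply() runs lazily on each iteration
        Collection<Integer> lengths = Collections2.transform(bundleToUrls.values(),
                new Function<String, Integer>() {
                    @Override
                    public Integer apply(String url) {
                        return url.length();
                    }
                });
        System.out.println(lengths); // [11, 11]
    }
}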

From source file:org.opendaylight.controller.config.yang.store.impl.ExtenderYangTracker.java

private static Set<URL> setFromMultimapValues(Multimap<Bundle, URL> bundlesToYangURLs) {
    Set<URL> urls = Sets.newHashSet(bundlesToYangURLs.values());
    Preconditions.checkState(bundlesToYangURLs.size() == urls.size());
    return urls;
}
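
A quick usage sketch of the uniqueness check above, with invented string URLs in place of java.net.URL: copying values() into a HashSet and comparing sizes fails fast if two bundles contributed the same URL:

import com.google.common.base.Preconditions;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;

import java.util.Set;

public class DistinctValuesSketch {
    public static void main(String[] args) {
        Multimap<String, String> bundlesToYangUrls = HashMultimap.create();
        bundlesToYangUrls.put("bundle-a", "yang/a.yang");
        bundlesToYangUrls.put("bundle-b", "yang/b.yang");

        Set<String> urls = Sets.newHashSet(bundlesToYangUrls.values());
        // Throws IllegalStateException if any URL appeared under more than one bundle
        Preconditions.checkState(bundlesToYangUrls.size() == urls.size());
        System.out.println(urls);
    }
}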

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchSparkUtils.java

/** 
 * @param client - the elasticsearch client
 * @param job_input - the input settings
 * @return
 */
public static IAnalyticsAccessContext<DataFrame> getDataFrame(final Client client,
        final AnalyticThreadJobBean.AnalyticThreadJobInputBean job_input) {
    final SetOnce<Map<String, String>> _es_options = new SetOnce<>();

    return new IAnalyticsAccessContext<DataFrame>() {

        @Override
        public String describe() {
            //(return the entire thing)
            return ErrorUtils.get("service_name={0} options={1}",
                    this.getAccessService().right().value().getSimpleName(), _es_options.optional());
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessService()
         */
        @SuppressWarnings("unchecked")
        @Override
        public Either<DataFrame, Class<DataFrame>> getAccessService() {
            return Either.right((Class<DataFrame>) (Class<?>) DataFrame.class);
        }

        @Override
        public Optional<Map<String, Object>> getAccessConfig() {

            // OK this is horrible but we're going to return a map of lambdas SparkContext -> SchemaRDD

            //TODO (XXX): going to start off with a simple version of this:

            final String index_resource = ElasticsearchContext.READ_PREFIX
                    + ElasticsearchIndexUtils.getBaseIndexName(BeanTemplateUtils.build(DataBucketBean.class)
                            .with(DataBucketBean::full_name, job_input.resource_name_or_id()).done().get(),
                            Optional.empty())
                    + "*";

            //TODO (ALEPH-72): support multi-buckets / buckets with non-standard indexes ... also use the tmin/tmax
            // (needs MDB to pull out - because need to get the full bucket ugh)

            // Currently need to add types: 
            //TODO (ALEPH-72): from elasticsearch-hadoop 2.2.0.m2 this will no longer be necessary (currently at 2.2.0.m1)
            final Multimap<String, String> index_type_mapping = ElasticsearchIndexUtils.getTypesForIndex(client,
                    index_resource);
            final String type_resource = index_type_mapping.values().stream().collect(Collectors.toSet())
                    .stream().collect(Collectors.joining(","));
            final String final_index = ElasticsearchHadoopUtils
                    .getTimedIndexes(job_input, index_type_mapping, new Date())
                    .map(s -> Stream
                            .concat(s,
                                    TimeSliceDirUtils
                                            .getUntimedDirectories(index_type_mapping.keySet().stream()))
                            .collect(Collectors.joining(",")))
                    .orElse(index_resource);

            //TODO (ALEPH-72): handle single/multiple types

            final Map<String, String> es_options = ImmutableMap.<String, String>of(
                    "es.index.read.missing.as.empty", "yes", "es.read.metadata", "true",
                    "es.read.metadata.field", Aleph2EsInputFormat.ALEPH2_META_FIELD, "es.resource",
                    final_index + "/" + type_resource);

            _es_options.set(es_options);
            final String table_name = Optional.ofNullable(job_input.name())
                    .orElse(BucketUtils.getUniqueSignature(job_input.resource_name_or_id(), Optional.empty()));

            Function<SQLContext, DataFrame> f = sql_context -> {
                final DataFrame df = JavaEsSparkSQL.esDF(sql_context, es_options);
                df.registerTempTable(table_name);
                return df;
            };
            return Optional.of(ImmutableMap.of(table_name, (Object) f));
        }
    };
}
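
The values().stream().collect(Collectors.toSet()) step above deduplicates type names before joining them into the es.resource string (the same step appears again in the ElasticsearchHadoopUtils example below). A standalone sketch with invented index and type names; a plain distinct() on the stream is an equivalent, shorter spelling:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.stream.Collectors;

public class JoinDistinctTypesSketch {
    public static void main(String[] args) {
        Multimap<String, String> indexToTypes = HashMultimap.create();
        indexToTypes.put("logs_2016", "event");
        indexToTypes.put("logs_2017", "event"); // same type under a second index
        indexToTypes.put("logs_2017", "audit");

        // Deduplicate type names across all indexes, then join them
        String typeResource = indexToTypes.values().stream()
                .distinct()
                .collect(Collectors.joining(","));
        System.out.println(typeResource); // "event,audit" (order not guaranteed)
    }
}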

From source file:org.lightjason.agentspeak.action.builtin.bool.CEqual.java

/**
 * compare multimaps
 *
 * @param p_source source multimap
 * @param p_target multimap to compare
 * @return equality boolean flag
 */
private static boolean equalmultimap(@Nonnull final Multimap<?, ?> p_source,
        @Nonnull final Multimap<?, ?> p_target) {
    return Arrays.equals(p_source.asMap().keySet().toArray(), p_target.asMap().keySet().toArray())
            && Arrays.equals(p_source.values().toArray(), p_target.values().toArray());
}
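
A usage sketch with toy data. Note that this comparison relies on both multimaps iterating keys and values in the same order, which is why an order-preserving implementation such as LinkedListMultimap is used here; Guava's own Multimap.equals compares the asMap() views instead:

import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;

public class EqualMultimapSketch {
    public static void main(String[] args) {
        Multimap<String, Integer> source = LinkedListMultimap.create();
        source.put("x", 1);
        source.put("y", 2);
        Multimap<String, Integer> target = LinkedListMultimap.create();
        target.put("x", 1);
        target.put("y", 2);

        // Same check as equalmultimap() above: key order and value order must match
        boolean equal = Arrays.equals(source.asMap().keySet().toArray(), target.asMap().keySet().toArray())
                && Arrays.equals(source.values().toArray(), target.values().toArray());
        System.out.println(equal); // true
    }
}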

From source file:org.sosy_lab.cpachecker.pcc.strategy.partitioning.PartitionChecker.java

public static boolean areElementsCoveredByPartitionElement(final Collection<AbstractState> pInOtherPartitions,
        Multimap<CFANode, AbstractState> pInPartition, final StopOperator pStop, final Precision pPrec)
        throws CPAException, InterruptedException {
    HashSet<AbstractState> partitionNodes = new HashSet<>(pInPartition.values());

    for (AbstractState outState : pInOtherPartitions) {
        if (!partitionNodes.contains(outState)
                && !pStop.stop(outState, pInPartition.get(AbstractStates.extractLocation(outState)), pPrec)) {
            return false;
        }
    }

    return true;
}
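
A self-contained sketch of the containment half of that check, with strings standing in for abstract states: values() is copied into a HashSet once so repeated contains() lookups are cheap:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;
import java.util.HashSet;

public class PartitionMembershipSketch {
    public static void main(String[] args) {
        Multimap<String, String> inPartition = HashMultimap.create();
        inPartition.put("n1", "state-a");
        inPartition.put("n2", "state-b");

        HashSet<String> partitionNodes = new HashSet<>(inPartition.values());
        for (String outState : Arrays.asList("state-a", "state-c")) {
            System.out.println(outState + " covered: " + partitionNodes.contains(outState));
        }
    }
}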

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchHadoopUtils.java

/** 
 * @param client - the elasticsearch client
 * @param job_input - the input settings
 * @return
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public static IAnalyticsAccessContext<InputFormat> getInputFormat(final Client client,
        final AnalyticThreadJobBean.AnalyticThreadJobInputBean job_input) {
    return new IAnalyticsAccessContext<InputFormat>() {
        private LinkedHashMap<String, Object> _mutable_output = null;

        @Override
        public String describe() {
            //(return the entire thing)
            return ErrorUtils.get("service_name={0} options={1}",
                    this.getAccessService().right().value().getSimpleName(), this.getAccessConfig().get());
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessService()
         */
        @Override
        public Either<InputFormat, Class<InputFormat>> getAccessService() {
            return Either.right((Class<InputFormat>) (Class<?>) Aleph2EsInputFormat.class);
        }

        /* (non-Javadoc)
         * @see com.ikanow.aleph2.data_model.interfaces.data_analytics.IAnalyticsAccessContext#getAccessConfig()
         */
        @Override
        public Optional<Map<String, Object>> getAccessConfig() {
            if (null != _mutable_output) {
                return Optional.of(_mutable_output);
            }
            _mutable_output = new LinkedHashMap<>();

            // Check for input record limit:
            Optional.ofNullable(job_input.config()).map(cfg -> cfg.test_record_limit_request()).ifPresent(
                    max -> _mutable_output.put(Aleph2EsInputFormat.BE_DEBUG_MAX_SIZE, Long.toString(max)));

            final String index_resource = ElasticsearchContext.READ_PREFIX
                    + ElasticsearchIndexUtils.getBaseIndexName(BeanTemplateUtils.build(DataBucketBean.class)
                            .with(DataBucketBean::full_name, job_input.resource_name_or_id()).done().get(),
                            Optional.empty())
                    + "*";

            //TODO (ALEPH-72): support multi-buckets / buckets with non-standard indexes ... also use the tmin/tmax
            // (needs MDB to pull out - because need to get the full bucket ugh)

            // Currently need to add types: 
            //TODO (ALEPH-72): 2.2.0 you _can_ just put "indexes/" to get all types - that doesn't work for all es-hadoop code though
            final Multimap<String, String> index_type_mapping = ElasticsearchIndexUtils.getTypesForIndex(client,
                    index_resource);
            final String type_resource = index_type_mapping.values().stream().collect(Collectors.toSet())
                    .stream().collect(Collectors.joining(","));
            final String final_index = getTimedIndexes(job_input, index_type_mapping,
                    new Date())
                            .map(s -> Stream
                                    .concat(s,
                                            TimeSliceDirUtils.getUntimedDirectories(
                                                    index_type_mapping.keySet().stream()))
                                    .collect(Collectors.joining(",")))
                            .orElse(index_resource);

            _mutable_output.put("es.resource", final_index + "/" + type_resource);

            _mutable_output.put("es.read.metadata", "true");
            _mutable_output.put("es.read.metadata.field", Aleph2EsInputFormat.ALEPH2_META_FIELD);

            _mutable_output.put("es.index.read.missing.as.empty", "yes");

            _mutable_output.put("es.query",
                    Optional.ofNullable(job_input.filter()).map(f -> f.get("technology_override")).map(o -> {
                        return (o instanceof String) ? o.toString()
                                : _mapper.convertValue(o, JsonNode.class).toString();
                    }).orElse("?q=*"));
            //TODO (ALEPH-72) (incorporate tmin/tmax and also add a JSON mapping for the Aleph2 crud utils)

            // Here are the parameters that can be set:
            // es.query ... can be stringified JSON or a q=string .... eg conf.set("es.query", "?q=me*");  
            //config.set("es.resource", overallIndexNames.toString()); .. .this was in the format X,Y,Z[/type],,etc which then got copied to 
            // create a simple multi-input format .. looks like i didn't do anything if no type was set, unclear if that was an optimization
            // or if it doesn't work... (if it doesn't work then what you have to do is get the mappings for each index and
            // get the types and insert them all)
            //config.set("es.index.read.missing.as.empty", "yes");

            // (not sure if need to set just normal http port/host?)
            //config.set("es.net.proxy.http.host", "localhost");
            //config.set("es.net.proxy.http.port", "8888");

            return Optional.of(Collections.unmodifiableMap(_mutable_output));
        }
    };
}

From source file:org.eclipse.tracecompass.tmf.core.analysis.TmfAnalysisManager.java

/**
 * Gets all analysis module helpers that apply to a given trace type. For
 * each analysis ID, only one helper will be returned if more than one
 * applies.
 *
 * This map is read-only
 *
 * TODO: This method is only used to populate the project view in the UI. It
 * should be deprecated eventually, after some UI rework, so that the trace
 * type does not drive whether the analysis module applies or not to a
 * trace, but rather the content of the trace or experiment (once it is
 * opened)
 *
 * @param traceclass
 *            The trace class to get modules for
 * @return The map of available {@link IAnalysisModuleHelper}
 */
public static Map<String, IAnalysisModuleHelper> getAnalysisModules(Class<? extends ITmfTrace> traceclass) {
    Multimap<String, IAnalysisModuleHelper> allModules = getAnalysisModules();
    Map<String, IAnalysisModuleHelper> map = new HashMap<>();
    for (IAnalysisModuleHelper module : allModules.values()) {
        if (module.appliesToTraceType(traceclass)) {
            map.put(module.getId(), module);
        }
    }
    return ImmutableMap.copyOf(map);
}
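
A condensed sketch of the dedup-by-ID loop, with a small stand-in helper class rather than the Trace Compass IAnalysisModuleHelper: iterating values() and keying a HashMap by ID keeps one helper per ID:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Multimap;

import java.util.HashMap;
import java.util.Map;

public class OneHelperPerIdSketch {
    static class Helper {
        final String id;
        Helper(String id) { this.id = id; }
        @Override public String toString() { return "Helper(" + id + ")"; }
    }

    public static void main(String[] args) {
        Multimap<String, Helper> allModules = HashMultimap.create();
        allModules.put("source-a", new Helper("stats"));
        allModules.put("source-b", new Helper("stats")); // same ID: the last one iterated wins
        allModules.put("source-b", new Helper("graph"));

        Map<String, Helper> map = new HashMap<>();
        for (Helper module : allModules.values()) {
            map.put(module.id, module);
        }
        System.out.println(ImmutableMap.copyOf(map).keySet()); // [stats, graph] (order not guaranteed)
    }
}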

From source file:org.streamingpool.ext.tensorics.streamfactory.DetailedTensoricsExpressionStreamFactory.java

private static Flowable<?> triggerObservable(Multimap<StreamId<?>, ? extends Flowable<?>> flowables,
        EvaluationStrategy strategy, DiscoveryService discoveryService) {
    if (strategy instanceof ContinuousEvaluation) {
        Collection<? extends Flowable<?>> streams = flowables.values();
        if (streams.isEmpty()) {
            LOGGER.warn("The expression does not contain any streams. "
                    + "Therefore it will never emit! This rarely might be what you want ;-)");
        }
        return Flowable.combineLatest(streams, TRIGGER_CONTEXT_COMBINER);
    }
    if (strategy instanceof BufferedEvaluation) {
        List<? extends Flowable<?>> triggeringObservables = flowables.entries().stream()
                .filter(e -> (e.getKey() instanceof OverlapBufferStreamId)).map(Entry::getValue)
                .collect(toList());
        if (triggeringObservables.isEmpty()) {
            throw new NoBufferedStreamSpecifiedException();
        }
        return zip(triggeringObservables, ImmutableSet::of);
    }
    if (strategy instanceof TriggeredEvaluation) {
        return fromPublisher(discoveryService.discover(((TriggeredEvaluation) strategy).triggeringStreamId()));
    }
    throw new IllegalArgumentException(
            "Unknown evaluationStrategy '" + strategy + "'. Cannot create trigger Observable.");
}
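
In the ContinuousEvaluation branch, values() supplies the complete collection of streams to Flowable.combineLatest. A minimal sketch of that step, assuming RxJava 2 on the classpath and using invented stream IDs:

import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;

import io.reactivex.Flowable;

import java.util.Arrays;
import java.util.Collection;

public class CombineLatestValuesSketch {
    public static void main(String[] args) {
        Multimap<String, Flowable<Integer>> flowables = LinkedListMultimap.create();
        flowables.put("id-1", Flowable.just(1));
        flowables.put("id-2", Flowable.just(2));

        // values() hands the whole set of streams to combineLatest in one go
        Collection<Flowable<Integer>> streams = flowables.values();
        Flowable.combineLatest(streams, Arrays::asList)
                .subscribe(System.out::println); // [1, 2]
    }
}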

From source file:com.android.tools.idea.navigator.nodes.NdkModuleNode.java

@NotNull
public static Collection<AbstractTreeNode> getNativeSourceNodes(@NotNull Project project,
        @NotNull NdkModuleModel ndkModuleModel, @NotNull ViewSettings viewSettings) {
    NativeAndroidProject nativeAndroidProject = ndkModuleModel.getAndroidProject();
    Collection<String> sourceFileExtensions = nativeAndroidProject.getFileExtensions().keySet();

    NdkModuleModel.NdkVariant variant = ndkModuleModel.getSelectedVariant();
    Multimap<String, NativeArtifact> nativeLibraries = HashMultimap.create();
    for (NativeArtifact artifact : variant.getArtifacts()) {
        String artifactOutputFileName = artifact.getOutputFile().getName();
        nativeLibraries.put(artifactOutputFileName, artifact);
    }

    if (nativeLibraries.keySet().size() == 1) {
        return getSourceDirectoryNodes(project, nativeLibraries.values(), viewSettings, sourceFileExtensions);
    }

    List<AbstractTreeNode> children = Lists.newArrayList();
    for (String name : nativeLibraries.keySet()) {
        String nativeLibraryType = "";
        String nativeLibraryName = trimEnd(name, ".so");
        if (nativeLibraryName.length() < name.length()) {
            nativeLibraryType = "Shared Library";
        } else {
            nativeLibraryName = trimEnd(name, ".a");
            if (nativeLibraryName.length() < name.length()) {
                nativeLibraryType = "Static Library";
            }
        }
        nativeLibraryName = trimStart(nativeLibraryName, "lib");
        children.add(new NativeAndroidLibraryNode(project, nativeLibraryName, nativeLibraryType,
                nativeLibraries.get(name), viewSettings, sourceFileExtensions));
    }
    return children;
}
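
A condensed sketch of the grouping above, with plain strings in place of NativeArtifact objects: when every artifact shares one library name, values() returns the whole collection in one call; otherwise each group is fetched per key with get():

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;

public class NativeLibraryGroupingSketch {
    public static void main(String[] args) {
        Multimap<String, String> nativeLibraries = HashMultimap.create();
        for (String artifact : Arrays.asList("libfoo.so/armeabi", "libfoo.so/x86", "libbar.a/x86")) {
            // Key each artifact by its output file name, as in the loop above
            nativeLibraries.put(artifact.substring(0, artifact.indexOf('/')), artifact);
        }

        if (nativeLibraries.keySet().size() == 1) {
            System.out.println("single library: " + nativeLibraries.values());
        } else {
            for (String name : nativeLibraries.keySet()) {
                System.out.println(name + " -> " + nativeLibraries.get(name));
            }
        }
    }
}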