Example usage for com.google.common.collect Multimap values

Introduction

On this page you can find example usage of the com.google.common.collect Multimap values() method.

Prototype

Collection<V> values();

Documentation

Returns a view collection containing the value from each key-value pair contained in this multimap, without collapsing duplicates (so values().size() == size()).
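
A minimal, self-contained sketch of those semantics: the returned collection is a live view that keeps duplicate values.

import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;

public class MultimapValuesDemo {
    public static void main(String[] args) {
        Multimap<String, Integer> multimap = LinkedListMultimap.create();
        multimap.put("a", 1);
        multimap.put("a", 1); // duplicate key-value pair is kept
        multimap.put("b", 2);

        // values() does not collapse duplicates, so values().size() == size()
        System.out.println(multimap.values());                           // [1, 1, 2]
        System.out.println(multimap.values().size() == multimap.size()); // true

        // values() is a live view: removing through it writes back to the multimap
        multimap.values().remove(1);
        System.out.println(multimap); // {a=[1], b=[2]}
    }
}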

Usage

From source file: com.ikanow.aleph2.analytics.spark.assets.SparkPassthroughTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkPassthroughTopology");

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final Optional<Double> sub_sample = test_spec
                .map(__ -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_TEST, -1)))
                .orElseGet(() -> Optional.ofNullable(spark_context.getDouble(SUBSAMPLE_NORMAL, -1)))
                .filter(d -> d > 0);

        //INFO:
        sub_sample.ifPresent(d -> System.out.println("OPTIONS: sub_sample = " + d));
        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());

            final Optional<JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> input = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2));

            long written = input.map(in -> in.values())
                    .map(rdd -> sub_sample.map(sample -> rdd.sample(true, sample)).orElse(rdd))
                    .map(rdd -> rdd.map(t2 -> {
                        final Validation<BasicMessageBean, JsonNode> ret_val = context.emitObject(
                                Optional.empty(), context.getJob().get(), Either.left(t2._2().getJson()),
                                Optional.empty());
                        return ret_val; // (doesn't matter what I return, just want to count it up)
                    })
                            //DEBUG: (print the output JSON on success and the error message on fail)
                            //.map(val -> test_mode ? val.f().bind(f -> Validation.fail("FAIL: " + f.message())) : val)
                            .count())
                    .orElse(-1L);

            jsc.stop();

            //INFO:
            System.out.println("Wrote: data_objects=" + written);
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
    }
}
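
Stripped of the Spark machinery, the values() idiom above folds every per-input collection into one, regardless of which key each input was registered under. A minimal sketch with plain lists standing in for the RDDs (input names are hypothetical; assumes the usual Guava and JDK imports):

Multimap<String, List<String>> inputs = LinkedListMultimap.create();
inputs.put("batch_input", ImmutableList.of("rec1", "rec2"));
inputs.put("streaming_input", ImmutableList.of("rec3"));

// same shape as inputs.values().stream().reduce((acc1, acc2) -> acc1.union(acc2)) above
Optional<List<String>> merged = inputs.values().stream()
        .reduce((l1, l2) -> ImmutableList.<String>builder().addAll(l1).addAll(l2).build());

merged.ifPresent(System.out::println); // [rec1, rec2, rec3]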

From source file: com.ikanow.aleph2.analytics.spark.assets.SparkJsInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> bucket_logger = new SetOnce<>();
    final SetOnce<String> job_name = new SetOnce<>(); // (the string we'll use in logging activities)
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        bucket_logger.set(context.getLogger(context.getBucket()));
        job_name.set(context.getJob().map(j -> j.name()).orElse("no_name"));

        // Optional: make really really sure it exists after the specified timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();

        final String js_script = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting " + job_name.get());

        SparkConf spark_context = new SparkConf().setAppName(job_name.get());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());
            final JavaPairRDD<Object, Tuple2<Long, IBatchRecord>> all_inputs = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2)).orElse(null);

            // Load globals:
            ScriptEngineManager manager = new ScriptEngineManager();
            ScriptEngine engine = manager.getEngineByName("JavaScript");
            engine.put("_a2_global_context", context);
            engine.put("_a2_global_bucket", context.getBucket().get());
            engine.put("_a2_global_job", context.getJob().get());
            engine.put("_a2_global_config",
                    BeanTemplateUtils.configureMapper(Optional.empty()).convertValue(config, JsonNode.class));
            engine.put("_a2_global_mapper", BeanTemplateUtils.configureMapper(Optional.empty()));
            //TODO (until bucket logger is serializable, don't allow anywhere)
            //engine.put("_a2_bucket_logger", bucket_logger.optional().orElse(null));
            engine.put("_a2_enrichment_name", job_name.get());
            engine.put("_a2_spark_inputs", inputs);
            engine.put("_a2_spark_inputs_all", all_inputs);
            engine.put("_a2_spark_context", jsc);

            Stream.concat(config.uploaded_lang_files().stream(),
                    Stream.of("aleph2_sparkjs_globals_before.js", ""))
                    .flatMap(Lambdas.flatWrap_i(import_path -> {
                        try {
                            if (import_path.equals("")) { // also import the user script just before here
                                return js_script;
                            } else
                                return IOUtils.toString(SparkJsInterpreterTopology.class.getClassLoader()
                                        .getResourceAsStream(import_path), "UTF-8");
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (script {1}): {2}",
                                                            job_name.get(), import_path, e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    })).forEach(Lambdas.wrap_consumer_i(script -> {
                        try {
                            engine.eval(script);
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (main script): {1}",
                                                            job_name.get(), e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    }));

            jsc.stop();

            //INFO:
            System.out.println("Finished " + job_name.get());
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));

        bucket_logger.optional().ifPresent(l -> l.log(Level.ERROR, ErrorUtils.lazyBuildMessage(false,
                () -> SparkJsInterpreterTopology.class.getSimpleName()
                        + job_name.optional().map(j -> "." + j).orElse(""),
                () -> job_name.optional().orElse("global") + ".main", () -> null,
                () -> ErrorUtils.get("Error on batch in job {0}: {1}",
                        job_name.optional().orElse("global") + ".main", t.getMessage()),
                () -> ImmutableMap.<String, Object>of("full_error", ErrorUtils.getLongForm("{0}", t)))));
    }
}

From source file: org.icgc.dcc.portal.manifest.writer.EGAManifestWriter.java

private static Map<String, String> resolveFileIdMap(Multimap<String, ManifestFile> bundles) {
    return bundles.values().stream().collect(toMap(ManifestFile::getRepoFileId, ManifestFile::getId));
}
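
A caveat on this one-liner: values() does not collapse duplicates, and the two-argument Collectors.toMap throws IllegalStateException on duplicate keys, so the call fails if two manifest files ever share a repo file id. A hedged variant (hypothetical method name) that keeps the first mapping instead:

private static Map<String, String> resolveFileIdMapKeepFirst(Multimap<String, ManifestFile> bundles) {
    // the third argument to toMap is a merge function, consulted when two values map to the same key
    return bundles.values().stream()
            .collect(toMap(ManifestFile::getRepoFileId, ManifestFile::getId, (first, second) -> first));
}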

From source file: com.facebook.buck.cli.CommandHelper.java

public static void printToConsole(CommandRunnerParams params,
        Multimap<String, QueryTarget> targetsAndDependencies) {
    for (QueryTarget target : ImmutableSortedSet.copyOf(targetsAndDependencies.values())) {
        params.getConsole().getStdOut().println(target);
    }
}
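
Here ImmutableSortedSet.copyOf does double duty: it sorts the value view and drops the duplicates that values() deliberately keeps, so each target prints once. A minimal sketch of that move (assumes the usual Guava imports):

Multimap<String, String> targetsAndDependencies = LinkedListMultimap.create();
targetsAndDependencies.put("//app:bin", "//lib:a");
targetsAndDependencies.put("//app:test", "//lib:a"); // same dependency under two keys
targetsAndDependencies.put("//app:bin", "//lib:b");

// values() holds three entries; the sorted copy holds two, in natural order
System.out.println(ImmutableSortedSet.copyOf(targetsAndDependencies.values()));
// prints: [//lib:a, //lib:b]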

From source file: com.google.api.codegen.CodeGeneratorUtil.java

/**
 * Writes generated output to files under the given root.
 */
public static <Element> void writeGeneratedOutput(String root, Multimap<Element, GeneratedResult> elements)
        throws IOException {
    Map<String, Doc> files = new LinkedHashMap<>();
    for (GeneratedResult generatedResult : elements.values()) {
        files.put(generatedResult.getFilename(), generatedResult.getDoc());
    }
    ToolUtil.writeFiles(files, root);
}

From source file: eu.itesla_project.cases.EntsoeCaseRepositoryConfig.java

private static Multimap<UcteGeographicalCode, String> checkedFormats(
        Multimap<UcteGeographicalCode, String> forbiddenFormatsByGeographicalCode,
        Collection<String> supportedFormats) {
    // check that formats are valid
    for (String format : forbiddenFormatsByGeographicalCode.values()) {
        if (!supportedFormats.contains(format)) {
            throw new IllegalArgumentException("Unsupported import format " + format);
        }
    }
    return forbiddenFormatsByGeographicalCode;
}

From source file: eu.itesla_project.entsoe.cases.EntsoeCaseRepositoryConfig.java

private static Multimap<EntsoeGeographicalCode, String> checkedFormats(
        Multimap<EntsoeGeographicalCode, String> forbiddenFormatsByGeographicalCode,
        Collection<String> supportedFormats) {
    // check that formats are valid
    for (String format : forbiddenFormatsByGeographicalCode.values()) {
        if (!supportedFormats.contains(format)) {
            throw new IllegalArgumentException("Unsupported import format " + format);
        }
    }
    return forbiddenFormatsByGeographicalCode;
}

From source file: org.jetbrains.jet.plugin.codeInsight.OverrideMethodsHandler.java

@NotNull
private static Set<CallableMemberDescriptor> collectSuperMethods(@NotNull ClassDescriptor classDescriptor) {
    Set<CallableMemberDescriptor> inheritedFunctions = new LinkedHashSet<CallableMemberDescriptor>();
    for (JetType supertype : classDescriptor.getTypeConstructor().getSupertypes()) {
        for (DeclarationDescriptor descriptor : supertype.getMemberScope().getAllDescriptors()) {
            if (descriptor instanceof CallableMemberDescriptor) {
                inheritedFunctions.add((CallableMemberDescriptor) descriptor);
            }
        }
    }

    // Only those actually inherited
    Set<CallableMemberDescriptor> filteredMembers = OverridingUtil.filterOutOverridden(inheritedFunctions);

    // Group members with "the same" signature
    Multimap<CallableMemberDescriptor, CallableMemberDescriptor> factoredMembers = LinkedHashMultimap.create();
    for (CallableMemberDescriptor one : filteredMembers) {
        if (factoredMembers.values().contains(one))
            continue;
        for (CallableMemberDescriptor another : filteredMembers) {
            //                if (one == another) continue;
            factoredMembers.put(one, one);
            if (OverridingUtil.isOverridableBy(one, another).getResult() == OVERRIDABLE
                    || OverridingUtil.isOverridableBy(another, one).getResult() == OVERRIDABLE) {
                factoredMembers.put(one, another);
            }
        }
    }

    return factoredMembers.keySet();
}

From source file: org.jetbrains.kotlin.idea.codeInsight.OverrideMethodsHandler.java

@NotNull
private static Set<CallableMemberDescriptor> collectSuperMethods(@NotNull ClassDescriptor classDescriptor) {
    Set<CallableMemberDescriptor> inheritedFunctions = new LinkedHashSet<CallableMemberDescriptor>();
    for (JetType supertype : classDescriptor.getTypeConstructor().getSupertypes()) {
        for (DeclarationDescriptor descriptor : supertype.getMemberScope().getAllDescriptors()) {
            if (descriptor instanceof CallableMemberDescriptor) {
                inheritedFunctions.add((CallableMemberDescriptor) descriptor);
            }
        }
    }

    // Only those actually inherited
    Set<CallableMemberDescriptor> filteredMembers = OverrideResolver.filterOutOverridden(inheritedFunctions);

    // Group members with "the same" signature
    Multimap<CallableMemberDescriptor, CallableMemberDescriptor> factoredMembers = LinkedHashMultimap.create();
    for (CallableMemberDescriptor one : filteredMembers) {
        if (factoredMembers.values().contains(one))
            continue;
        for (CallableMemberDescriptor another : filteredMembers) {
            //                if (one == another) continue;
            factoredMembers.put(one, one);
            if (OverridingUtil.DEFAULT.isOverridableBy(one, another).getResult() == OVERRIDABLE
                    || OverridingUtil.DEFAULT.isOverridableBy(another, one).getResult() == OVERRIDABLE) {
                factoredMembers.put(one, another);
            }
        }
    }

    return factoredMembers.keySet();
}

From source file: com.torodb.torod.db.postgresql.query.processors.InProcessor.java

@Nullable
private static ProcessedQueryCriteria getNumericQuery(InQueryCriteria criteria,
        Multimap<BasicType, Value<?>> byTypeValues) {
    ImmutableList.Builder<Value<?>> newInBuilder = ImmutableList.builder();

    for (Value<?> value : byTypeValues.values()) {
        newInBuilder.add(value);
    }

    ImmutableList<Value<?>> newIn = newInBuilder.build();

    if (newIn.isEmpty()) {
        return null;
    }

    DisjunctionBuilder structureBuilder = new DisjunctionBuilder();

    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), BasicType.DOUBLE));
    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), BasicType.INTEGER));
    structureBuilder.add(new TypeIsQueryCriteria(criteria.getAttributeReference(), BasicType.LONG));

    newInBuilder.addAll(byTypeValues.get(BasicType.DOUBLE));
    newInBuilder.addAll(byTypeValues.get(BasicType.INTEGER));
    newInBuilder.addAll(byTypeValues.get(BasicType.LONG));

    return new ProcessedQueryCriteria(structureBuilder.build(),
            new InQueryCriteria(criteria.getAttributeReference(), newIn));
}