Example usage for java.util.stream Stream.concat

Introduction

This page collects usage examples for java.util.stream Stream.concat.

Prototype

public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? extends T> b) 

Documentation

Creates a lazily concatenated stream whose elements are all the elements of the first stream followed by all the elements of the second stream.
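
As a quick orientation before the real-world examples, here is a minimal, self-contained sketch (the class and variable names are illustrative, not taken from the examples below) showing that the concatenation preserves encounter order and stays lazy until a terminal operation runs:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class StreamConcatDemo {
    public static void main(String[] args) {
        Stream<String> first = Stream.of("a", "b");
        Stream<String> second = Stream.of("c", "d");

        // All elements of the first stream, followed by all elements of the second;
        // neither source stream is traversed until collect() runs.
        List<String> merged = Stream.concat(first, second)
                .collect(Collectors.toList());

        System.out.println(merged); // [a, b, c, d]
    }
}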

Usage

From source file: net.sourceforge.fenixedu.domain.Person.java

public static Stream<Person> findPersonStream(final String name, final int size) {
    return Stream.concat(PersonName.findPersonStream(name, size).map(n -> n.getPerson()),
            UserProfile.searchByName(name, size).map(p -> p.getPerson()).filter(Objects::nonNull));
}

From source file: net.sourceforge.fenixedu.domain.Person.java

public static Stream<Person> findInternalPersonStream(final String name, final int size) {
    return Stream.concat(PersonName.findInternalPersonStream(name, size).map(n -> n.getPerson()),
            UserProfile.searchByName(name, size).map(p -> p.getPerson()).filter(Objects::nonNull)
                    .filter(p -> !p.isExternalPerson()));
}

From source file: net.sourceforge.fenixedu.domain.Person.java

public static Stream<Person> findExternalPersonStream(final String name, final int size) {
    return Stream.concat(PersonName.findExternalPersonStream(name, size).map(n -> n.getPerson()),
            UserProfile.searchByName(name, size).map(p -> p.getPerson()).filter(Objects::nonNull)
                    .filter(p -> p.isExternalPerson()));
}

From source file: org.apache.druid.indexing.kafka.supervisor.KafkaSupervisor.java

private void updateCurrentOffsets() throws InterruptedException, ExecutionException, TimeoutException {
    final List<ListenableFuture<Void>> futures = Stream
            .concat(taskGroups.values().stream().flatMap(taskGroup -> taskGroup.tasks.entrySet().stream()),
                    pendingCompletionTaskGroups.values().stream().flatMap(List::stream)
                            .flatMap(taskGroup -> taskGroup.tasks.entrySet().stream()))
            .map(task -> Futures.transform(taskClient.getCurrentOffsetsAsync(task.getKey(), false),
                    (Function<Map<Integer, Long>, Void>) (currentOffsets) -> {

                        if (currentOffsets != null && !currentOffsets.isEmpty()) {
                            task.getValue().currentOffsets = currentOffsets;
                        }

                        return null;
                    }))
            .collect(Collectors.toList());

    Futures.successfulAsList(futures).get(futureTimeoutInSeconds, TimeUnit.SECONDS);
}

From source file: com.ikanow.aleph2.data_import_manager.analytics.actors.DataBucketAnalyticsChangeActor.java

/** Utility to set the per-module settings for the context
 * @param job
 * @param context
 * @param libs
 */
protected final static void setPerJobContextParams(final AnalyticThreadJobBean job,
        final AnalyticsContext context, final Map<String, Tuple2<SharedLibraryBean, String>> libs) {
    context.resetJob(job);
    context.resetLibraryConfigs(Stream
            .concat(Optional.ofNullable(job.module_name_or_id()).map(Stream::of).orElseGet(Stream::empty),
                    Optional.ofNullable(job.library_names_or_ids()).map(l -> l.stream())
                            .orElseGet(Stream::empty))
            .map(lib -> libs.get(lib)).filter(lib -> null != lib).map(lib -> lib._1())
            .flatMap(
                    lib -> Arrays.asList(Tuples._2T(lib._id(), lib), Tuples._2T(lib.path_name(), lib)).stream())
            .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2(), (a, b) -> a, // (can't happen)
                    () -> new LinkedHashMap<String, SharedLibraryBean>())));
}
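
The pattern above (repeated in getQuery further down) folds an optional single module id together with an optional list of library ids. Here is a minimal, standalone sketch of that idiom, with made-up names:

import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class OptionalConcatDemo {
    public static void main(String[] args) {
        Optional<String> maybeModule = Optional.of("module-a"); // may be empty
        List<String> libraries = Arrays.asList("lib-1", "lib-2");

        // An Optional becomes a stream of zero or one elements,
        // followed by all elements of the list.
        List<String> all = Stream
                .concat(maybeModule.map(Stream::of).orElseGet(Stream::empty), libraries.stream())
                .collect(Collectors.toList());

        System.out.println(all); // [module-a, lib-1, lib-2]
    }
}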

From source file: com.ikanow.aleph2.data_import_manager.analytics.actors.DataBucketAnalyticsChangeActor.java

/** Combine the analytic-thread-level results and the per-job results into a single reply
 * @param top_level
 * @param per_job
 * @param source
 * @return
 */
protected final static CompletableFuture<BucketActionReplyMessage> combineResults(
        final CompletableFuture<BasicMessageBean> top_level,
        final List<CompletableFuture<BasicMessageBean>> per_job, final String source) {
    if (per_job.isEmpty()) {
        return top_level.thenApply(reply -> new BucketActionHandlerMessage(source, reply));
    } else { // slightly more complex:

        // First off wait for them all to complete:
        final CompletableFuture<?>[] futures = per_job.toArray(new CompletableFuture<?>[0]);

        return top_level.thenCombine(CompletableFuture.allOf(futures), (thread, __) -> {
            List<BasicMessageBean> replies = Stream.concat(Lambdas.get(() -> {
                if (thread.success() && ((null == thread.message()) || thread.message().isEmpty())) {
                    // Ignore top level, it's not very interesting
                    return Stream.empty();
                } else {
                    return Stream.of(thread);
                }
            }), per_job.stream().map(cf -> cf.join())).collect(Collectors.toList());

            return (BucketActionReplyMessage) new BucketActionCollectedRepliesMessage(source, replies,
                    Collections.emptySet(), Collections.emptySet());
        }).exceptionally(t -> {
            return (BucketActionReplyMessage) new BucketActionHandlerMessage(source,
                    ErrorUtils.buildErrorMessage(DataBucketAnalyticsChangeActor.class.getSimpleName(), source,
                            ErrorUtils.getLongForm("{0}", t)));
        });
    }
}

From source file: com.ikanow.aleph2.data_import_manager.analytics.actors.DataBucketAnalyticsChangeActor.java

/** Creates a query component to get all the shared library beans that are needed
 * @param bucket
 * @param cache_tech_jar_only
 * @return
 */
protected static QueryComponent<SharedLibraryBean> getQuery(final DataBucketBean bucket,
        final boolean cache_tech_jar_only) {
    final String technology = getAnalyticsTechnologyName(bucket).get(); //(non-empty by construction)
    final SingleQueryComponent<SharedLibraryBean> tech_query = CrudUtils.anyOf(SharedLibraryBean.class)
            .when(SharedLibraryBean::_id, technology).when(SharedLibraryBean::path_name, technology);

    final Stream<SingleQueryComponent<SharedLibraryBean>> other_libs = cache_tech_jar_only ? Stream.empty()
            : Optionals.ofNullable(bucket.analytic_thread().jobs()).stream()
                    .flatMap(a_job -> Stream.concat(
                            Optional.ofNullable(a_job.module_name_or_id()).map(Stream::of)
                                    .orElse(Stream.empty()),
                            Optionals.ofNullable(a_job.library_names_or_ids()).stream()))
                    .map(name -> {
                        return CrudUtils.anyOf(SharedLibraryBean.class).when(SharedLibraryBean::_id, name)
                                .when(SharedLibraryBean::path_name, name);
                    });

    return CrudUtils.<SharedLibraryBean>anyOf(Stream.concat(Stream.of(tech_query), other_libs));
}

From source file: com.evolveum.midpoint.model.impl.lens.TestAssignmentProcessor2.java

private AssignmentType findAssignmentOrInducement(AbstractRoleType source, AbstractRoleType target) {
    return Stream.concat(source.getAssignment().stream(), source.getInducement().stream())
            .filter(a -> a.getTargetRef() != null && target.getOid().equals(a.getTargetRef().getOid()))
            .findFirst().orElseThrow(() -> new IllegalStateException(
                    source + " contains no assignment/inducement to " + target));
}

From source file: org.trellisldp.http.AbstractTrellisHttpResourceTest.java

private static Stream<IRI> ldpResourceSupertypes(final IRI ldpType) {
    return Stream.of(ldpType).filter(t -> nonNull(LDP.getSuperclassOf(t)) || LDP.Resource.equals(t))
            .flatMap(t -> Stream.concat(ldpResourceSupertypes(LDP.getSuperclassOf(t)), Stream.of(t)));
}
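
ldpResourceSupertypes above uses Stream.concat recursively, emitting each level's supertypes before the type itself. The same shape appears in this standalone sketch, which walks a Java superclass chain (illustrative only, not part of the Trellis code):

import java.util.stream.Collectors;
import java.util.stream.Stream;

public class SuperclassChainDemo {
    // Emit every superclass of c (root first), then c itself,
    // by recursively concatenating the parent chain in front.
    static Stream<Class<?>> superclassChain(Class<?> c) {
        return c == null ? Stream.empty()
                : Stream.concat(superclassChain(c.getSuperclass()), Stream.of(c));
    }

    public static void main(String[] args) {
        System.out.println(superclassChain(Integer.class)
                .map(Class::getSimpleName)
                .collect(Collectors.toList())); // [Object, Number, Integer]
    }
}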

From source file: org.trellisldp.http.AbstractTrellisHttpResourceTest.java

private Stream<Executable> checkLdpTypeHeaders(final Response res, final IRI ldpType) {
    final Set<String> subTypes = ldpResourceSupertypes(ldpType).map(IRI::getIRIString).collect(toSet());
    final Set<String> responseTypes = getLinks(res).stream().filter(link -> "type".equals(link.getRel()))
            .map(link -> link.getUri().toString()).collect(toSet());
    return Stream.concat(
            subTypes.stream()
                    .map(t -> () -> assertTrue(responseTypes.contains(t),
                            "Response type doesn't contain LDP subtype: " + t)),
            responseTypes.stream().map(t -> () -> assertTrue(subTypes.contains(t),
                    "Subtype " + t + " not present in response type for: " + t)));
}