Example usage for java.util.stream.Stream.concat

Introduction

On this page you can find example usages of java.util.stream.Stream.concat.

Prototype

public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? extends T> b) 

Document

Creates a lazily concatenated stream whose elements are all the elements of the first stream followed by all the elements of the second stream.
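
Before the project examples below, here is a minimal, self-contained sketch (illustrative code, not drawn from any of those projects) showing the contract: all elements of the first stream, then all elements of the second, evaluated lazily. Per the Javadoc, the resulting stream is ordered if both input streams are ordered, and parallel if either input stream is parallel.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class StreamConcatDemo {
    public static void main(String[] args) {
        // All elements of the first stream, followed by all elements of the second.
        List<String> merged = Stream.concat(Stream.of("a", "b"), Stream.of("c", "d"))
                .collect(Collectors.toList());
        System.out.println(merged); // [a, b, c, d]

        // Concatenation is lazy: neither input is traversed until a terminal operation runs.
        Stream<Integer> lazy = Stream.concat(
                Stream.of(1).peek(i -> System.out.println("left: " + i)),
                Stream.of(2).peek(i -> System.out.println("right: " + i)));
        lazy.forEach(i -> {}); // "left: 1" and "right: 2" print only here
    }
}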

Usage

From source file:org.eclipse.packagedrone.repo.channel.web.channel.ChannelController.java

private static Stream<ChannelListEntry> toEntry(final ChannelInformation info) {
    final Stream<ChannelListEntry> idStream = Stream
            .of(fromChannel(info, Modifier.PRIMARY, info.getId(), "id"));
    final Stream<ChannelListEntry> nameStream = info.getNames().stream()
            .map(name -> fromChannel(info, Modifier.DEFAULT, name, "name"));
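    // id entry first, then the name entries, in encounter order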
    return Stream.concat(idStream, nameStream);
}

From source file:com.wrmsr.wava.basic.Basics.java

@CheckReturnValue
public static BasicSet transformBasics(StreamBasicTransform op, BasicSet basics) {
    for (Name name : basics) {
        if (basics.contains(name)) {
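            // take the transform's first result, or fall back to the unchanged set if the transform yields nothing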
            basics = Stream.concat(op.apply(basics, basics.get(name)), Stream.of(basics)).findFirst().get();
        }
    }
    return basics;
}

From source file:org.rakam.client.builder.document.SlateDocumentGenerator.java

private MarkdownBuilder generateApiTags(MarkdownBuilder markdownBuilder) {
    if (!swagger.getTags().isEmpty()) {
        Map<String, Tag> tags = swagger.getTags().stream()
                .collect(Collectors.toMap(t -> t.getName().toLowerCase(), Function.identity()));
        Set<String> nonOrderedTags = new HashSet<>(tags.keySet());
        nonOrderedTags.removeAll(tagsOrder);
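        // explicitly ordered tags first, then any remaining tags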
        Stream.concat(tagsOrder.stream(), nonOrderedTags.stream()).forEachOrdered(tagName -> {
            Tag tag = tags.get(tagName.toLowerCase());
            if (tag == null) {
                LOGGER.warn("tag not found: " + tagName);
            } else {
                String name = tag.getName();
                String description = tag.getDescription();
                markdownBuilder.documentTitle(
                        CaseFormat.UPPER_UNDERSCORE.to(CaseFormat.UPPER_CAMEL, name.replaceAll("-", " ")))
                        .newLine().textLine(description).newLine();
                processOperation(markdownBuilder, name);
            }
        });
        markdownBuilder.newLine();
    }
    return markdownBuilder;
}

From source file:com.thinkbiganalytics.feedmgr.rest.controller.FeedsController.java

@GET
@Path("{id}/actions/change/allowed")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Constructs and returns a permission change request for a set of users/groups containing the actions that the requester may permit or revoke.")
@ApiResponses({
        @ApiResponse(code = 200, message = "Returns the change request that may be modified by the client and re-posted.", response = PermissionsChange.class),
        @ApiResponse(code = 400, message = "The type is not valid.", response = RestResponseStatus.class),
        @ApiResponse(code = 404, message = "No feed exists with the specified ID.", response = RestResponseStatus.class) })
public PermissionsChange getAllowedPermissionsChange(@PathParam("id") String feedIdStr,
        @QueryParam("type") String changeType, @QueryParam("user") Set<String> userNames,
        @QueryParam("group") Set<String> groupNames) {
    if (StringUtils.isBlank(changeType)) {
        throw new WebApplicationException("The query parameter \"type\" is required", Status.BAD_REQUEST);
    }

    Set<? extends Principal> users = Arrays.stream(this.actionsTransform.asUserPrincipals(userNames))
            .collect(Collectors.toSet());
    Set<? extends Principal> groups = Arrays.stream(this.actionsTransform.asGroupPrincipals(groupNames))
            .collect(Collectors.toSet());

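    // the union of user and group principals is passed as a single set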
    return this.securityService
            .createFeedPermissionChange(feedIdStr, ChangeType.valueOf(changeType.toUpperCase()),
                    Stream.concat(users.stream(), groups.stream()).collect(Collectors.toSet()))
            .orElseThrow(() -> new WebApplicationException(
                    "A feed with the given ID does not exist: " + feedIdStr, Status.NOT_FOUND));
}

From source file:fi.hsl.parkandride.itest.RequestLogITest.java

private void concurrentlyGenerateLogs(int numberOfRequests, int numberOfUpdates) {
    withDate(DateTime.now().withTime(12, 2, 0, 0), () -> {
        final Stream<CompletableFuture<Integer>> statusCodes = range(0, numberOfRequests).parallel()
                .mapToObj(i -> {
                    final Response response = given().header(SOURCE_HEADER, WEB_UI_SOURCE).when()
                            .get(UrlSchema.CAPACITY_TYPES).thenReturn();
                    return CompletableFuture.completedFuture(response.statusCode());
                });

        final Stream<CompletableFuture<Integer>> updates = range(0, numberOfUpdates).parallel().mapToObj(i -> {
            batchingRequestLogService.updateRequestLogs();
            return CompletableFuture.completedFuture(0);
        });

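        // combine both pipelines and block until every future has completed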
        try {
            CompletableFuture.allOf(Stream.concat(statusCodes, updates).toArray(i -> new CompletableFuture[i]))
                    .get();
        } catch (InterruptedException | ExecutionException e) {
            e.printStackTrace();
            throw new AssertionFailedError(e.getMessage());
        }
    });
}

From source file:org.flowable.engine.impl.dynamic.AbstractDynamicStateManager.java

public List<MoveExecutionEntityContainer> resolveMoveExecutionEntityContainers(
        ChangeActivityStateBuilderImpl changeActivityStateBuilder,
        Optional<String> migrateToProcessDefinitionId, Map<String, Object> variables,
        CommandContext commandContext) {
    List<MoveExecutionEntityContainer> moveExecutionEntityContainerList = new ArrayList<>();
    if (changeActivityStateBuilder.getMoveExecutionIdList().size() > 0) {
        for (MoveExecutionIdContainer executionContainer : changeActivityStateBuilder
                .getMoveExecutionIdList()) {
            // Executions belonging to the same parent should move together, i.e. multiple executions moving to a single activity
            Map<String, List<ExecutionEntity>> executionsByParent = new HashMap<>();
            for (String executionId : executionContainer.getExecutionIds()) {
                ExecutionEntity execution = resolveActiveExecution(executionId, commandContext);
                List<ExecutionEntity> executionEntities = executionsByParent
                        .computeIfAbsent(execution.getParentId(), k -> new ArrayList<>());
                executionEntities.add(execution);
            }
            executionsByParent.values().forEach(executions -> {
                MoveExecutionEntityContainer moveExecutionEntityContainer = new MoveExecutionEntityContainer(
                        executions, executionContainer.getMoveToActivityIds());
                executionContainer.getNewAssigneeId().ifPresent(moveExecutionEntityContainer::setNewAssigneeId);
                moveExecutionEntityContainerList.add(moveExecutionEntityContainer);
            });
        }
    }

    if (changeActivityStateBuilder.getMoveActivityIdList().size() > 0) {
        for (MoveActivityIdContainer activityContainer : changeActivityStateBuilder.getMoveActivityIdList()) {
            Map<String, List<ExecutionEntity>> activitiesExecutionsByMultiInstanceParentId = new HashMap<>();
            List<ExecutionEntity> activitiesExecutionsNotInMultiInstanceParent = new ArrayList<>();

            for (String activityId : activityContainer.getActivityIds()) {
                List<ExecutionEntity> activityExecutions = resolveActiveExecutions(
                        changeActivityStateBuilder.getProcessInstanceId(), activityId, commandContext);
                if (!activityExecutions.isEmpty()) {

                    // check for a multi instance root execution
                    ExecutionEntity miExecution = null;
                    boolean isInsideMultiInstance = false;
                    for (ExecutionEntity possibleMIExecution : activityExecutions) {
                        if (possibleMIExecution.isMultiInstanceRoot()) {
                            miExecution = possibleMIExecution;
                            isInsideMultiInstance = true;
                            break;
                        }

                        if (isExecutionInsideMultiInstance(possibleMIExecution)) {
                            isInsideMultiInstance = true;
                        }
                    }

                    //If inside a multiInstance, we create one container for each execution
                    if (isInsideMultiInstance) {

                        //We group by the parentId (executions belonging to the same parent execution instance
                        // i.e. gateways nested in MultiInstance subProcesses, need to be in the same move container)
                        Stream<ExecutionEntity> executionEntitiesStream = activityExecutions.stream();
                        if (miExecution != null) {
                            executionEntitiesStream = executionEntitiesStream
                                    .filter(ExecutionEntity::isMultiInstanceRoot);
                        }

                        executionEntitiesStream.forEach(childExecution -> {
                            String parentId = childExecution.isMultiInstanceRoot() ? childExecution.getId()
                                    : childExecution.getParentId();
                            List<ExecutionEntity> executionEntities = activitiesExecutionsByMultiInstanceParentId
                                    .computeIfAbsent(parentId, k -> new ArrayList<>());
                            executionEntities.add(childExecution);
                        });

                    } else {
                        ExecutionEntity execution = activityExecutions.iterator().next();
                        activitiesExecutionsNotInMultiInstanceParent.add(execution);
                    }
                }
            }

            //Create a move container for each execution group (executionList)
            Stream.concat(activitiesExecutionsByMultiInstanceParentId.values().stream(),
                    Stream.of(activitiesExecutionsNotInMultiInstanceParent))
                    .filter(executions -> executions != null && !executions.isEmpty())
                    .forEach(executions -> moveExecutionEntityContainerList.add(
                            createMoveExecutionEntityContainer(activityContainer, executions, commandContext)));
        }
    }

    return moveExecutionEntityContainerList;
}

From source file:org.apache.streams.twitter.converter.util.TwitterActivityUtil.java

/**
 * Formats the ID to conform with the Apache Streams activity ID convention.
 * @param idparts the parts of the ID to join
 * @return a valid Activity ID in format "id:twitter:part1:part2:...partN"
 */
public static String formatId(String... idparts) {
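    // prepend "id:twitter", then join all parts with ':' (e.g. formatId("123") yields "id:twitter:123")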
    return String.join(":", Stream.concat(Arrays.stream(new String[] { "id:twitter" }), Arrays.stream(idparts))
            .collect(Collectors.toList()));
}

From source file:dao.SearchDAO.java

public static List<String> getAutoCompleteList() {
    List<String> cachedAutoCompleteList = (List<String>) Cache.get(SEARCH_AUTOCOMPLETE_LIST);
    if (cachedAutoCompleteList == null || cachedAutoCompleteList.size() == 0) {
        //List<String> metricList = getJdbcTemplate().queryForList(GET_METRIC_AUTO_COMPLETE_LIST, String.class);
        List<String> flowList = getJdbcTemplate().queryForList(GET_FLOW_AUTO_COMPLETE_LIST, String.class);
        List<String> jobList = getJdbcTemplate().queryForList(GET_JOB_AUTO_COMPLETE_LIST, String.class);
        List<String> datasetList = getJdbcTemplate().queryForList(GET_DATASET_AUTO_COMPLETE_LIST, String.class);
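        // merge the dataset, flow, and job name lists into one via nested concat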
        cachedAutoCompleteList = Stream
                .concat(datasetList.stream(), Stream.concat(flowList.stream(), jobList.stream()))
                .collect(Collectors.toList());
        Collections.sort(cachedAutoCompleteList);
        Cache.set(SEARCH_AUTOCOMPLETE_LIST, cachedAutoCompleteList, 60 * 60);
    }

    return cachedAutoCompleteList;
}

From source file:org.openecomp.sdc.be.model.operations.impl.AttributeOperation.java

private Either<List<ComponentInstanceAttribute>, TitanOperationStatus> mergeAttributesResults(
        Either<List<ComponentInstanceAttribute>, TitanOperationStatus> eitherAttributesThatDoesNotExistOnRI,
        Either<List<ComponentInstanceAttribute>, TitanOperationStatus> eitherAttributesThatExistOnRI) {

    Either<List<ComponentInstanceAttribute>, TitanOperationStatus> result;
    if (eitherAttributesThatExistOnRI.isRight()) {
        result = Either.right(eitherAttributesThatExistOnRI.right().value());
    } else if (eitherAttributesThatDoesNotExistOnRI.isRight()) {
        result = Either.right(eitherAttributesThatDoesNotExistOnRI.right().value());
    } else {
        final List<ComponentInstanceAttribute> attributesThatExistOnRI = eitherAttributesThatExistOnRI.left()
                .value();
        final List<ComponentInstanceAttribute> attributesThatDoesNotExistOnRI = eitherAttributesThatDoesNotExistOnRI
                .left().value();
        Set<String> attributesIdThatExistOnRI = attributesThatExistOnRI.stream().map(e -> e.getUniqueId())
                .collect(Collectors.toSet());
        // Attributes from the resource, excluding those that also exist on the instance
        Stream<ComponentInstanceAttribute> filterAttributesThatDoesNotExistOnRI = attributesThatDoesNotExistOnRI
                .stream().filter(e -> !attributesIdThatExistOnRI.contains(e.getUniqueId()));
        // Add Fields From Resource Attributes
        fillAttributeInfoFromResource(attributesThatExistOnRI, attributesThatDoesNotExistOnRI);
        // Merge the filtered resource attributes with the instance attributes into the full list
        List<ComponentInstanceAttribute> mergedList = Stream
                .concat(filterAttributesThatDoesNotExistOnRI, attributesThatExistOnRI.stream())
                .collect(Collectors.toList());
        result = Either.left(mergedList);
    }
    return result;
}

From source file:com.ikanow.aleph2.analytics.spark.utils.SparkTechnologyUtils.java

/** Cache the system and user classpaths and return HDFS paths
 * @param bucket
 * @param main_jar_path - my JAR path
 * @param context
 * @throws IOException 
 * @throws ExecutionException 
 * @throws InterruptedException 
 * @throws IllegalArgumentException 
 */
public static List<String> getCachedJarList(final DataBucketBean bucket, final String main_jar_path,
        final IAnalyticsContext context)
        throws IllegalArgumentException, InterruptedException, ExecutionException, IOException {
    final FileContext fc = context.getServiceContext().getStorageService()
            .getUnderlyingPlatformDriver(FileContext.class, Optional.empty()).get();
    final String root_path = context.getServiceContext().getStorageService().getRootPath();
    final String tmp_dir = System.getProperty("java.io.tmpdir");

    // Aleph2 libraries: need to cache them
    final Stream<String> context_stream = context.getAnalyticsContextLibraries(Optional.empty()).stream()
            .filter(jar -> !jar.equals(main_jar_path)) // (this is the service case, eg "/opt/aleph2-home/lib/aleph2_spark_analytic_services.jar")
            .map(Lambdas.wrap_u(f_str -> {

                final Tuple3<File, Path, FileStatus> f_p_fs = f_str.contains("core_distributed_services")
                        || f_str.contains("data_model") ? removeSparkConflictsAndCache(f_str, root_path, fc)
                                : checkCache(f_str, root_path, fc);

                if (null == f_p_fs._3()) { //cache doesn't exist
                    // Local version
                    try (FSDataOutputStream outer = fc.create(f_p_fs._2(), EnumSet.of(CreateFlag.CREATE), // ie should fail if the destination file already exists 
                            org.apache.hadoop.fs.Options.CreateOpts.createParent())) {
                        Files.copy(f_p_fs._1(), outer.getWrappedStream());
                    } catch (FileAlreadyExistsException e) {//(carry on - the file is versioned so it can't be out of date)
                    }
                    if (f_p_fs._1().getPath().startsWith(tmp_dir)) { // (delete tmp files)
                        f_p_fs._1().delete();
                    }
                }

                return f_p_fs._2();
            })).map(p -> transformFromPath(p.toString()));

    // User libraries: this is slightly easier since one of the 2 keys
    // is the HDFS path (the other is the _id)
    final Stream<String> lib_stream = context
            .getAnalyticsLibraries(Optional.of(bucket), bucket.analytic_thread().jobs()).get().entrySet()
            .stream().map(kv -> kv.getKey()).filter(jar -> !jar.equals(main_jar_path)) // (this is the uploaded case, eg "/app/aleph2/library/blah.jar")
            .filter(path -> path.startsWith(root_path)).map(s -> transformFromPath(s));

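    // cached system jars first, then user-uploaded library jars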
    return Stream.concat(context_stream, lib_stream).collect(Collectors.toList());
}