Example usage for java.util.stream Stream concat

List of usage examples for java.util.stream Stream concat

Introduction

On this page you can find example usages of java.util.stream.Stream#concat.

Prototype

public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? extends T> b) 

Source Link

Document

Creates a lazily concatenated stream whose elements are all the elements of the first stream followed by all the elements of the second stream.

Usage

From source file:edu.pitt.dbmi.ccd.anno.group.GroupController.java

/**
 * Adds the authenticated user to the named group's requester list, unless
 * that user is already a moderator or member of the group.
 *
 * @param principal the authenticated user making the request
 * @param name      the name of the group to join
 * @throws GroupNotFoundException if no group with the given name exists
 */
@RequestMapping(value = GroupLinks.JOIN, method = RequestMethod.POST)
@ResponseStatus(HttpStatus.NO_CONTENT)
@ResponseBody
public void join(@AuthenticationPrincipal UserAccountDetails principal, @PathVariable String name)
        throws NotFoundException {
    final UserAccount requester = principal.getUserAccount();
    final Group group = groupService.findByName(name);
    if (group == null) {
        throw new GroupNotFoundException(name);
    }
    // Check moderators and members in one pass for the requester's id.
    final boolean alreadyInGroup = Stream
            .concat(group.getModerators().stream(), group.getMembers().stream())
            .map(UserAccount::getId)
            .anyMatch(id -> id.equals(requester.getId()));
    if (!alreadyInGroup) {
        group.addRequester(requester);
        groupService.save(group);
    }
}

From source file:org.sonar.server.computation.task.projectanalysis.step.SendIssueNotificationsStepTest.java

/**
 * Each assignee must receive a "my new issues" notification that covers only
 * the issues assigned to them. Issue order is randomized, so the mock for the
 * assignee under test may be handed out first or second by the factory; the
 * try/catch below detects which mock actually received her issues.
 */
@Test
public void send_new_issues_notification_to_user_only_for_those_assigned_to_her() {
    Random random = new Random();
    // Efforts for "her" issues are multiples of 10_000 while the other assignee's
    // efforts are in 10..109 — presumably chosen so the two populations are disjoint.
    Integer[] assigned = IntStream.range(0, 1 + random.nextInt(10)).mapToObj(i -> 10_000 * i)
            .toArray(Integer[]::new);
    Integer[] assignedToOther = IntStream.range(0, 1 + random.nextInt(10))
            .mapToObj(i -> 10 + random.nextInt(100)).toArray(Integer[]::new);
    // The notification's debt must equal the sum of only "her" issues' efforts.
    Duration expectedEffort = Duration.create(Arrays.stream(assigned).mapToInt(i -> i).sum());
    String assignee = randomAlphanumeric(5);
    String otherAssignee = randomAlphanumeric(5);
    List<DefaultIssue> issues = Stream
            .concat(Arrays.stream(assigned)
                    .map(effort -> new DefaultIssue().setType(randomRuleType).setEffort(Duration.create(effort))
                            .setAssignee(assignee).setCreationDate(new Date(ANALYSE_DATE))),
                    Arrays.stream(assignedToOther)
                            .map(effort -> new DefaultIssue().setType(randomRuleType)
                                    .setEffort(Duration.create(effort)).setAssignee(otherAssignee)
                                    .setCreationDate(new Date(ANALYSE_DATE))))
            .collect(Collectors.toList());
    Collections.shuffle(issues);// the step must not depend on issue order
    DiskCache<DefaultIssue>.DiskAppender issueCache = this.issueCache.newAppender();
    issues.forEach(issueCache::append);
    when(notificationService.hasProjectSubscribersForTypes(PROJECT.getUuid(),
            SendIssueNotificationsStep.NOTIF_TYPES)).thenReturn(true);
    // Two assignees => the factory is invoked twice; return a distinct mock each time.
    MyNewIssuesNotification myNewIssuesNotificationMock2 = createMyNewIssuesNotificationMock();
    when(newIssuesNotificationFactory.newMyNewIssuesNotification()).thenReturn(myNewIssuesNotificationMock)
            .thenReturn(myNewIssuesNotificationMock2);

    underTest.execute();

    verify(notificationService).deliver(newIssuesNotificationMock);
    verify(notificationService).deliver(myNewIssuesNotificationMock);
    verify(notificationService).deliver(myNewIssuesNotificationMock2);

    // Which mock got "her" issues depends on shuffled order. Assume the first; if its
    // setAssignee was actually called with the other assignee, switch to the second mock.
    MyNewIssuesNotification effectiveMyNewIssuesNotificationMock = this.myNewIssuesNotificationMock;
    try {
        verify(effectiveMyNewIssuesNotificationMock).setAssignee(assignee);
    } catch (ArgumentsAreDifferent e) {
        assertThat(e.getMessage())
                .contains("Wanted:\nmyNewIssuesNotification.setAssignee(\"" + assignee + "\")")
                .contains("Actual invocation has different arguments:\n"
                        + "myNewIssuesNotification.setAssignee(\"" + otherAssignee + "\")");
        effectiveMyNewIssuesNotificationMock = myNewIssuesNotificationMock2;
    }
    ArgumentCaptor<NewIssuesStatistics.Stats> statsCaptor = ArgumentCaptor
            .forClass(NewIssuesStatistics.Stats.class);
    verify(effectiveMyNewIssuesNotificationMock).setStatistics(eq(PROJECT.getName()), statsCaptor.capture());
    verify(effectiveMyNewIssuesNotificationMock).setDebt(expectedEffort);
    NewIssuesStatistics.Stats stats = statsCaptor.getValue();
    assertThat(stats.hasIssues()).isTrue();
    // just checking all issues have been added to the stats
    DistributedMetricStatsInt severity = stats.getDistributedMetricStats(NewIssuesStatistics.Metric.RULE_TYPE);
    assertThat(severity.getOnLeak()).isEqualTo(assigned.length);
    assertThat(severity.getOffLeak()).isEqualTo(0);
    assertThat(severity.getTotal()).isEqualTo(assigned.length);
}

From source file:org.dllearner.utilities.QueryUtils.java

/**
 * Returns the transitive closure of outgoing triple patterns of {@code node}
 * in {@code query}: the patterns whose subject is {@code node}, plus,
 * recursively, the outgoing patterns of each of those patterns' objects.
 *
 * @param query the query whose triple patterns are inspected
 * @param node  the node to start from
 * @return the set of transitively reachable outgoing triple patterns
 */
public Set<Triple> extractOutgoingTriplePatternsTrans(Query query, Node node) {
    // Compute the direct outgoing patterns once; the original called
    // extractOutgoingTriplePatterns(query, node) twice — once per concat branch.
    Set<Triple> direct = extractOutgoingTriplePatterns(query, node);
    return Stream.concat(direct.stream(),
            direct.stream()
                    .map(tp -> extractOutgoingTriplePatternsTrans(query, tp.getObject()))
                    .flatMap(Set::stream))
            .collect(Collectors.toSet());
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.IkanowV1SyncService_LibraryJars.java

/** Top level handler for updating a V1 share's status based on the result
 * of the corresponding V2 library operation.
 * @param id the id of the V1 share being updated
 * @param fres the management future whose outcome determines the error status
 * @param library_mgmt the V2 shared-library management CRUD service
 * @param share_db the V1 share collection
 * @param create_not_update true if the share is being created, false if it is being updated
 * @return true - if share updated with errors, false otherwise
 */
protected static CompletableFuture<Boolean> updateV1ShareErrorStatus_top(final String id,
        final ManagementFuture<?> fres, final IManagementCrudService<SharedLibraryBean> library_mgmt,
        final ICrudService<JsonNode> share_db, final boolean create_not_update) {
    return fres.getManagementResults().<Boolean>thenCompose(res -> {
        try {
            fres.get(); // (check if the DB side call has failed)
            return updateV1ShareErrorStatus(new Date(), id, res, library_mgmt, share_db, create_not_update);
        } catch (Exception e) { // DB-side call has failed, create ad hoc error
            // Append a synthetic error bean describing the failure to the management messages.
            final Collection<BasicMessageBean> errs = Stream
                    .concat(res.stream(), Stream.of(new BasicMessageBean(new Date(), false, "(unknown)",
                            "(unknown)", null, ErrorUtils.getLongForm("{0}", e), null)))
                    .collect(Collectors.toList());

            return updateV1ShareErrorStatus(new Date(), id, errs, library_mgmt, share_db, create_not_update);
        }
    });
}

From source file:net.sf.jabref.gui.entryeditor.EntryEditor.java

/**
 * Rebuilds the entry editor's field tab panels for the current entry type:
 * required fields, optional fields (split further in biblatex mode into
 * "Optional fields 2" and "Deprecated fields"), remaining "other" fields,
 * the general tabs from preferences, and the source tab.
 */
private void setupFieldPanels() {
    tabbed.removeAll();// rebuild the tab set from scratch
    tabs.clear();

    EntryType type = EntryTypes.getTypeOrDefault(entry.getType(),
            this.frame.getCurrentBasePanel().getBibDatabaseContext().getMode());

    // required fields
    List<String> requiredFields = addRequiredTab(type);

    // optional fields
    List<String> displayedOptionalFields = new ArrayList<>();

    if ((type.getOptionalFields() != null) && !type.getOptionalFields().isEmpty()) {
        if (!frame.getCurrentBasePanel().getBibDatabaseContext().isBiblatexMode()) {
            // BibTeX mode: a single optional-fields tab suffices.
            addOptionalTab(type);
        } else {
            // Biblatex mode: track all optional fields shown so far, then split the
            // remainder into non-deprecated ("Optional fields 2") and deprecated tabs.
            displayedOptionalFields.addAll(type.getPrimaryOptionalFields());
            displayedOptionalFields.addAll(type.getSecondaryOptionalFields());

            addOptionalTab(type);

            // Deprecated fields: all BibTeX->biblatex alias keys plus year and month.
            Set<String> deprecatedFields = new HashSet<>(EntryConverter.FIELD_ALIASES_TEX_TO_LTX.keySet());
            deprecatedFields.add(FieldName.YEAR);
            deprecatedFields.add(FieldName.MONTH);
            List<String> secondaryOptionalFields = type.getSecondaryOptionalFields();
            List<String> optionalFieldsNotPrimaryOrDeprecated = new ArrayList<>(secondaryOptionalFields);
            optionalFieldsNotPrimaryOrDeprecated.removeAll(deprecatedFields);

            // Get list of all optional fields of this entry and their aliases
            Set<String> optionalFieldsAndAliases = new HashSet<>();
            for (String field : type.getOptionalFields()) {
                optionalFieldsAndAliases.add(field);
                if (EntryConverter.FIELD_ALIASES_LTX_TO_TEX.containsKey(field)) {
                    optionalFieldsAndAliases.add(EntryConverter.FIELD_ALIASES_LTX_TO_TEX.get(field));
                }
            }

            // Get all optional fields which are deprecated
            Set<String> usedOptionalFieldsDeprecated = new HashSet<>(deprecatedFields);
            usedOptionalFieldsDeprecated.retainAll(optionalFieldsAndAliases);

            // Add tabs
            EntryEditorTab optPan2 = new EntryEditorTab(frame, panel, optionalFieldsNotPrimaryOrDeprecated,
                    this, false, true, Localization.lang("Optional fields 2"));
            if (optPan2.fileListEditor != null) {
                fileListEditor = optPan2.fileListEditor;
            }
            tabbed.addTab(Localization.lang("Optional fields 2"), IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(),
                    optPan2.getPane(), Localization.lang("Show optional fields"));
            tabs.add(optPan2);

            // Only add the deprecated tab when this type actually uses deprecated fields.
            if (!usedOptionalFieldsDeprecated.isEmpty()) {
                EntryEditorTab optPan3;
                optPan3 = new EntryEditorTab(frame, panel, new ArrayList<>(usedOptionalFieldsDeprecated), this,
                        false, true, Localization.lang("Deprecated fields"));
                if (optPan3.fileListEditor != null) {
                    fileListEditor = optPan3.fileListEditor;
                }
                tabbed.addTab(Localization.lang("Deprecated fields"),
                        IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(), optPan3.getPane(),
                        Localization.lang("Show deprecated BibTeX fields"));
                tabs.add(optPan3);
            }
        }
    }

    // other fields: fields set on the entry that are not already displayed,
    // not the key field, and not covered by a custom tab from preferences
    List<String> displayedFields = Stream.concat(requiredFields.stream(), displayedOptionalFields.stream())
            .map(String::toLowerCase).collect(Collectors.toList());
    List<String> otherFields = entry.getFieldNames().stream().map(String::toLowerCase)
            .filter(f -> !displayedFields.contains(f)).collect(Collectors.toList());
    otherFields.remove(BibEntry.KEY_FIELD);
    otherFields.removeAll(Globals.prefs.getCustomTabFieldNames());

    if (!otherFields.isEmpty()) {
        addOtherTab(otherFields);
    }

    // general fields from preferences
    addGeneralTabs();
    // source tab
    addSourceTab();
}

From source file:com.spotify.styx.api.BackfillResource.java

/**
 * Builds the per-instance run states for a backfill: instances from the start
 * to the next trigger are restored by replaying stored events ("processed");
 * the remaining instances up to the end are reported as WAITING.
 *
 * @param backfill the backfill whose statuses are retrieved
 * @return the processed states followed by the waiting states
 */
private List<RunStateData> retrieveBackfillStatuses(Backfill backfill) {
    final List<RunStateData> processedStates;
    final List<RunStateData> waitingStates;

    Map<WorkflowInstance, Long> activeWorkflowInstances;
    try {// snapshot of currently active instances, consulted while replaying events
        activeWorkflowInstances = storage.readActiveWorkflowInstances();
    } catch (IOException e) {
        // NOTE(review): Throwables.propagate is deprecated in recent Guava;
        // consider new RuntimeException(e) when upgrading.
        throw Throwables.propagate(e);
    }

    // Portion of the backfill that has already been triggered.
    final List<Instant> processedInstants = rangeOfInstants(backfill.start(), backfill.nextTrigger(),
            backfill.schedule());
    // NOTE(review): parallelStream assumes getBackfillRunState is thread-safe — confirm.
    processedStates = processedInstants.parallelStream().map(instant -> {
        final WorkflowInstance wfi = WorkflowInstance.create(backfill.workflowId(),
                toParameter(backfill.schedule(), instant));
        Optional<RunState> restoredStateOpt = ReplayEvents.getBackfillRunState(wfi, activeWorkflowInstances,
                storage, backfill.id());
        if (restoredStateOpt.isPresent()) {
            RunState state = restoredStateOpt.get();
            return RunStateData.create(state.workflowInstance(), state.state().name(), state.data());
        } else {
            // No replayable state found for this instance.
            return RunStateData.create(wfi, UNKNOWN, StateData.zero());
        }
    }).collect(toList());

    // Portion of the backfill that has not been triggered yet.
    final List<Instant> waitingInstants = rangeOfInstants(backfill.nextTrigger(), backfill.end(),
            backfill.schedule());
    waitingStates = waitingInstants.stream().map(instant -> {
        final WorkflowInstance wfi = WorkflowInstance.create(backfill.workflowId(),
                toParameter(backfill.schedule(), instant));
        return RunStateData.create(wfi, WAITING, StateData.zero());
    }).collect(toList());

    return Stream.concat(processedStates.stream(), waitingStates.stream()).collect(toList());
}

From source file:com.thinkbiganalytics.feedmgr.rest.controller.NifiIntegrationRestController.java

/**
 * Finds controller services of the specified type.
 *
 * @param processGroupId the process group id ("all" or "root" selects the root group)
 * @param type           the controller service type to match
 * @return the controller services matching the requested type or one of its subtypes
 */
@GET
@Path("/controller-services/process-group/{processGroupId}")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Finds controller services of the specified type.")
@ApiResponses({
        @ApiResponse(code = 200, message = "Returns the matching controller services.", response = ControllerServiceDTO.class, responseContainer = "Set"),
        @ApiResponse(code = 400, message = "The type cannot be empty.", response = RestResponseStatus.class),
        @ApiResponse(code = 404, message = "The process group cannot be found.", response = RestResponseStatus.class),
        @ApiResponse(code = 500, message = "The process group is unavailable.", response = RestResponseStatus.class) })
public Response getControllerServices(@Nonnull @PathParam("processGroupId") final String processGroupId,
        @Nullable @QueryParam("type") final String type) {
    // Validate inputs before calling NiFi.
    if (StringUtils.isBlank(processGroupId)) {
        throw new NotFoundException(STRINGS.getString("getControllerServices.missingProcessGroup"));
    }
    if (StringUtils.isBlank(type)) {
        throw new BadRequestException(STRINGS.getString("getControllerServices.missingType"));
    }

    // Allowed types: the requested type itself plus every documented subtype.
    final Stream<String> subTypeStream = nifiRestClient.controllerServices().getTypes(type).stream()
            .map(DocumentedTypeDTO::getType);
    final Set<String> allowedTypes = Stream.concat(Stream.of(type), subTypeStream)
            .collect(Collectors.toSet());

    // "all" and "root" are treated as aliases for the root process group.
    final boolean isRootGroup = "all".equalsIgnoreCase(processGroupId)
            || "root".equalsIgnoreCase(processGroupId);
    final Set<ControllerServiceDTO> controllerServices = isRootGroup
            ? nifiRestClient.processGroups().getControllerServices("root")
            : nifiRestClient.processGroups().getControllerServices(processGroupId);

    // Keep only services of an allowed type that the caller is permitted to access.
    final Set<ControllerServiceDTO> matchingControllerServices = controllerServices.stream()
            .filter(service -> allowedTypes.contains(service.getType()))
            .filter(datasourceService.getControllerServiceAccessControlFilter())
            .collect(Collectors.toSet());
    return Response.ok(matchingControllerServices).build();
}

From source file:com.thinkbiganalytics.feedmgr.rest.controller.FeedCategoryRestController.java

/**
 * Constructs and returns a permission change request for the given users and
 * groups, containing the category actions that the requester may permit or revoke.
 *
 * @param categoryIdStr the id of the category
 * @param changeType    the kind of change (case-insensitive name of a ChangeType constant)
 * @param userNames     the users the change applies to
 * @param groupNames    the groups the change applies to
 * @return the permission change request, ready to be modified and re-posted
 * @throws WebApplicationException 400 if {@code changeType} is blank,
 *                                 404 if the category does not exist
 */
@GET
@Path("{categoryId}/actions/change")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Constructs and returns a permission change request for a set of users/groups containing the actions that the requester may permit or revoke.")
@ApiResponses({
        @ApiResponse(code = 200, message = "Returns the change request that may be modified by the client and re-posted.", response = PermissionsChange.class),
        @ApiResponse(code = 400, message = "The type is not valid.", response = RestResponseStatus.class),
        @ApiResponse(code = 404, message = "No category exists with the specified ID.", response = RestResponseStatus.class) })
public Response getAllowedPermissionsChange(@PathParam("categoryId") String categoryIdStr,
        @QueryParam("type") String changeType, @QueryParam("user") Set<String> userNames,
        @QueryParam("group") Set<String> groupNames) {
    if (StringUtils.isBlank(changeType)) {
        throw new WebApplicationException("The query parameter \"type\" is required", Status.BAD_REQUEST);
    }

    Set<? extends Principal> users = Arrays.stream(this.securityTransform.asUserPrincipals(userNames))
            .collect(Collectors.toSet());
    Set<? extends Principal> groups = Arrays.stream(this.securityTransform.asGroupPrincipals(groupNames))
            .collect(Collectors.toSet());

    // Locale.ROOT keeps the enum-name lookup locale-independent (the default-locale
    // toUpperCase breaks for e.g. the Turkish dotless-i).
    return this.securityService
            .createCategoryPermissionChange(categoryIdStr,
                    ChangeType.valueOf(changeType.toUpperCase(java.util.Locale.ROOT)),
                    Stream.concat(users.stream(), groups.stream()).collect(Collectors.toSet()))
            .map(p -> Response.ok(p).build()).orElseThrow(() -> new WebApplicationException(
                    "A category with the given ID does not exist: " + categoryIdStr, Status.NOT_FOUND));
}

From source file:org.dllearner.utilities.QueryUtils.java

/**
 * Returns the transitive closure of incoming triple patterns of {@code node}
 * in {@code query}: the patterns whose object is {@code node}, plus,
 * recursively, the incoming patterns of each of those patterns' subjects.
 *
 * @param query the query whose triple patterns are inspected
 * @param node  the node to start from
 * @return the set of transitively reachable incoming triple patterns
 */
public Set<Triple> extractIncomingTriplePatternsTrans(Query query, Node node) {
    // Compute the direct incoming patterns once; the original called
    // extractIncomingTriplePatterns(query, node) twice — once per concat branch.
    Set<Triple> direct = extractIncomingTriplePatterns(query, node);
    return Stream.concat(direct.stream(),
            direct.stream()
                    .map(tp -> extractIncomingTriplePatternsTrans(query, tp.getSubject()))
                    .flatMap(Set::stream))
            .collect(Collectors.toSet());
}

From source file:org.apache.archiva.repository.RepositoryRegistry.java

/**
 * Returns all repositories that are registered. There is no defined order of the
 * returned repositories.
 *
 * @return a list of managed and remote repositories
 */
public Collection<Repository> getRepositories() {
    rwLock.readLock().lock();
    try {
        // Flatten both registries (managed first, then remote) into a single list.
        return Stream.of(managedRepositories.values(), remoteRepositories.values())
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
    } finally {
        rwLock.readLock().unlock();
    }
}