Example usage for java.util.stream Stream.concat

List of usage examples for java.util.stream Stream.concat

Introduction

On this page you can find example usage for java.util.stream Stream.concat.

Prototype

public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? extends T> b) 

Document

Creates a lazily concatenated stream whose elements are all the elements of the first stream followed by all the elements of the second stream.
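
To make the semantics concrete, here is a minimal, self-contained sketch (class and variable names are illustrative only, not drawn from the projects below). It prints the elements of the first stream followed by those of the second; neither source is consumed until the terminal collect() call runs:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class StreamConcatDemo {
    public static void main(String[] args) {
        Stream<String> first = Stream.of("a", "b");
        Stream<String> second = Stream.of("c", "d");

        // The concatenation itself is lazy; elements are only pulled
        // from the two sources when collect() runs.
        List<String> combined = Stream.concat(first, second)
                .collect(Collectors.toList());

        System.out.println(combined); // [a, b, c, d]
    }
}

To join more than two streams, nested concat calls work but grow awkward; a common alternative is Stream.of(s1, s2, s3).flatMap(java.util.function.Function.identity()).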

Usage

From source file:com.ikanow.aleph2.analytics.storm.services.MockAnalyticsContext.java
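
This implementation of getUnderlyingArtefacts concatenates the context and service-context instances with the artefacts contributed by every overridden service: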

@Override
public Collection<Object> getUnderlyingArtefacts() {
    if (_state_name == State.IN_TECHNOLOGY) {
        if (!_mutable_state.service_manifest_override.isSet()) {
            throw new RuntimeException(ErrorUtils.SERVICE_RESTRICTIONS);
        }
        return Stream
                .concat(Stream.of(this, _service_context),
                        _mutable_state.service_manifest_override.get().stream()
                                .map(t2 -> _service_context.getService(t2._1(), t2._2()))
                                .filter(service -> service.isPresent())
                                .flatMap(service -> service.get().getUnderlyingArtefacts().stream()))
                .collect(Collectors.toList());
    } else {
        throw new RuntimeException(ErrorUtils.TECHNOLOGY_NOT_MODULE);
    }
}

From source file:org.niord.core.message.Message.java
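
Here concat merges the language codes of the message-level descriptors with those of every message part's descriptors into a single distinct set: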

/** Returns the set of languages used for this message. */
public Set<String> computeLanguages() {
    return Stream
            .concat(getDescs().stream().map(DescEntity::getLang),
                    getParts().stream().flatMap(p -> p.getDescs().stream()).map(DescEntity::getLang))
            .collect(Collectors.toSet());
}

From source file:org.apache.any23.extractor.calendar.BaseCalendarExtractor.java
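
This extractor prepends the timezone components to the remaining child components, filtering out any VTimezone that would otherwise appear twice: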

private static void extract(ScribeIndex index, WriteContext ctx, BNode node, ICalComponent component,
        ExtractionResult result, boolean writeTimezones) {
    for (ICalProperty property : component.getProperties().values()) {
        ctx.setParent(component);
        writeProperty(node, index.getPropertyScribe(property), property, ctx, result);
    }

    Stream<ICalComponent> components = component.getComponents().values().stream();

    if (writeTimezones) {
        Collection<VTimezone> tzs = ctx.getTimezoneInfo().getComponents();
        Set<String> tzIds = tzs.stream().map(tz -> tz.getTimezoneId().getValue()).collect(Collectors.toSet());
        components = Stream.concat(tzs.stream(), components.filter(
                c -> !(c instanceof VTimezone && tzIds.contains(((VTimezone) c).getTimezoneId().getValue()))));
    }

    components.forEachOrdered(child -> {
        BNode childNode = f.createBNode();
        String componentName = index.getComponentScribe(child).getComponentName();
        IRI childType = type(componentName);

        if (childType == null) {
            result.writeTriple(node, predicate(componentName, result), childNode);
        } else {
            result.writeTriple(node, vICAL.component, childNode);
            result.writeTriple(childNode, RDF.TYPE, childType);
        }
        extract(index, ctx, childNode, child, result, false);
    });
}

From source file:com.ikanow.aleph2.management_db.services.DataBucketStatusCrudService.java
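
Near the end of this update method, concat combines the error messages from the suspend and quarantine operations into a single list: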

@Override
public ManagementFuture<Boolean> updateObjectBySpec(final QueryComponent<DataBucketStatusBean> unique_spec,
        final Optional<Boolean> upsert, final UpdateComponent<DataBucketStatusBean> update) {
    final MethodNamingHelper<DataBucketStatusBean> helper = BeanTemplateUtils.from(DataBucketStatusBean.class);

    if (upsert.orElse(false)) {
        throw new RuntimeException("This method is not supported with upsert set and true");
    }

    final Collection<BasicMessageBean> errors = validateUpdateCommand(update);
    if (!errors.isEmpty()) {
        return FutureUtils.createManagementFuture(CompletableFuture.completedFuture(false),
                CompletableFuture.completedFuture(errors));
    }

    // Now perform the update and based on the results we may need to send out instructions
    // to any listening buckets

    final CompletableFuture<Optional<DataBucketStatusBean>> update_reply = _underlying_data_bucket_status_db
            .get().updateAndReturnObjectBySpec(unique_spec, Optional.of(false), update, Optional.of(false),
                    Arrays.asList(helper.field(DataBucketStatusBean::_id),
                            helper.field(DataBucketStatusBean::confirmed_suspended),
                            helper.field(DataBucketStatusBean::confirmed_multi_node_enabled),
                            helper.field(DataBucketStatusBean::confirmed_master_enrichment_type),
                            helper.field(DataBucketStatusBean::suspended),
                            helper.field(DataBucketStatusBean::quarantined_until),
                            helper.field(DataBucketStatusBean::node_affinity)),
                    true);

    try {
        // What happens now depends on the contents of the message         

        // Maybe the user wanted to suspend/resume the bucket:

        final CompletableFuture<Collection<BasicMessageBean>> suspend_future = Lambdas
                .<CompletableFuture<Collection<BasicMessageBean>>>get(() -> {
                    if (update.getAll().containsKey(helper.field(DataBucketStatusBean::suspended))) {

                        // (note this handles suspending the bucket if no handlers are available)
                        return getOperationFuture(update_reply, sb -> sb.suspended(),
                                _underlying_data_bucket_db.get(), _underlying_data_bucket_status_db.get(),
                                _actor_context, _bucket_action_retry_store.get());
                    } else { // (this isn't an error, just nothing to do here)
                        return CompletableFuture.completedFuture(Collections.<BasicMessageBean>emptyList());
                    }
                });

        // Maybe the user wanted to set quarantine on/off:

        final CompletableFuture<Collection<BasicMessageBean>> quarantine_future = Lambdas
                .<CompletableFuture<Collection<BasicMessageBean>>>get(() -> {
                    if (update.getAll().containsKey(helper.field(DataBucketStatusBean::quarantined_until))) {

                        // (note this handles suspending the bucket if no handlers are available)
                        return getOperationFuture(update_reply, sb -> { // (this predicate is slightly more complex)
                            return (null != sb.quarantined_until())
                                    && (new Date().getTime() < sb.quarantined_until().getTime());
                        }, _underlying_data_bucket_db.get(), _underlying_data_bucket_status_db.get(),
                                _actor_context, _bucket_action_retry_store.get());
                    } else { // (this isn't an error, just nothing to do here)
                        return CompletableFuture.completedFuture(Collections.<BasicMessageBean>emptyList());
                    }
                });

        return FutureUtils.createManagementFuture(update_reply.thenApply(o -> o.isPresent()), // whether we updated
                suspend_future.thenCombine(quarantine_future,
                        (f1, f2) -> Stream.concat(f1.stream(), f2.stream()).collect(Collectors.toList())));
        //(+combine error messages from suspend/quarantine operations)
    } catch (Exception e) {
        // This is a serious enough exception that we'll just leave here
        return FutureUtils.createManagementFuture(FutureUtils.returnError(e));
    }
}

From source file:io.fabric8.vertx.maven.plugin.mojos.AbstractRunMojo.java
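
This mojo concatenates the direct (compile and runtime) dependency paths with the transitive ones before mapping each to a classpath URL: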

/**
 * Resolves the project's test and runtime dependencies, along with the classes and resources
 * directories, into a collection of classpath URLs.
 *
 * @return a {@link List} of {@link URL}s containing all the dependencies, the classes directory,
 *         the resources directory, etc.
 * @throws MojoExecutionException on any error that occurs while building the collection, such as
 *         dependency resolution errors
 */
protected List<URL> getClassPathUrls() throws MojoExecutionException {
    List<URL> classPathUrls = new ArrayList<>();

    try {
        addProjectResources(classPathUrls);
        addClassesDirectory(classPathUrls);

        Set<Optional<File>> compileAndRuntimeDeps = extractArtifactPaths(this.project.getDependencyArtifacts());

        Set<Optional<File>> transitiveDeps = extractArtifactPaths(this.project.getArtifacts());

        classPathUrls.addAll(Stream.concat(compileAndRuntimeDeps.stream(), transitiveDeps.stream())
                .filter(Optional::isPresent).map(file -> {
                    try {
                        return file.get().toURI().toURL();
                    } catch (Exception e) {
                        getLog().error("Error building classpath", e);
                    }
                    return null;
                }).filter(Objects::nonNull).collect(Collectors.toList()));

    } catch (IOException e) {
        throw new MojoExecutionException("Unable to run:", e);
    }
    return classPathUrls;
}

From source file:netention.core.Core.java
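
Here the author vertex itself is prepended to the stream of vertices reached over its outgoing "author" edges: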

public Stream<Vertex> vertexAuthorStream(final String author) {
    Vertex v = vertex(author, false);
    if (v == null)
        return Stream.empty();
    return Stream.concat(Stream.of(v), stream(v.getEdges(Direction.OUT, "author").spliterator(), false)
            .map(e -> e.getVertex(Direction.OUT)));
}

From source file:com.thinkbiganalytics.feedmgr.rest.controller.DatasourceController.java
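
This endpoint merges the resolved user and group principals into a single set before looking up the allowed actions: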

@GET
@Path("{id}/actions/allowed")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation("Gets the list of actions permitted for the given username and/or groups.")
@ApiResponses({ @ApiResponse(code = 200, message = "Returns the actions.", response = ActionGroup.class),
        @ApiResponse(code = 404, message = "A data source with the given ID does not exist.", response = RestResponseStatus.class) })
public Response getAllowedActions(@PathParam("id") final String datasourceIdStr,
        @QueryParam("user") final Set<String> userNames, @QueryParam("group") final Set<String> groupNames) {
    log.debug("Get allowed actions for data source: {}", datasourceIdStr);

    Set<? extends Principal> users = Arrays.stream(this.securityTransform.asUserPrincipals(userNames))
            .collect(Collectors.toSet());
    Set<? extends Principal> groups = Arrays.stream(this.securityTransform.asGroupPrincipals(groupNames))
            .collect(Collectors.toSet());

    return this.securityService
            .getAllowedDatasourceActions(datasourceIdStr,
                    Stream.concat(users.stream(), groups.stream()).collect(Collectors.toSet()))
            .map(g -> Response.ok(g).build())
            .orElseThrow(() -> new WebApplicationException(
                    "A data source with the given ID does not exist: " + datasourceIdStr,
                    Response.Status.NOT_FOUND));
}

From source file:com.ggvaidya.scinames.model.Dataset.java
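
In this example, concat merges names drawn from the dataset rows with names drawn from explicit changes, then deduplicates with distinct():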

/**
 * Returns a Stream of all distinct names referenced from this dataset. This includes 
 * names found in dataset rows and names found in ALL explicit changes (not just 
 * filtered ones!), and nothing else.
 * 
 * @return A Stream of all distinct names referenced from this dataset.
 */
public Stream<Name> getReferencedNames() {
    Stream<Name> namesFromData = getNamesInAllRows().stream();
    Stream<Name> namesFromChanges = explicitChanges.stream().flatMap(ch -> ch.getAllNames().stream());

    return Stream.concat(namesFromData, namesFromChanges).distinct();
}

From source file:com.ikanow.aleph2.logging.service.LoggingService.java
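
Here concat combines the service itself with the optional search-index service, if one is present: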

@Override
public Collection<Object> getUnderlyingArtefacts() {
    return Stream
            .concat(Stream.of(this),
                    service_context.getSearchIndexService().map(Stream::of).orElse(Stream.empty()))
            .collect(Collectors.toList());
}

From source file:org.jamocha.rating.fraj.RatingProvider.java
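
This rating computation uses Stream.concat and DoubleStream.concat repeatedly to fold the positive and negative existential components into its row-count, insert/delete-frequency, and runtime-cost estimates: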

private double rateBetaWithExistentials(final StatisticsProvider statisticsProvider,
        final PathNodeFilterSet toRate,
        final Map<Set<PathFilterList>, List<Pair<List<Set<PathFilterList>>, List<PathFilter>>>> componentToJoinOrder,
        final Map<Path, Set<PathFilterList>> pathToPreNetworkComponents) {
    final Set<Path> positiveExistentialPaths = toRate.getPositiveExistentialPaths();
    final Set<Path> negativeExistentialPaths = toRate.getNegativeExistentialPaths();
    final Set<Set<PathFilterList>> positiveExistentialComponents = new HashSet<>(),
            negativeExistentialComponents = new HashSet<>(), regularComponents = new HashSet<>();
    final Set<Set<PathFilterList>> preNetworkComponents = new HashSet<>(pathToPreNetworkComponents.values());
    for (final Set<PathFilterList> preNetworkComponent : preNetworkComponents) {
        final PathCollector<HashSet<Path>> pathCollector = PathCollector.newHashSet();
        preNetworkComponent.forEach(pathCollector::collectAllInLists);
        final HashSet<Path> paths = pathCollector.getPaths();
        if (!Collections.disjoint(paths, positiveExistentialPaths)) {
            positiveExistentialComponents.add(preNetworkComponent);
        } else if (!Collections.disjoint(paths, negativeExistentialPaths)) {
            negativeExistentialComponents.add(preNetworkComponent);
        } else {
            regularComponents.add(preNetworkComponent);
        }
    }
    final Map<Set<PathFilterList>, Data> preNetworkComponentToData = preNetworkComponents.stream()
            .collect(toMap(Function.identity(), statisticsProvider::getData));
    final Map<Set<PathFilterList>, PathFilter> existentialComponentToFilter = componentToJoinOrder.values()
            .iterator().next().stream().filter(p -> !regularComponents.contains(p.getLeft().iterator().next()))
            .collect(toMap(p -> p.getLeft().iterator().next(), p -> p.getRight().iterator().next()));

    final double tupleSize = regularComponents.stream()
            .mapToDouble(c -> preNetworkComponentToData.get(c).getTupleSize()).sum();
    final double tuplesPerPage = statisticsProvider.getPageSize() / tupleSize;
    final double unfilteredRowCount = calcBetaUnfilteredSize(statisticsProvider, componentToJoinOrder,
            pathToPreNetworkComponents, regularComponents);
    final double rowCount = unfilteredRowCount * DoubleStream
            .concat(positiveExistentialComponents.stream()
                    .mapToDouble(component -> 1 - Math.pow(
                            (1 - statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)),
                            preNetworkComponentToData.get(component).getRowCount())),
                    negativeExistentialComponents.stream().mapToDouble(component -> Math.pow(
                            (1 - statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)),
                            preNetworkComponentToData.get(component).getRowCount())))
            .reduce(1.0, (a, b) -> a * b);
    final double xOverUX = rowCount / unfilteredRowCount;
    // joinsize is needed twice per component, thus pre-calculate it
    final Map<Set<PathFilterList>, Double> regularComponentToJoinSize = regularComponents.stream()
            .collect(toMap(Function.identity(),
                    component -> joinSize(statisticsProvider, component, componentToJoinOrder.get(component),
                            positiveExistentialComponents, negativeExistentialComponents,
                            pathToPreNetworkComponents, xOverUX)));
    // dnrating (30a)
    final double finsert = xOverUX
            * regularComponents.stream()
                    .mapToDouble(component -> preNetworkComponentToData.get(component).getFinsert()
                            * regularComponentToJoinSize.get(component))
                    .sum()
            + DoubleStream.concat(negativeExistentialComponents.stream().mapToDouble(component -> {
                final double jsf = statisticsProvider.getJSF(regularComponents, component,
                        existentialComponentToFilter.get(component), pathToPreNetworkComponents);
                return preNetworkComponentToData.get(component).getFdelete() * rowCount * (jsf / (1 - jsf));
            }), positiveExistentialComponents.stream()
                    .mapToDouble(component -> preNetworkComponentToData.get(component).getFinsert() * rowCount
                            * statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)))
                    .sum();
    // dnrating (30b)
    final double fdelete = DoubleStream.concat(
            regularComponents.stream().mapToDouble(c -> preNetworkComponentToData.get(c).getFdelete()),
            DoubleStream.concat(negativeExistentialComponents.stream()
                    .mapToDouble(component -> preNetworkComponentToData.get(component).getFdelete() * rowCount
                            * statisticsProvider.getJSF(regularComponents, component,
                                    existentialComponentToFilter.get(component), pathToPreNetworkComponents)),
                    positiveExistentialComponents.stream().mapToDouble(component -> {
                        final double jsf = statisticsProvider.getJSF(regularComponents, component,
                                existentialComponentToFilter.get(component), pathToPreNetworkComponents);
                        return preNetworkComponentToData.get(component).getFinsert() * rowCount
                                * (jsf / (1 - jsf));
                    })))
            .sum();
    // publish information to statistics provider
    {
        final Set<PathFilterList> filters = new HashSet<>();
        componentToJoinOrder.keySet().forEach(filters::addAll);
        filters.add(toRate);
        statisticsProvider.setData(filters, new Data(finsert, fdelete, rowCount, tupleSize));
    }
    final double mUxBeta = m(unfilteredRowCount, tuplesPerPage);
    // dnrating (40)
    final double runtimeCost = DoubleStream.concat(regularComponents.stream().mapToDouble(component -> {
        final Data data = preNetworkComponentToData.get(component);
        return data.getFinsert()
                * costPosInsVarII(statisticsProvider, component, componentToJoinOrder.get(component),
                        regularComponents, pathToPreNetworkComponents)
                + data.getFdelete() * (mUxBeta + cardenas(mUxBeta, regularComponentToJoinSize.get(component)));
    }), Stream.concat(positiveExistentialComponents.stream(), negativeExistentialComponents.stream())
            .mapToDouble(component -> {
                final Data data = preNetworkComponentToData.get(component);
                return data.getFinsert() * 2
                        * jc(statisticsProvider, statisticsProvider.getJSF(regularComponents, component,
                                existentialComponentToFilter.get(component), pathToPreNetworkComponents), data,
                                1)
                        + data.getFdelete() * costNegDelVarII(statisticsProvider, component,
                                componentToJoinOrder.get(component), pathToPreNetworkComponents);
            })).sum();
    final double memoryCost = unfilteredRowCount * (tupleSize
            + 0.15 * (positiveExistentialComponents.size() + negativeExistentialComponents.size()));
    return cpuAndMemCostCombiner.applyAsDouble(runtimeCost, memoryCost);
}