Example usage for java.util.stream Stream parallel

List of usage examples for java.util.stream Stream parallel

Introduction

On this page you can find example usages of java.util.stream.Stream.parallel(), drawn from real-world open-source projects.

Prototype

S parallel();

Document

Returns an equivalent stream that is parallel. May return itself, either because the stream was already parallel, or because the underlying stream state was modified to be parallel. This is an intermediate operation.
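
Because the examples below come from larger projects, here is first a minimal, self-contained sketch (illustrative only, not taken from any project on this page) showing what parallel() does:

import java.util.Arrays;
import java.util.List;

public class ParallelStreamDemo {

    public static void main(String[] args) {
        List<String> words = Arrays.asList("alpha", "beta", "gamma", "delta");

        // Sequential by default: the terminal operation runs on the calling thread.
        long sequential = words.stream().filter(w -> w.length() > 4).count();

        // parallel() returns an equivalent stream whose terminal operation may
        // split the work across the common ForkJoinPool.
        long parallel = words.stream().parallel().filter(w -> w.length() > 4).count();

        // Both pipelines compute the same result (3); only the execution strategy differs.
        System.out.println(sequential + " == " + parallel);
    }
}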

Usage

From source file:org.ow2.proactive.scheduling.api.graphql.fetchers.JobDataFetcher.java
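This GraphQL data fetcher switches the incoming stream to parallel before mapping each JobData entity to a Job built from its scheduling metadata.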

@Override
protected Stream<Job> dataMapping(Stream<JobData> dataStream) {
    return dataStream.parallel()
            .map(jobData -> Job.builder()
                    .dataManagement(DataManagement.builder().globalSpaceUrl(jobData.getGlobalSpace())
                            .inputSpaceUrl(jobData.getInputSpace()).outputSpaceUrl(jobData.getOutputSpace())
                            .userSpaceUrl(jobData.getUserSpace()).build())
                    .description(jobData.getDescription()).finishedTime(jobData.getFinishedTime())
                    .genericInformation(jobData.getGenericInformation()).id(jobData.getId())
                    .inErrorTime(jobData.getInErrorTime()).lastUpdatedTime(jobData.getLastUpdatedTime())
                    .maxNumberOfExecution(jobData.getMaxNumberOfExecution()).name(jobData.getJobName())
                    .numberOfFailedTasks(jobData.getNumberOfFailedTasks())
                    .numberOfFaultyTasks(jobData.getNumberOfFaultyTasks())
                    .numberOfFinishedTasks(jobData.getNumberOfFinishedTasks())
                    .numberOfInErrorTasks(jobData.getNumberOfInErrorTasks())
                    .numberOfPendingTasks(jobData.getNumberOfPendingTasks())
                    .numberOfRunningTasks(jobData.getNumberOfRunningTasks())
                    .onTaskError(CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE,
                            jobData.getOnTaskErrorString()))
                    .owner(jobData.getOwner()).priority(jobData.getPriority().name())
                    .projectName(jobData.getProjectName()).removedTime(jobData.getRemovedTime())
                    .status(jobData.getStatus().name()).startTime(jobData.getStartTime())
                    .submittedTime(jobData.getSubmittedTime())
                    .totalNumberOfTasks(jobData.getTotalNumberOfTasks())
                    // TODO Currently map the JobVariable object to a simple string (its value). Need to map the whole object later
                    .variables(jobData.getVariables() == null ? ImmutableMap.of()
                            : jobData.getVariables().entrySet().stream()
                                    .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().getValue())))
                    .build());
}

From source file:uk.ac.ebi.ep.parser.parsers.EnzymePortalPDBeParser.java
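Here the PDB cross-references appear to be split into batches of 500 by a partition helper, and the batches are processed in parallel, each entry being enriched with a title fetched from the PDBe service.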

public void updatePDBeData() {

    // Use a concurrent set: it is populated from a parallel stream below
    // (the plain HashSet in the original source is not thread-safe).
    Set<UniprotXref> entries = ConcurrentHashMap.newKeySet();

    List<UniprotXref> pdbIds = parserService.findPDBcodes();
    LOGGER.info("Number of PDB entries to process : " + pdbIds.size());

    Stream<UniprotXref> existingStream = pdbIds.stream();
    Stream<List<UniprotXref>> partitioned = partition(existingStream, 500, 1);
    partitioned.parallel().forEach((chunk) -> {

        chunk.stream().forEach((pdb) -> {
            //obtain a concrete pdb entry
            PdbSearchResult results = pdbService.getPdbSearchResults(pdb.getSourceId().toLowerCase());

            if (results != null) {
                List<PDBe> result = results.get(pdb.getSourceId().toLowerCase());
                String title = result.stream().findAny().get().getTitle();
                pdb.setSourceName(title);
                entries.add(pdb);
            }
        });
    });

    // persist the updated entries
    List<UniprotXref> pdbEntries = new ArrayList<>(entries);

    List<UniprotXref> updatedEntries = parserService.updatePDB(pdbEntries);
    LOGGER.info("Number of PDB entries updated are : " + updatedEntries.size());
    updatedEntries.clear();
}

From source file:com.github.lukaszbudnik.gugis.GugisReplicatorInterceptor.java
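This interceptor invokes the same method on every bound component in parallel, wrapping each invocation outcome in a Success or Failure.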

public Stream<Try<Object>> executeBindings(boolean allowFailure, Stream<Binding<Object>> bindings,
        String methodName, Object[] arguments) {
    Stream<Try<Object>> executedBindingsStream = bindings.parallel().map(binding -> {
        try {
            Object component = binding.getProvider().get();
            return new Success<Object>(MethodUtils.invokeMethod(component, methodName, arguments));
        } catch (InvocationTargetException e) {
            if (!allowFailure) {
                // pass the original exception thrown
                throw new GugisException(e.getCause());
            }
            return new Failure<Object>(e.getCause());
        } catch (NoSuchMethodException | IllegalAccessException e) {
            throw new GugisException(e);
        }
    });
    return executedBindingsStream;
}

From source file:com.newtranx.util.mysql.fabric.SpringQueryAllShardsAspect.java
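This aspect runs an annotated query against every MySQL Fabric server group, in parallel when a ForkJoinPool is configured, and aggregates the per-shard results.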

@Around("@annotation(com.newtranx.util.mysql.fabric.QueryAllShards)")
public Object union(ProceedingJoinPoint pjp) throws Throwable {
    Method method = AspectJUtils.getMethod(pjp);
    QueryAllShards annotation = method.getAnnotation(QueryAllShards.class);
    String table = annotation.table();
    log.debug("Table=" + table);
    Set<String> groups = groupsCache.get(cacheKey);
    log.debug("ServerGroups=" + groups);
    List<Object> list;
    boolean readOnly = annotation.readOnly();
    Pattern excludePattern;
    String excludeRegex = annotation.excludeShardsPatternRegex();
    if (!StringUtils.isEmpty(excludeRegex)) {
        excludePattern = Pattern.compile(excludeRegex);
    } else {
        excludePattern = null;
    }

    Function<Boolean, List<Object>> computeFunction = (par) -> {
        Stream<String> stream = groups.stream();
        if (par)
            stream = stream.parallel();
        return stream.filter(gp -> {
            boolean exclude = excludePattern != null && excludePattern.matcher(gp).matches();
            if (exclude) {
                log.debug("Skipping group:" + gp);
            }
            return !exclude;
        }).map(gp -> {
            log.debug("Querying group: " + gp);
            ds.whenNewConnection().doInit(conn -> conn.setServerGroupName(gp))
                    .doInit(conn -> conn.setReadOnly(readOnly));
            try {
                return pjp.proceed();
            } catch (Throwable t) {
                throw Exceptions.propagate(t);
            } finally {
                ds.clearInitOps();
            }
        }).collect(Collectors.toList());
    };

    if (StringUtils.isEmpty(annotation.parallelPool())) {
        list = computeFunction.apply(false);
    } else {
        ForkJoinPool pool;
        if ("!jdkCommon".equals(annotation.parallelPool()))
            pool = ForkJoinPool.commonPool();
        else
            pool = applicationContext.getBean(annotation.parallelPool(), ForkJoinPool.class);
        log.debug("Executing queries in parallel, pool=" + pool);
        list = pool.submit(() -> {
            return computeFunction.apply(true);
        }).get();
    }
    Aggregator aggregator;
    try {
        aggregator = (Aggregator) annotation.aggregator().getDeclaredMethod("getInstance", EMPTY_PARAM)
                .invoke(null, EMPTY_ARGS);
    } catch (Exception e) {
        log.warn("Can not get singleton for class " + annotation.aggregator().getName()
                + ", creating new instance");
        aggregator = annotation.aggregator().newInstance();
    }
    return aggregator.apply(list);
}

From source file:com.github.pjungermann.config.specification.DefaultConfigSpecificationLoader.java
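This loader checks and reads specification files from a parallel stream, collecting constraints and errors into synchronized lists.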

@NotNull
@Override
public ConfigSpecification load(boolean recursive, @NotNull final Stream<File> sourceStream) {
    final List<Constraint> constraints = new ArrayList<>();
    final List<Constraint> syncConstraints = synchronizedList(constraints);
    final List<ConfigError> errors = new ArrayList<>();
    final List<ConfigError> syncErrors = synchronizedList(errors);

    sourceStream.parallel().filter(file -> {
        if (file.exists()) {
            return true;
        }

        syncErrors.add(new NoSuchFileError(file));
        return false;
    }).flatMap(new FilesResolver(recursive)).map(specificationReader).forEach(partial -> {
        syncConstraints.addAll(partial.constraints);
        syncErrors.addAll(partial.errors);
    });

    return new ConfigSpecification(typeConverter, constraints, errors);
}

From source file:uk.ac.ebi.ep.parser.parsers.ChEBICompounds.java
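This parser scans enzyme regulation summaries for inhibitors and activators in parallel batches of 500, then resolves the compound names against ChEBI.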

public void computeAndLoadChEBICompounds() {

    List<EnzymePortalSummary> enzymeSummary = enzymeSummaryRepository.findSummariesByCommentType(COMMENT_TYPE);
    LOGGER.warn("Number of Regulation Text from EnzymeSummary Table " + enzymeSummary.size());

    Stream<EnzymePortalSummary> existingStream = enzymeSummary.stream();
    Stream<List<EnzymePortalSummary>> partitioned = partition(existingStream, 500, 1);
    partitioned.parallel().forEach((chunk) -> {
        chunk.stream().forEach((summary) -> {
            String enzyme_regulation_text = summary.getCommentText();

            inhibitors.put(summary.getUniprotAccession(),
                    EPUtil.parseTextForInhibitors(enzyme_regulation_text));
            activators.put(summary.getUniprotAccession(),
                    EPUtil.parseTextForActivators(enzyme_regulation_text));

        });
    });

    LOGGER.debug("number of inhibitors and activators to process are : " + inhibitors.size() + ": "
            + activators.size());
    inhibitors.entrySet().stream().forEach((map) -> {
        map.getValue().stream().map((inhibitor) -> searchMoleculeInChEBI(inhibitor))
                .filter((inhibitor_from_chebi) -> (inhibitor_from_chebi != null))
                .map((inhibitor_from_chebi) -> {
                    inhibitor_from_chebi.setRelationship(Relationship.is_inhibitor_of.name());
                    inhibitor_from_chebi = CompoundUtil.computeRole(inhibitor_from_chebi,
                            inhibitor_from_chebi.getRelationship());
                    return inhibitor_from_chebi;
                }).map((inhibitor_from_chebi) -> {
                    inhibitor_from_chebi.setUniprotAccession(map.getKey());
                    return inhibitor_from_chebi;
                }).forEach((inhibitor_from_chebi) -> {
                    compounds.add(inhibitor_from_chebi);
                });
    });

    activators.entrySet().stream().forEach((map) -> {
        map.getValue().stream().map((activator) -> searchMoleculeInChEBI(activator))
                .filter((activator_from_chebi) -> (activator_from_chebi != null))
                .map((activator_from_chebi) -> {
                    activator_from_chebi.setRelationship(Relationship.is_activator_of.name());
                    activator_from_chebi = CompoundUtil.computeRole(activator_from_chebi,
                            activator_from_chebi.getRelationship());
                    return activator_from_chebi;
                }).map((activator_from_chebi) -> {
                    activator_from_chebi.setUniprotAccession(map.getKey());
                    return activator_from_chebi;
                }).forEach((activator_from_chebi) -> {
                    compounds.add(activator_from_chebi);
                });
    });

    LOGGER.warn("Number of compounds before filtering : " + compounds.size());

    compounds.removeIf(c -> (c.getCompoundId().equalsIgnoreCase("CHEBI:338412")
            || c.getCompoundId().equalsIgnoreCase("CHEBI:16412")
            || c.getCompoundId().equalsIgnoreCase("CHEBI:29678"))
            && c.getUniprotAccession().getAccession().equalsIgnoreCase("Q16539"));

    LOGGER.warn("Writing to Enzyme Portal database... Number of compounds to write : " + compounds.size());

    compoundRepository.save(compounds);

    inhibitors.clear();
    activators.clear();
    compounds.clear();
}
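
Note: this example mutates the inhibitors and activators maps from inside a parallel forEach; that is only thread-safe if they are concurrent maps (for example ConcurrentHashMap), which the snippet does not show.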

From source file:com.github.jackygurui.vertxredissonrepository.repository.SaveAndSearchAndGetCallInConcurrentTest.java
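This test saves, indexes, and retrieves 1,000 CallIn records concurrently via a parallel stream, timing the whole round trip.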

@Test
public void test2SaveAndSearchAndGetCallIn(TestContext context) throws Exception {
    Async async = context.async();
    JsonNode source = JsonLoader.fromResource("/CallIn.json");
    int records = 1000;
    AtomicLong total = new AtomicLong(0);
    ConcurrentHashMap<JsonObject, String> m = new ConcurrentHashMap<>();
    // Use range (not rangeClosed) so the stream yields exactly 'records' elements,
    // matching the completion check at the bottom of this test.
    Stream<JsonObject> stream = IntStream.range(0, records).mapToObj(e -> {
        JsonObject clone = new JsonObject(Json.encode(source));
        Long number = Long.parseLong(clone.getString("phoneNumber")) + e;
        clone.put("phoneNumber", number + "");
        Long callTime = clone.getLong("callTime") + e;
        clone.put("callTime", callTime);
        return clone;
    });
    StopWatch sw = new StopWatch();
    sw.start();
    stream.parallel().forEach(e -> {
        org.simondean.vertx.async.Async.waterfall().<String>task(t -> {
            callInRepository.create(Json.encode(e), t);
        }).<List<CallIn>>task((id, t) -> {
            m.put(e, id);
            AtomicLong idc = new AtomicLong(0);
            org.simondean.vertx.async.Async.retry().<List<CallIn>>task(tt -> {
                callInRepository.searchIndexByScoreAndGet("callTime", e.getDouble("callTime"),
                        e.getDouble("callTime"), 0, 1, ttt -> {
                            logger.info("id = " + id + " | retry count: " + idc.incrementAndGet());
                            tt.handle(ttt.succeeded() && ttt.result() != null && !ttt.result().isEmpty()
                                    ? Future.succeededFuture(ttt.result())
                                    : Future.failedFuture(ttt.cause()));
                        });
            }).times(100000).run(t);
        }).run(r -> {
            context.assertTrue(r.succeeded());
            if (r.succeeded()) {
                context.assertFalse(r.result().isEmpty());
                context.assertEquals(1, r.result().size());
                CallIn ci = r.result().iterator().next();
                context.assertNotNull(ci);
                logger.info(Json.encode(ci));
                CallIn cii = Json.decodeValue(e.put("id", m.get(e)).encode(), CallIn.class);
                context.assertEquals(Json.encode(cii), Json.encode(ci));
            }
            long t;
            if ((t = total.incrementAndGet()) == records) {
                sw.stop();
                logger.info("time to concurrently save and search and get " + records + " call in records: "
                        + sw.getTime());
                async.complete();
            } else {
                logger.info("t = " + t);
            }
        });
    });

}

From source file:com.spotify.styx.api.BackfillResource.java
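This endpoint assembles one payload per backfill in parallel, optionally attaching the run states of each backfill.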

public BackfillsPayload getBackfills(RequestContext rc) {
    final Optional<String> componentOpt = rc.request().parameter("component");
    final Optional<String> workflowOpt = rc.request().parameter("workflow");
    final boolean includeStatuses = rc.request().parameter("status").orElse("false").equals("true");
    final boolean showAll = rc.request().parameter("showAll").orElse("false").equals("true");

    final Stream<Backfill> backfills;
    try {
        if (componentOpt.isPresent() && workflowOpt.isPresent()) {
            final WorkflowId workflowId = WorkflowId.create(componentOpt.get(), workflowOpt.get());
            backfills = storage.backfillsForWorkflowId(showAll, workflowId).stream();
        } else if (componentOpt.isPresent()) {
            final String component = componentOpt.get();
            backfills = storage.backfillsForComponent(showAll, component).stream();
        } else if (workflowOpt.isPresent()) {
            final String workflow = workflowOpt.get();
            backfills = storage.backfillsForWorkflow(showAll, workflow).stream();
        } else {
            backfills = storage.backfills(showAll).stream();
        }
    } catch (IOException e) {
        throw Throwables.propagate(e);
    }

    final List<BackfillPayload> backfillPayloads = backfills.parallel()
            .map(backfill -> BackfillPayload.create(backfill,
                    includeStatuses
                            ? Optional.of(RunStateDataPayload.create(retrieveBackfillStatuses(backfill)))
                            : Optional.empty()))
            .collect(toList());

    return BackfillsPayload.create(backfillPayloads);
}

From source file:nova.core.util.RayTracer.java
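This ray tracer filters blocks to those with a Collider component and intersects them, optionally in parallel, returning results sorted by distance from the ray origin.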

public Stream<RayTraceBlockResult> rayTraceBlocks(Stream<Block> blockStream) {
    return (doParallel() ? blockStream.parallel() : blockStream)
            .filter(block -> block.components.has(Collider.class))
            .flatMap(block -> rayTraceCollider(block, (pos, cuboid) -> new RayTraceBlockResult(pos,
                    ray.origin.distance(pos), cuboid.sideOf(pos), cuboid, block)))
            .sorted();
}

From source file:nova.core.util.RayTracer.java
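The entity variant mirrors the block version above, with the same optional parallelism.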

public Stream<RayTraceEntityResult> rayTraceEntities(Stream<Entity> entityStream) {
    return (doParallel() ? entityStream.parallel() : entityStream)
            .filter(entity -> entity.components.has(Collider.class))
            .flatMap(entity -> rayTraceCollider(entity, (pos, cuboid) -> new RayTraceEntityResult(pos,
                    ray.origin.distance(pos), cuboid.sideOf(pos), cuboid, entity)))
            .sorted();
}