Example usage for java.util.stream IntStream range

List of usage examples for java.util.stream IntStream range

Introduction

This page lists example usages of java.util.stream.IntStream.range.

Prototype

public static IntStream range(int startInclusive, int endExclusive) 

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive) by an incremental step of 1.
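
A minimal, self-contained sketch of the half-open semantics (the class and method names below are our own, chosen for illustration):

import java.util.stream.IntStream;

public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Prints "0 1 2 3 4": the end bound (5) is exclusive.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // When startInclusive >= endExclusive, the stream is empty.
        System.out.println(IntStream.range(3, 3).count()); // prints 0
    }
}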

Usage

From source file:org.apache.tinkerpop.gremlin.groovy.jsr223.GremlinGroovyScriptEngineTest.java

@Test
public void shouldAllowVariableReuseAcrossThreads() throws Exception {
    final BasicThreadFactory testingThreadFactory = new BasicThreadFactory.Builder()
            .namingPattern("test-gremlin-scriptengine-%d").build();
    final ExecutorService service = Executors.newFixedThreadPool(8, testingThreadFactory);
    final GremlinGroovyScriptEngine scriptEngine = new GremlinGroovyScriptEngine();

    final AtomicBoolean failed = new AtomicBoolean(false);
    final int max = 512;
    final List<Pair<Integer, List<Integer>>> futures = Collections.synchronizedList(new ArrayList<>(max));
    IntStream.range(0, max).forEach(i -> {
        final int yValue = i * 2;
        final int zValue = i * -1;
        final Bindings b = new SimpleBindings();
        b.put("x", i);
        b.put("y", yValue);

        final String script = "z=" + zValue + ";[x,y,z]";
        try {
            service.submit(() -> {
                try {
                    final List<Integer> result = (List<Integer>) scriptEngine.eval(script, b);
                    futures.add(Pair.with(i, result));
                } catch (Exception ex) {
                    failed.set(true);
                }
            });
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    });

    service.shutdown();
    assertThat(service.awaitTermination(120000, TimeUnit.MILLISECONDS), is(true));

    // likely a concurrency exception if it occurs - and if it does then we've messed up, because that's what this
    // test is partially designed to protect against.
    assertThat(failed.get(), is(false));
    assertEquals(max, futures.size());
    futures.forEach(t -> {
        assertEquals(t.getValue0(), t.getValue1().get(0));
        assertEquals(t.getValue0() * 2, t.getValue1().get(1).intValue());
        assertEquals(t.getValue0() * -1, t.getValue1().get(2).intValue());
    });
}

From source file:io.soabase.halva.processor.caseclass.Templates.java

private void addClassTupleMethods(CaseClassSpec spec, TypeSpec.Builder builder, ClassName className,
        Optional<List<TypeVariableName>> typeVariableNames) {
    Optional<Class<? extends Tuple>> optionalTupleClass = Tuple.getTupleClass(spec.getItems().size());
    if (!optionalTupleClass.isPresent()) {
        return;
    }

    ClassName anyClassName = ClassName.get(Any.class);
    ClassName matchClassName = ClassName.get(AnyVal.class);
    ClassName anyClassTupleName = ClassName.get(AnyClassTuple.class);
    TypeName localCaseClassName = getLocalCaseClassName(className, typeVariableNames);
    TypeName classTupleClassName = ParameterizedTypeName.get(anyClassTupleName, localCaseClassName);

    CodeBlock.Builder returnCode = CodeBlock.builder().add("return new $T($T.Tu(", classTupleClassName,
            Tuple.class);
    IntStream.range(0, spec.getItems().size()).forEach(i -> {
        CaseClassItem item = spec.getItems().get(i);
        if (i > 0) {
            returnCode.add(", ");
        }
        returnCode.add("$T.loose($L)", anyClassName, item.getName());
    });
    returnCode.addStatement(")){}");

    MethodSpec.Builder tupleMethod = MethodSpec.methodBuilder(getClassTupleMethodName(className))
            .returns(classTupleClassName).addCode(returnCode.build())
            .addModifiers(Modifier.PUBLIC, Modifier.STATIC);
    spec.getItems().forEach(item -> {
        TypeName mainType = null;
        if (item.getType().getKind() == TypeKind.DECLARED) {
            DeclaredType declaredType = (DeclaredType) item.getType();
            List<? extends TypeMirror> typeArguments = declaredType.getTypeArguments();
            if (typeArguments.size() > 0) {
                TypeName[] typeNames = new TypeName[typeArguments.size()];
                for (int i = 0; i < typeArguments.size(); ++i) {
                    typeNames[i] = WildcardTypeName.subtypeOf(TypeName.get(typeArguments.get(i)).box());
                }
                mainType = ParameterizedTypeName.get(ClassName.get((TypeElement) declaredType.asElement()),
                        typeNames);
            }
        }
        if (mainType == null) {
            mainType = TypeName.get(item.getType()).box();
        }
        TypeName wildcardType = WildcardTypeName.subtypeOf(mainType);
        ParameterizedTypeName type = ParameterizedTypeName.get(matchClassName, wildcardType);
        tupleMethod.addParameter(type, item.getName());
    });

    if (typeVariableNames.isPresent()) {
        tupleMethod.addTypeVariables(typeVariableNames.get());
    }

    builder.addMethod(tupleMethod.build());
}

From source file:delfos.rs.trustbased.WeightedGraph.java

private static void validateWeightsGraph(AdjMatrixEdgeWeightedDigraph adjMatrixEdgeWeightedDigraph) {

    List<DirectedEdge> allEdges = IntStream.range(0, adjMatrixEdgeWeightedDigraph.V()).boxed().parallel()
            .map(vertex -> {
                Iterable<DirectedEdge> adjacentEdges = adjMatrixEdgeWeightedDigraph.adj(vertex);
                ArrayList<DirectedEdge> listOfEdges = new ArrayList<>();
                for (DirectedEdge edge : adjacentEdges) {
                    listOfEdges.add(edge);
                }
                return listOfEdges;
            }).flatMap(listOfEdges -> listOfEdges.parallelStream()).collect(Collectors.toList());

    List<DirectedEdge> badEdges = allEdges.parallelStream()
            .filter(edge -> (edge.weight() < 0) || (edge.weight() > 1)).collect(Collectors.toList());

    if (!badEdges.isEmpty()) {
        System.out.println("List of bad edges:");
        badEdges.forEach(edge -> System.out.println("\t" + edge));
        throw new IllegalStateException("Graph contains edges with weights outside [0, 1].");
    }

}

From source file:org.broadinstitute.gatk.tools.walkers.genotyper.afcalc.AlleleFrequencyCalculator.java

/**
 * Compute the probability of the alleles segregating given the genotype likelihoods of the samples in vc
 *
 * @param vc the VariantContext holding the alleles and sample information.  The VariantContext
 *           must have at least 1 alternative allele
 * @param refSnpIndelPseudocounts a total hack.  A length-3 vector containing Dirichlet prior pseudocounts to
 *                                be given to ref, alt SNP, and alt indel alleles.  Hack won't be necessary when we destroy the old AF calculators
 * @return result (for programming convenience)
 */
@Override
public AFCalculationResult getLog10PNonRef(final VariantContext vc, final int defaultPloidy,
        final int maximumAlternativeAlleles, final double[] refSnpIndelPseudocounts) {
    Utils.nonNull(vc, "vc is null");
    final int numAlleles = vc.getNAlleles();
    final List<Allele> alleles = vc.getAlleles();
    Utils.validateArg(numAlleles > 1,
            "VariantContext has only a single reference allele, but getLog10PNonRef requires at least one alt allele: "
                    + vc);

    final double[] priorPseudocounts = alleles.stream().mapToDouble(
            a -> a.isReference() ? refPseudocount : (a.length() > 1 ? snpPseudocount : indelPseudocount))
            .toArray();

    double[] alleleCounts = new double[numAlleles];
    final double flatLog10AlleleFrequency = -MathUtils.Log10Cache.get(numAlleles); // log10(1/numAlleles)
    double[] log10AlleleFrequencies = new IndexRange(0, numAlleles).mapToDouble(n -> flatLog10AlleleFrequency);
    double alleleCountsMaximumDifference = Double.POSITIVE_INFINITY;

    while (alleleCountsMaximumDifference > THRESHOLD_FOR_ALLELE_COUNT_CONVERGENCE) {
        final double[] newAlleleCounts = effectiveAlleleCounts(vc, log10AlleleFrequencies);
        alleleCountsMaximumDifference = Arrays.stream(MathArrays.ebeSubtract(alleleCounts, newAlleleCounts))
                .map(Math::abs).max().getAsDouble();
        alleleCounts = newAlleleCounts;
        final double[] posteriorPseudocounts = MathArrays.ebeAdd(priorPseudocounts, alleleCounts);

        // first iteration uses flat prior in order to avoid local minimum where the prior + no pseudocounts gives such a low
        // effective allele frequency that it overwhelms the genotype likelihood of a real variant
        // basically, we want a chance to get non-zero pseudocounts before using a prior that's biased against a variant
        log10AlleleFrequencies = new Dirichlet(posteriorPseudocounts).log10MeanWeights();
    }

    double[] log10POfZeroCountsByAllele = new double[numAlleles];
    double log10PNoVariant = 0;

    for (final Genotype g : vc.getGenotypes()) {
        if (!g.hasLikelihoods()) {
            continue;
        }
        final int ploidy = g.getPloidy() == 0 ? defaultPloidy : g.getPloidy();
        final GenotypeLikelihoodCalculator glCalc = GL_CALCS.getInstance(ploidy, numAlleles);

        final double[] log10GenotypePosteriors = log10NormalizedGenotypePosteriors(g, glCalc,
                log10AlleleFrequencies);

        // accumulate the log10 probability that every sample is hom-ref (i.e. no variant)
        log10PNoVariant += log10GenotypePosteriors[HOM_REF_GENOTYPE_INDEX];

        // per allele non-log space probabilities of zero counts for this sample
        // for each allele calculate the total probability of genotypes containing at least one copy of the allele
        final double[] log10ProbabilityOfNonZeroAltAlleles = new double[numAlleles];
        Arrays.fill(log10ProbabilityOfNonZeroAltAlleles, Double.NEGATIVE_INFINITY);

        for (int genotype = 0; genotype < glCalc.genotypeCount(); genotype++) {
            final double log10GenotypePosterior = log10GenotypePosteriors[genotype];
            glCalc.genotypeAlleleCountsAt(genotype)
                    .forEachAlleleIndexAndCount((alleleIndex,
                            count) -> log10ProbabilityOfNonZeroAltAlleles[alleleIndex] = MathUtils
                                    .log10SumLog10(log10ProbabilityOfNonZeroAltAlleles[alleleIndex],
                                            log10GenotypePosterior));
        }

        for (int allele = 0; allele < numAlleles; allele++) {
            // if prob of non hom ref == 1 up to numerical precision, short-circuit to avoid NaN
            if (log10ProbabilityOfNonZeroAltAlleles[allele] >= 0) {
                log10POfZeroCountsByAllele[allele] = Double.NEGATIVE_INFINITY;
            } else {
                log10POfZeroCountsByAllele[allele] += MathUtils
                        .log10OneMinusPow10(log10ProbabilityOfNonZeroAltAlleles[allele]);
            }
        }
    }

    // unfortunately AFCalculationResult expects integers for the MLE.  We really should emit the EM non-integer values
    // which are valuable (eg in CombineGVCFs) as the sufficient statistics of the Dirichlet posterior on allele frequencies
    final int[] integerAlleleCounts = Arrays.stream(alleleCounts).mapToInt(x -> (int) Math.round(x)).toArray();
    final int[] integerAltAlleleCounts = Arrays.copyOfRange(integerAlleleCounts, 1, numAlleles);

    //skip the ref allele (index 0)
    final Map<Allele, Double> log10PRefByAllele = IntStream.range(1, numAlleles).boxed()
            .collect(Collectors.toMap(alleles::get, a -> log10POfZeroCountsByAllele[a]));

    // we compute posteriors here and don't have the same prior that AFCalculationResult expects.  Therefore, we
    // give it our posterior as its "likelihood" along with a flat dummy prior
    final double[] dummyFlatPrior = { -1e-10, -1e-10 }; //TODO: HACK must be negative for AFCalcResult
    final double[] log10PosteriorOfNoVariantYesVariant = { log10PNoVariant,
            MathUtils.log10OneMinusPow10(log10PNoVariant) };

    return new AFCalculationResult(integerAltAlleleCounts, DUMMY_N_EVALUATIONS, alleles,
            log10PosteriorOfNoVariantYesVariant, dummyFlatPrior, log10PRefByAllele);
}

From source file:org.apache.tinkerpop.gremlin.groovy.engine.GremlinExecutorTest.java

@Test
public void shouldNotExhaustThreads() throws Exception {
    final ScheduledExecutorService executorService = Executors.newScheduledThreadPool(2, testingThreadFactory);
    final GremlinExecutor gremlinExecutor = GremlinExecutor.build().executorService(executorService)
            .scheduledExecutorService(executorService).create();

    final AtomicInteger count = new AtomicInteger(0);
    assertTrue(IntStream.range(0, 1000).mapToObj(i -> gremlinExecutor.eval("1+1")).allMatch(f -> {
        try {
            return (Integer) f.get() == 2;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        } finally {
            count.incrementAndGet();
        }
    }));

    assertEquals(1000, count.intValue());

    executorService.shutdown();
    executorService.awaitTermination(30000, TimeUnit.MILLISECONDS);
}

From source file:com.simiacryptus.mindseye.lang.Tensor.java

/**
 * Sums this tensor along its channel (third) dimension, producing a
 * single-channel tensor with the same width and height.
 *
 * @return the summed tensor
 */
@Nonnull
public Tensor sumChannels() {
    int[] dimensions = getDimensions();
    Tensor self = this;
    return new Tensor(dimensions[0], dimensions[1], 1).setByCoord(c -> {
        int[] coords = c.getCoords();
        return IntStream.range(0, dimensions[2]).mapToDouble(j -> self.get(coords[0], coords[1], j)).sum();
    });
}

From source file:me.ixfan.wechatkit.user.UserManager.java

/**
 * Gets the IDs of the tags attached to the specified user.
 *
 * @param openId the user's OpenID
 * @return the IDs of the tags attached to the user
 * @throws WeChatApiErrorException if the WeChat API returns an error code
 */
public int[] getTagsOfUser(String openId) throws WeChatApiErrorException {
    Args.notEmpty(openId, "OpenID");

    final String url = WeChatConstants.WECHAT_POST_GET_TAGS_OF_USER.replace("${ACCESS_TOKEN}",
            super.tokenManager.getAccessToken());
    final String jsonData = "{\"openid\":\"${OPENID}\"}";
    JsonObject jsonResp;
    try {
        jsonResp = HttpClientUtil.sendPostRequestWithJsonBody(url, jsonData.replace("${OPENID}", openId));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    if (jsonResp.has("tagid_list")) {
        JsonArray jsonArray = jsonResp.getAsJsonArray("tagid_list");
        int[] tagIds = new int[jsonArray.size()];
        if (jsonArray.size() > 0) {
            IntStream.range(0, jsonArray.size()).forEach(i -> tagIds[i] = jsonArray.get(i).getAsInt());
        }
        return tagIds;
    } else {
        throw new WeChatApiErrorException(jsonResp.get("errcode").getAsInt(),
                jsonResp.get("errmsg").getAsString());
    }
}

From source file:org.lightjason.agentspeak.action.builtin.TestCActionMathStatistics.java

/**
 * test linear selection
 */
@Test
public final void linearselection() {
    final List<ITerm> l_return = Collections.synchronizedList(new ArrayList<>());

    IntStream.range(0, 6500).parallel()
            .forEach(i -> new CLinearSelection().execute(false, IContext.EMPTYPLAN,
                    Stream.of(Stream.of("c", "d").collect(Collectors.toList()),
                            Stream.of(3, 7).collect(Collectors.toList())).map(CRawTerm::from)
                            .collect(Collectors.toList()),
                    l_return));

    Assert.assertEquals(
            (double) Collections.frequency(l_return.stream().map(ITerm::raw).collect(Collectors.toList()), "c")
                    / l_return.size(),
            0.3, 0.05);

    Assert.assertEquals(
            (double) Collections.frequency(l_return.stream().map(ITerm::raw).collect(Collectors.toList()), "d")
                    / l_return.size(),
            0.7, 0.05);
}

From source file:com.thinkbiganalytics.metadata.rest.api.DebugController.java

/**
 * Executes the given JCR-SQL query and returns its rows, for debugging.
 *
 * @param query the JCR-SQL query
 * @return the query result
 */
@GET
@Path("jcr-sql")
@Produces({ MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON })
public JcrQueryResult queryJcr(@QueryParam("query") final String query) {
    this.accessController.checkPermission(AccessController.SERVICES, MetadataAccessControl.ADMIN_METADATA);

    return metadata.read(() -> {
        List<List<String>> rows = new ArrayList<>();
        Long startTime = System.currentTimeMillis();
        JcrQueryResult jcrQueryResult = new JcrQueryResult();

        try {
            Session session = JcrMetadataAccess.getActiveSession();

            Workspace workspace = (Workspace) session.getWorkspace();

            String explainPlain = JcrQueryUtil.explainPlain(session, query);
            //start the timer now:
            startTime = System.currentTimeMillis();

            QueryResult result = JcrQueryUtil.query(session, query);
            jcrQueryResult.setExplainPlan(explainPlain);
            RowIterator rowItr = result.getRows();
            List<JcrQueryResultColumn> columns = new ArrayList<>();
            String colsStr = StringUtils.substringAfter(query.toLowerCase(), "select");
            colsStr = StringUtils.substringBefore(colsStr, "from");
            if (StringUtils.isNotBlank(colsStr)) {
                colsStr = colsStr.trim();
                columns = Arrays.asList(colsStr.split(",")).stream().map(c -> {
                    String columnName = c;
                    if (c.contains("as ")) {
                        columnName = StringUtils.substringAfter(c, "as ");
                    } else if (c.contains(" ")) {
                        columnName = StringUtils.substringAfter(c, " ");
                    }
                    return new JcrQueryResultColumn(columnName);
                }).collect(Collectors.toList());
            }
            jcrQueryResult.setColumns(columns);

            while (rowItr.hasNext()) {
                Row row = rowItr.nextRow();
                Value[] rowValues = row.getValues();
                if (rowValues != null) {
                    if (rowValues.length != columns.size()) {
                        columns = IntStream.range(0, rowValues.length)
                                .mapToObj(i -> new JcrQueryResultColumn("Column " + i))
                                .collect(Collectors.toList());
                        jcrQueryResult.setColumns(columns);
                    }
                    JcrQueryResultRow jcrQueryResultRow = new JcrQueryResultRow();
                    jcrQueryResult.addRow(jcrQueryResultRow);
                    List<JcrQueryResultColumnValue> jcrQueryResultColumnValues = Arrays.asList(rowValues)
                            .stream().map(v -> {
                                try {
                                    String value = v.getString();
                                    return new JcrQueryResultColumnValue(value);
                                } catch (Exception e) {
                                    return new JcrQueryResultColumnValue("ERROR: " + e.getMessage());
                                }
                            }).collect(Collectors.toList());
                    jcrQueryResultRow.setColumnValues(jcrQueryResultColumnValues);
                }
            }
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        long totalTime = System.currentTimeMillis() - startTime;
        jcrQueryResult.setQueryTime(totalTime);
        return jcrQueryResult;

    });
}

From source file:eu.amidst.dynamic.inference.DynamicMAPInference.java

/**
 * Builds the static models used for dynamic MAP inference, one per merge
 * offset of the class variable.
 */
public void computeMergedClassVarModels() {

    DynamicDAG dynamicDAG = model.getDynamicDAG();
    DynamicVariables dynamicVariables = model.getDynamicVariables();

    mergedClassVarModels = new ArrayList<>(nMergedClassVars);

    IntStream.range(0, nMergedClassVars).forEachOrdered(modelNumber -> {

        Variables variables = obtainReplicatedStaticVariables(dynamicVariables, modelNumber);

        DAG dag = obtainStaticDAG(dynamicDAG, variables, modelNumber);

        BayesianNetwork bn = obtainStaticMergedClassVarNetwork(dag, variables, modelNumber);

        mergedClassVarModels.add(bn);
    });
}