Example usage for java.util.stream IntStream range

List of usage examples for java.util.stream IntStream range

Introduction

On this page you can find usage examples for java.util.stream IntStream.range.

Prototype

public static IntStream range(int startInclusive, int endExclusive) 

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive) by an incremental step of 1.
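
A minimal, self-contained sketch of this behaviour (the class name and literal values below are illustrative, not taken from any of the projects quoted in the Usage section):

import java.util.stream.IntStream;

public class IntStreamRangeDemo {
    public static void main(String[] args) {
        // Prints 0 1 2 3 4 -- endExclusive (5) is never produced.
        IntStream.range(0, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // Sums the half-open interval [1, 10): 45.
        System.out.println(IntStream.range(1, 10).sum());

        // An empty stream results when startInclusive >= endExclusive.
        System.out.println(IntStream.range(3, 3).count()); // prints 0
    }
}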

Usage

From source file:com.intuit.wasabi.tests.service.priority.BatchPriorityAssignmentTest.java

@Test(groups = { "batchAssign" }, dependsOnGroups = { "setup" })
public void t_batchAssign() {
    String labels = "{\"labels\": ["
            + validExperimentsLists.stream().map(s -> "\"" + s.label + "\"").collect(Collectors.joining(","))
            + "]}";
    response = apiServerConnector.doPost(
            "/assignments/applications/" + validExperimentsLists.get(0).applicationName + "/users/johnDoe",
            labels);
    assertReturnCode(response, HttpStatus.SC_OK);
    LOGGER.debug("status: " + response.statusCode() + "\noutput: " + response.asString());
    Type listType = new TypeToken<Map<String, ArrayList<Map<String, Object>>>>() {
    }.getType();
    Map<String, List<Map<String, Object>>> result = new Gson().fromJson(response.asString(), listType);
    List<Map<String, Object>> assignments = result.get("assignments");
    Assert.assertNotNull(assignments.get(0).get("assignment"));
    IntStream.range(1, assignments.size())
            .forEach(i -> Assert.assertNull(assignments.get(i).get("assignment")));
}

From source file:org.apache.hadoop.hbase.client.TestAsyncTable.java

@Test
public void testSimpleMultiple() throws Exception {
    AsyncTableBase table = getTable.get();
    int count = 100;
    CountDownLatch putLatch = new CountDownLatch(count);
    IntStream.range(0, count)
            .forEach(i -> table.put(new Put(concat(row, i)).addColumn(FAMILY, QUALIFIER, concat(VALUE, i)))
                    .thenAccept(x -> putLatch.countDown()));
    putLatch.await();
    BlockingQueue<Boolean> existsResp = new ArrayBlockingQueue<>(count);
    IntStream.range(0, count).forEach(i -> table.exists(new Get(concat(row, i)).addColumn(FAMILY, QUALIFIER))
            .thenAccept(x -> existsResp.add(x)));
    for (int i = 0; i < count; i++) {
        assertTrue(existsResp.take());
    }
    BlockingQueue<Pair<Integer, Result>> getResp = new ArrayBlockingQueue<>(count);
    IntStream.range(0, count).forEach(i -> table.get(new Get(concat(row, i)).addColumn(FAMILY, QUALIFIER))
            .thenAccept(x -> getResp.add(Pair.newPair(i, x))));
    for (int i = 0; i < count; i++) {
        Pair<Integer, Result> pair = getResp.take();
        assertArrayEquals(concat(VALUE, pair.getFirst()), pair.getSecond().getValue(FAMILY, QUALIFIER));
    }
    CountDownLatch deleteLatch = new CountDownLatch(count);
    IntStream.range(0, count)
            .forEach(i -> table.delete(new Delete(concat(row, i))).thenAccept(x -> deleteLatch.countDown()));
    deleteLatch.await();
    IntStream.range(0, count).forEach(i -> table.exists(new Get(concat(row, i)).addColumn(FAMILY, QUALIFIER))
            .thenAccept(x -> existsResp.add(x)));
    for (int i = 0; i < count; i++) {
        assertFalse(existsResp.take());
    }
    IntStream.range(0, count).forEach(i -> table.get(new Get(concat(row, i)).addColumn(FAMILY, QUALIFIER))
            .thenAccept(x -> getResp.add(Pair.newPair(i, x))));
    for (int i = 0; i < count; i++) {
        Pair<Integer, Result> pair = getResp.take();
        assertTrue(pair.getSecond().isEmpty());
    }
}

From source file:org.lightjason.agentspeak.agent.TestCAgent.java

/**
 * static function to run an agent
 *
 * @param p_script script path
 * @param p_name agent name
 * @return tuple of boolean and string
 */
private static Pair<Boolean, String> testAgentManual(final String p_script, final String p_name) {
    final IAgent<?> l_agent;
    try (final InputStream l_stream = new FileInputStream(p_script);) {
        l_agent = new CAgentGenerator(l_stream, ACTIONS.keySet(), new CAggregation(ACTIONS),
                Collections.<IPlanBundle>emptySet(), new CVariableBuilder()).generatesingle();

        // run 5 cycles
        IntStream.range(0, 5).forEach(i -> {
            try {
                l_agent.call();
                l_agent.beliefbase().add(CLiteral.from("counter", Stream.of(CRawTerm.from(i))));
                l_agent.trigger(CTrigger.from(ITrigger.EType.DELETEGOAL, CLiteral.from("myexternal")));
            } catch (final Exception l_exception) {
                assertTrue(
                        MessageFormat
                                .format("{0} {1}", l_exception.getClass().getName(),
                                        l_exception.getMessage().isEmpty() ? "" : l_exception.getMessage())
                                .trim(),
                        false);
            }
        });
    } catch (final Exception l_exception) {
        return new ImmutablePair<>(false,
                MessageFormat.format("{0} passed with failure: {1}", p_name, l_exception));
    }

    return new ImmutablePair<>(true, MessageFormat.format("{0} passed successfully in: {1}", p_name, l_agent));
}

From source file:org.lightjason.examples.pokemon.CMain.java

/**
 * execute simulation
 *
 * @param p_screen screen reference
 */
private static void execute(final CScreen p_screen) {
    IntStream.range(0, CConfiguration.INSTANCE.simulationsteps()).mapToObj(i -> {
        // update screen take screenshot and run object execution
        p_screen.iteration(i);
        Stream.concat(Stream.of(
                //CConfiguration.INSTANCE.evaluation(),
                CConfiguration.INSTANCE.environment()),
                Stream.concat(CConfiguration.INSTANCE.staticelements().parallelStream(),
                        CConfiguration.INSTANCE.agents().parallelStream()))
                .parallel().forEach(j -> {
                    try {
                        j.call();
                    } catch (final Exception l_exception) {
                        LOGGER.warning(l_exception.toString());
                        if (CConfiguration.INSTANCE.stackstrace())
                            l_exception.printStackTrace(System.err);
                    }
                });

        // thread sleep for slowing down
        if (CConfiguration.INSTANCE.threadsleeptime() > 0)
            try {
                Thread.sleep(CConfiguration.INSTANCE.threadsleeptime());
            } catch (final InterruptedException l_exception) {
                LOGGER.warning(l_exception.toString());
            }

        // checks that the simulation is closed
        return p_screen.isDisposed();
    }).filter(i -> i).findFirst();
}

From source file:org.apache.sysml.hops.estim.EstimatorMatrixHistogram.java

public double estimIntern(MatrixHistogram h1, MatrixHistogram h2, OpCode op, long[] misc) {
    double msize = (double) h1.getRows() * h1.getCols();
    switch (op) {
    case MM:
        return estimInternMM(h1, h2);
    case MULT: {
        final double scale = IntStream.range(0, h1.getCols()).mapToDouble(j -> (double) h1.cNnz[j] * h2.cNnz[j])
                .sum() / h1.getNonZeros() / h2.getNonZeros();
        return IntStream.range(0, h1.getRows()).mapToDouble(i -> (double) h1.rNnz[i] * h2.rNnz[i] * scale) //collisions
                .sum() / msize;
    }
    case PLUS: {
        final double scale = IntStream.range(0, h1.getCols()).mapToDouble(j -> (double) h1.cNnz[j] * h2.cNnz[j])
                .sum() / h1.getNonZeros() / h2.getNonZeros();
        return IntStream.range(0, h1.getRows()).mapToDouble(i -> (double) h1.rNnz[i] + h2.rNnz[i] //all minus collisions
                - (double) h1.rNnz[i] * h2.rNnz[i] * scale).sum() / msize;
    }
    case EQZERO:
        return OptimizerUtils.getSparsity(h1.getRows(), h1.getCols(),
                (long) h1.getRows() * h1.getCols() - h1.getNonZeros());
    case DIAG:
        return (h1.getCols() == 1) ? OptimizerUtils.getSparsity(h1.getRows(), h1.getRows(), h1.getNonZeros())
                : OptimizerUtils.getSparsity(h1.getRows(), 1, Math.min(h1.getRows(), h1.getNonZeros()));
    //binary operations that preserve sparsity exactly
    case CBIND:
        return OptimizerUtils.getSparsity(h1.getRows(), h1.getCols() + h2.getCols(),
                h1.getNonZeros() + h2.getNonZeros());
    case RBIND:
        return OptimizerUtils.getSparsity(h1.getRows() + h2.getRows(), h1.getCols(),
                h1.getNonZeros() + h2.getNonZeros());
    //unary operations that preserve sparsity exactly
    case NEQZERO:
    case TRANS:
    case RESHAPE:
        return OptimizerUtils.getSparsity(h1.getRows(), h1.getCols(), h1.getNonZeros());
    default:
        throw new NotImplementedException();
    }
}

From source file:org.lightjason.agentspeak.action.builtin.TestCActionCollectionMultimap.java

/**
 * test as-map
 */
@Test
public final void asmap() {
    final Multimap<Integer, String> l_map = HashMultimap.create();
    final List<ITerm> l_return = new ArrayList<>();

    final Random l_random = new Random();
    IntStream.range(0, 5).map(i -> l_random.nextInt(8))
            .forEach(i -> l_map.put(i, RandomStringUtils.random(10, "abcdefghijklmnop")));

    new CAsMap().execute(false, IContext.EMPTYPLAN,
            Stream.of(l_map).map(CRawTerm::from).collect(Collectors.toList()), l_return);

    Assert.assertEquals(l_return.size(), 1);
    Assert.assertTrue(l_return.get(0).raw() instanceof Map<?, ?>);
    Assert.assertArrayEquals(l_return.get(0).<Map<?, ?>>raw().keySet().toArray(), l_map.keySet().toArray());

    Assert.assertArrayEquals(
            CCommon.flatten(l_return.get(0).<Map<?, ?>>raw().values().stream().map(CRawTerm::from))
                    .map(ITerm::raw).toArray(),
            l_map.values().toArray());
}

From source file:io.pravega.controller.store.stream.PersistentStreamBase.java

/***
 * Creates a new stream record in the stream store.
 * Create a new task of type Create.
 * If a create task already exists, use it and bring it to completion.
 * If no task exists, fall through all create steps; they are all idempotent.
 * <p>
 * Create Steps:
 * 1. Create new store configuration
 * 2. Create new segment table.
 * 3. Create new history table.
 * 4. Create new index
 *
 * @param configuration stream configuration.
 * @return : future of whether it was done or not
 */
@Override
public CompletableFuture<CreateStreamResponse> create(final StreamConfiguration configuration,
        long createTimestamp) {

    return checkScopeExists().thenCompose((Void v) -> checkStreamExists(configuration, createTimestamp))
            .thenCompose(createStreamResponse -> storeCreationTimeIfAbsent(createStreamResponse.getTimestamp())
                    .thenCompose(
                            (Void v) -> createConfigurationIfAbsent(createStreamResponse.getConfiguration()))
                    .thenCompose((Void v) -> createStateIfAbsent(State.CREATING))
                    .thenCompose((Void v) -> createNewSegmentTable(createStreamResponse.getConfiguration(),
                            createStreamResponse.getTimestamp()))
                    .thenCompose((Void v) -> getState()).thenCompose(state -> {
                        if (state.equals(State.CREATING)) {
                            return createNewEpoch(0);
                        } else {
                            return CompletableFuture.completedFuture(null);
                        }
                    }).thenCompose((Void v) -> {
                        final int numSegments = createStreamResponse.getConfiguration().getScalingPolicy()
                                .getMinNumSegments();
                        final byte[] historyTable = TableHelper.createHistoryTable(
                                createStreamResponse.getTimestamp(),
                                IntStream.range(0, numSegments).boxed().collect(Collectors.toList()));

                        return createHistoryTableIfAbsent(new Data<>(historyTable, null));
                    })
                    .thenCompose((Void v) -> createIndexTableIfAbsent(new Data<>(
                            TableHelper.createIndexTable(createStreamResponse.getTimestamp(), 0), null)))
                    .thenApply((Void v) -> createStreamResponse));
}

From source file:com.streamsets.pipeline.stage.processor.parser.DataParserProcessor.java

@Override
protected void process(Record record, SingleLaneBatchMaker batchMaker) throws StageException {
    Field field = record.get(configs.fieldPathToParse);
    if (field != null) {
        try {
            final String parserId = String.format("%s_%s_%s", getContext().getStageInfo().getInstanceName(),
                    record.getHeader().getSourceId(), configs.fieldPathToParse);
            List<Record> parsedRecords;
            switch (field.getType()) {
            case STRING:
                try (DataParser parser = getContext().getService(DataFormatParserService.class)
                        .getParser(parserId, field.getValueAsString())) {
                    parsedRecords = parser.parseAll();
                }
                break;
            case BYTE_ARRAY:
                try (DataParser parser = getContext().getService(DataFormatParserService.class)
                        .getParser(parserId, field.getValueAsByteArray())) {
                    parsedRecords = parser.parseAll();
                }
                break;
            case FILE_REF:
                try {
                    final InputStream inputStream = field.getValueAsFileRef().createInputStream(getContext(),
                            InputStream.class);
                    byte[] fieldData = IOUtils.toByteArray(inputStream);

                    try (DataParser parser = getContext().getService(DataFormatParserService.class)
                            .getParser(parserId, fieldData)) {
                        parsedRecords = parser.parseAll();
                    }
                } catch (IOException e) {
                    throw new OnRecordErrorException(record, Errors.DATAPARSER_04, configs.fieldPathToParse,
                            record.getHeader().getSourceId(), e.getMessage(), e);
                }
                break;
            default:
                throw new OnRecordErrorException(record, Errors.DATAPARSER_02, configs.fieldPathToParse,
                        field.getType().name());

            }

            if (parsedRecords == null || parsedRecords.isEmpty()) {
                LOG.warn("No records were parsed from field {} of record {}", configs.fieldPathToParse,
                        record.getHeader().getSourceId());
                batchMaker.addRecord(record);
                return;
            }
            switch (configs.multipleValuesBehavior) {
            case FIRST_ONLY:
                final Record first = parsedRecords.get(0);
                record.set(configs.parsedFieldPath, first.get());
                batchMaker.addRecord(record);
                break;
            case ALL_AS_LIST:
                List<Field> multipleFieldValues = new LinkedList<>();
                parsedRecords.forEach(parsedRecord -> multipleFieldValues.add(parsedRecord.get()));
                record.set(configs.parsedFieldPath, Field.create(multipleFieldValues));
                batchMaker.addRecord(record);
                break;
            case SPLIT_INTO_MULTIPLE_RECORDS:
                final String recordIdSuffix = configs.fieldPathToParse.replaceFirst("^/", "").replaceAll("/",
                        "_");
                IntStream.range(0, parsedRecords.size()).forEach(idx -> {
                    Record parsedRecord = parsedRecords.get(idx);
                    Record splitRecord = getContext().cloneRecord(record,
                            String.format("%s_%d", recordIdSuffix, idx));
                    splitRecord.set(configs.parsedFieldPath, parsedRecord.get());
                    batchMaker.addRecord(splitRecord);
                });
                break;
            }
        } catch (DataParserException | IOException ex) {
            throw new OnRecordErrorException(record, Errors.DATAPARSER_01, configs.fieldPathToParse,
                    record.getHeader().getSourceId(), ex.toString(), ex);
        }
    } else {
        throw new OnRecordErrorException(record, Errors.DATAPARSER_05, configs.fieldPathToParse,
                record.getHeader().getSourceId());
    }
}

From source file:com.devicehive.service.DeviceCommandServiceTest.java

@Test
@DirtiesContext(methodMode = DirtiesContext.MethodMode.BEFORE_METHOD)
public void testFindCommandsByGuidAndName() throws Exception {
    final List<String> names = IntStream.range(0, 5).mapToObj(i -> RandomStringUtils.randomAlphabetic(10))
            .collect(Collectors.toList());
    final Date timestampSt = timestampService.getDate();
    final Date timestampEnd = timestampService.getDate();
    final String parameters = "{\"param1\":\"value1\",\"param2\":\"value2\"}";
    final String guid = UUID.randomUUID().toString();

    final Set<String> namesForSearch = new HashSet<>(Arrays.asList(names.get(0), names.get(2), names.get(3)));

    final List<DeviceCommand> commandList = namesForSearch.stream().map(name -> {
        DeviceCommand command = new DeviceCommand();
        command.setId(System.nanoTime());
        command.setDeviceGuid(guid);
        command.setCommand(name);
        command.setTimestamp(timestampService.getDate());
        command.setParameters(new JsonStringWrapper(parameters));
        command.setStatus(DEFAULT_STATUS);
        return command;
    }).collect(Collectors.toList());

    when(requestHandler.handle(any(Request.class))).then(invocation -> {
        CommandSearchResponse response = new CommandSearchResponse();
        response.setCommands(commandList);
        return Response.newBuilder().withBody(response).buildSuccess();
    });

    deviceCommandService.find(Collections.singleton(guid), names, timestampSt, timestampEnd, DEFAULT_STATUS)
            .thenAccept(commands -> {
                assertEquals(3, commands.size());
                assertEquals(new HashSet<>(commandList), new HashSet<>(commands));
            }).get(15, TimeUnit.SECONDS);

    verify(requestHandler, times(1)).handle(argument.capture());
}

From source file:kishida.cnn.layers.FullyConnect.java

@Override
public float[] backward(float[] in, float[] delta) {
    Arrays.fill(newDelta, 0);
    Arrays.fill(diffed, 0);
    for (int i = 0; i < result.length; ++i) {
        diffed[i] = activation.diff(result[i]);
    }
    IntStream.range(0, in.length).parallel().forEach((i) -> {
        for (int j = 0; j < outputSize; ++j) {
            if (dropout[j] != 1) {
                continue;
            }
            float d = diffed[j] * delta[j];
            newDelta[i] += d * weight[i * outputSize + j];//in[i] *;
            weightDelta[i * outputSize + j] += d * in[i] * parent.getLearningRate();
        }
    });
    IntStream.range(0, outputSize).parallel().filter(j -> dropout[j] == 1).forEach(j -> {
        biasDelta[j] += diffed[j] * delta[j] * parent.getLearningRate();
    });
    return newDelta;
}