Example usage for java.util.stream IntStream rangeClosed

List of usage examples for java.util.stream IntStream rangeClosed

Introduction

On this page you can find usage examples for java.util.stream.IntStream.rangeClosed.

Prototype

public static IntStream rangeClosed(int startInclusive, int endInclusive) 

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endInclusive (inclusive) by an incremental step of 1.
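
Before the project examples, a minimal standalone sketch contrasting rangeClosed with its half-open sibling IntStream.range:

import java.util.stream.IntStream;

public class RangeClosedDemo {
    public static void main(String[] args) {
        // rangeClosed includes both endpoints: prints 1 2 3 4 5
        IntStream.rangeClosed(1, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // range excludes the upper bound: prints 1 2 3 4
        IntStream.range(1, 5).forEach(i -> System.out.print(i + " "));
        System.out.println();

        // Inclusive bounds read naturally for closed-form sums: 1 + 2 + ... + 100 = 5050
        System.out.println(IntStream.rangeClosed(1, 100).sum());
    }
}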

Usage

From source file: com.ikanow.aleph2.shared.crud.elasticsearch.services.TestElasticsearchCrudService.java

@Test
public void test_Counting() throws InterruptedException, ExecutionException {

    final ElasticsearchCrudService<TestBean> service = getTestService("testCounting", TestBean.class);

    final List<TestBean> l = IntStream.rangeClosed(0, 9).boxed()
            .map(i -> BeanTemplateUtils.build(TestBean.class).with("_id", "id" + i)
                    .with("test_string", "test_string" + i).with("test_long", (Long) (long) i).done().get())
            .collect(Collectors.toList());

    for (TestBean t : l) {
        service.storeObject(t).get();
    }

    assertEquals(10, (long) service.countObjects().get());

    service.optimizeQuery(Arrays.asList("test_string")).get(); // (The get() waits for completion)

    // 1) all docs

    assertEquals(10L, (long) service.countObjects().get());

    // 2) count subset of docs

    final QueryComponent<TestBean> query_2 = CrudUtils.allOf(TestBean.class)
            .rangeAbove("test_string", "test_string4", false).withPresent("test_long")
            .orderBy(Tuples._2T("test_long", 1));

    assertEquals(6L, (long) service.countObjectsBySpec(query_2).get());

    // 3) subset of docs (limit)

    final QueryComponent<TestBean> query_3 = CrudUtils.allOf(TestBean.class)
            .rangeAbove("test_string", "test_string6", false).withPresent("test_long")
            .orderBy(Tuples._2T("test_long", 1)).limit(4);

    assertEquals(4L, (long) service.countObjectsBySpec(query_3).get());

    // 4) no docs

    final QueryComponent<TestBean> query_4 = CrudUtils.allOf(TestBean.class)
            .rangeAbove("test_string", "test_string99", false).withPresent("test_long")
            .orderBy(Tuples._2T("test_long", 1)).limit(4);

    assertEquals(0L, (long) service.countObjectsBySpec(query_4).get());
}
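
A general note on the fixture-building idiom above: rangeClosed(0, 9) covers exactly the same indices as range(0, 10), and mapToObj can replace the boxed().map() pair. A minimal sketch with a hypothetical Fixture record (Java 16+) standing in for TestBean and its builder:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class FixtureSketch {
    // Hypothetical stand-in for TestBean.
    record Fixture(String id, String testString, long testLong) {}

    public static void main(String[] args) {
        // mapToObj goes straight from int to object, so no boxed() step is needed.
        List<Fixture> fixtures = IntStream.rangeClosed(0, 9)
                .mapToObj(i -> new Fixture("id" + i, "test_string" + i, i))
                .collect(Collectors.toList());
        System.out.println(fixtures.size()); // 10
    }
}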

From source file: energy.usef.core.service.business.CorePlanboardBusinessServiceTest.java

@Test
public void testFindLastPrognosesLocalDatePrognosisType() throws Exception {
    final LocalDate period = new LocalDate(2015, 1, 1);
    final String usefIdentifier = "brp1.usef-example.com";

    Mockito.when(ptuPrognosisRepository.findLastPrognoses(Matchers.any(LocalDate.class),
            Matchers.eq(Optional.of(energy.usef.core.model.PrognosisType.A_PLAN)),
            Matchers.eq(Optional.empty()), Matchers.eq(Optional.empty())))
            .then(call -> IntStream.rangeClosed(1, 96).mapToObj(index -> {
                PtuPrognosis ptuPrognosis = new PtuPrognosis();
                PtuContainer ptuContainer = new PtuContainer((LocalDate) call.getArguments()[0], index);
                ptuPrognosis.setType(energy.usef.core.model.PrognosisType.A_PLAN);
                ptuPrognosis.setConnectionGroup(new BrpConnectionGroup(usefIdentifier));
                ptuPrognosis.setPtuContainer(ptuContainer);
                return ptuPrognosis;
            }).collect(Collectors.toList()));

    // invocation
    List<PtuPrognosis> lastPrognoses = corePlanboardBusinessService.findLastPrognoses(period,
            energy.usef.core.model.PrognosisType.A_PLAN);
    // assertions
    Assert.assertNotNull(lastPrognoses);
    Assert.assertEquals(96, lastPrognoses.size());
    Mockito.verify(ptuPrognosisRepository, Mockito.times(1)).findLastPrognoses(Matchers.eq(period),
            Matchers.eq(Optional.of(energy.usef.core.model.PrognosisType.A_PLAN)),
            Matchers.eq(Optional.empty()), Matchers.eq(Optional.empty()));
}
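
The stubbing pattern above recurs in the next two tests: a Mockito Answer rebuilds the result list on every call, with rangeClosed(1, 96) producing one stub per index (96 entries, presumably one per 15-minute program time unit in a day). A minimal sketch of just that pattern, using a hypothetical IndexedRepository in place of PtuPrognosisRepository:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import org.mockito.Mockito;

public class AnswerSketch {
    // Hypothetical repository interface.
    interface IndexedRepository {
        List<Integer> findAll();
    }

    public static void main(String[] args) {
        IndexedRepository repo = Mockito.mock(IndexedRepository.class);

        // then(...) computes the result lazily on each invocation;
        // rangeClosed(1, 96) yields the inclusive indices 1..96, one per stubbed row.
        Mockito.when(repo.findAll())
                .then(call -> IntStream.rangeClosed(1, 96).boxed().collect(Collectors.toList()));

        System.out.println(repo.findAll().size()); // 96
    }
}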

From source file: energy.usef.core.service.business.CorePlanboardBusinessServiceTest.java

@Test
public void testFindLastPrognosesLocalDateString() throws Exception {
    final LocalDate period = new LocalDate(2015, 1, 1);
    final String usefIdentifier = "brp1.usef-example.com";

    Mockito.when(ptuPrognosisRepository.findLastPrognoses(Matchers.any(LocalDate.class),
            Matchers.eq(Optional.empty()), Matchers.eq(Optional.of(usefIdentifier)),
            Matchers.eq(Optional.empty()))).then(call -> IntStream.rangeClosed(1, 96).mapToObj(index -> {
                PtuPrognosis ptuPrognosis = new PtuPrognosis();
                PtuContainer ptuContainer = new PtuContainer((LocalDate) call.getArguments()[0], index);
                ptuPrognosis.setType(energy.usef.core.model.PrognosisType.A_PLAN);
                ptuPrognosis.setConnectionGroup(new BrpConnectionGroup(usefIdentifier));
                ptuPrognosis.setPtuContainer(ptuContainer);
                return ptuPrognosis;
            }).collect(Collectors.toList()));

    // invocation
    List<PtuPrognosis> lastPrognoses = corePlanboardBusinessService.findLastPrognoses(period, usefIdentifier);
    // assertions
    Assert.assertNotNull(lastPrognoses);
    Assert.assertEquals(96, lastPrognoses.size());
    Mockito.verify(ptuPrognosisRepository, Mockito.times(1)).findLastPrognoses(Matchers.eq(period),
            Matchers.eq(Optional.empty()), Matchers.eq(Optional.of(usefIdentifier)),
            Matchers.eq(Optional.empty()));
}

From source file: energy.usef.core.service.business.CorePlanboardBusinessServiceTest.java

@Test
public void testFindLastPrognosesLocalDate() throws Exception {
    final LocalDate period = new LocalDate(2015, 1, 1);
    final String usefIdentifier = "brp1.usef-example.com";

    Mockito.when(ptuPrognosisRepository.findLastPrognoses(Matchers.any(LocalDate.class),
            Matchers.eq(Optional.empty()), Matchers.eq(Optional.empty()), Matchers.eq(Optional.empty())))
            .then(call -> IntStream.rangeClosed(1, 96).mapToObj(index -> {
                PtuPrognosis ptuPrognosis = new PtuPrognosis();
                PtuContainer ptuContainer = new PtuContainer((LocalDate) call.getArguments()[0], index);
                ptuPrognosis.setType(energy.usef.core.model.PrognosisType.A_PLAN);
                ptuPrognosis.setConnectionGroup(new BrpConnectionGroup(usefIdentifier));
                ptuPrognosis.setPtuContainer(ptuContainer);
                return ptuPrognosis;
            }).collect(Collectors.toList()));

    // invocation
    List<PtuPrognosis> lastPrognoses = corePlanboardBusinessService.findLastPrognoses(period);
    // assertions
    Assert.assertNotNull(lastPrognoses);
    Assert.assertEquals(96, lastPrognoses.size());
    Mockito.verify(ptuPrognosisRepository, Mockito.times(1)).findLastPrognoses(Matchers.eq(period),
            Matchers.eq(Optional.empty()), Matchers.eq(Optional.empty()), Matchers.eq(Optional.empty()));
}

From source file: energy.usef.core.service.business.CorePlanboardBusinessServiceTest.java

private List<Connection> buildConnectionList() {
    List<Connection> connectionList = new ArrayList<>();

    IntStream.rangeClosed(1, 9).forEach(i -> {
        Connection connection = new Connection();
        connection.setEntityAddress("ean1.000001.00" + i);
        connectionList.add(connection);
    });

    return connectionList;
}
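
The helper above uses forEach to push into a pre-built list. A side-effect-free sketch of the same method (assuming the Connection entity from this example) maps each index straight to a Connection and collects:

private List<Connection> buildConnectionList() {
    return IntStream.rangeClosed(1, 9)
            .mapToObj(i -> {
                Connection connection = new Connection();
                connection.setEntityAddress("ean1.000001.00" + i);
                return connection;
            })
            .collect(Collectors.toList());
}

Collecting avoids mutating shared state from inside the lambda, which also keeps the pipeline safe if it is ever made parallel.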

From source file: com.ikanow.aleph2.logging.service.TestLoggingService.java

@SuppressWarnings("unchecked")
@Test
public void test_simpleLog() throws InterruptedException, ExecutionException {
    final String subsystem_name = "logging_test8";
    final int num_messages_to_log = 50;
    final DataBucketBean test_bucket = getTestBucket("test8", Optional.of(Level.ALL.toString()),
            Optional.empty());
    final IBucketLogger user_logger = logging_service.getLogger(test_bucket);
    final IBucketLogger system_logger = logging_service.getSystemLogger(test_bucket);
    final IBucketLogger external_logger = logging_service.getExternalLogger(subsystem_name);
    //log a few messages
    IntStream.rangeClosed(1, num_messages_to_log).boxed().forEach(i -> {
        user_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name);
        system_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name);
        external_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name);

        user_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command");
        system_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command");
        external_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name,
                () -> "command");

        user_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command",
                () -> 32);
        system_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command",
                () -> 32);
        external_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command",
                () -> 32);

        user_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command",
                () -> 32, () -> ImmutableMap.of());
        system_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command",
                () -> 32, () -> ImmutableMap.of());
        external_logger.log(Level.ERROR, true, () -> "test message " + i, () -> subsystem_name, () -> "command",
                () -> 32, () -> ImmutableMap.of());

        //merge shorthand rules
        IBasicMessageBeanSupplier message = new BasicMessageBeanSupplier(true, () -> "test_message",
                () -> "command", () -> null, () -> "test_message " + i, () -> ImmutableMap.of());

        user_logger.log(Level.ERROR, message, "key1", LoggingMergeFunctions.replaceMessage());
        system_logger.log(Level.ERROR, message, "key1", LoggingMergeFunctions.replaceMessage());
        external_logger.log(Level.ERROR, message, "key1", LoggingMergeFunctions.replaceMessage());

        user_logger.log(Level.ERROR, message, "key1", (b) -> b, LoggingMergeFunctions.replaceMessage());
        system_logger.log(Level.ERROR, message, "key1", (b) -> b, LoggingMergeFunctions.replaceMessage());
        external_logger.log(Level.ERROR, message, "key1", (b) -> b, LoggingMergeFunctions.replaceMessage());

        user_logger.log(Level.ERROR, message, "key1", Arrays.asList(LoggingRules.logEveryCount(1)),
                LoggingMergeFunctions.replaceMessage());
        system_logger.log(Level.ERROR, message, "key1", Arrays.asList(LoggingRules.logEveryCount(1)),
                LoggingMergeFunctions.replaceMessage());
        external_logger.log(Level.ERROR, message, "key1", Arrays.asList(LoggingRules.logEveryCount(1)),
                LoggingMergeFunctions.replaceMessage());
    });

    user_logger.flush();
    system_logger.flush();
    external_logger.flush();

    //check it's in ES, wait 10s max for the index to refresh
    final DataBucketBean logging_test_bucket = BucketUtils.convertDataBucketBeanToLogging(test_bucket);
    final IDataWriteService<BasicMessageBean> logging_crud = search_index_service.getDataService().get()
            .getWritableDataService(BasicMessageBean.class, logging_test_bucket, Optional.empty(),
                    Optional.empty())
            .get();
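    //each loop iteration logs 7 user + 7 system messages to the bucket's logging index, hence num_messages_to_log * 14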
    waitForResults(logging_crud, 10, num_messages_to_log * 14);
    assertEquals(num_messages_to_log * 14, logging_crud.countObjects().get().longValue());

    final DataBucketBean logging_external_test_bucket = BucketUtils
            .convertDataBucketBeanToLogging(BeanTemplateUtils.clone(test_bucket)
                    .with(DataBucketBean::full_name, "/external/" + subsystem_name + "/").done());
    final IDataWriteService<BasicMessageBean> logging_crud_external = search_index_service.getDataService()
            .get().getWritableDataService(BasicMessageBean.class, logging_external_test_bucket,
                    Optional.empty(), Optional.empty())
            .get();
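    //the external logger adds 7 messages per iteration to its own /external/ bucket, hence num_messages_to_log * 7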
    waitForResults(logging_crud_external, 10, num_messages_to_log * 7);
    assertEquals(num_messages_to_log * 7, logging_crud_external.countObjects().get().longValue());

    //cleanup
    logging_crud.deleteDatastore().get();
}

From source file: com.ikanow.aleph2.shared.crud.elasticsearch.services.TestElasticsearchCrudService.java

protected static void replenishDocsForDeletion(ICrudService<TestBean> service, int size)
        throws InterruptedException, ExecutionException {

    final List<TestBean> l = IntStream.rangeClosed(0, size - 1).boxed()
            .map(i -> BeanTemplateUtils.build(TestBean.class).with("_id", "id" + i)
                    .with("test_string", "test_string" + i).with("test_long", (Long) (long) i).done().get())
            .collect(Collectors.toList());

    service.storeObjects(l, false).get();

    assertEquals(size, service.countObjects().get().intValue());
}
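
Because the bounds here are computed from a parameter, it is worth noting that rangeClosed returns an empty stream whenever startInclusive is greater than endInclusive; a call with size == 0 becomes rangeClosed(0, -1) and simply stores nothing:

// Empty-range behavior: start above end yields an empty stream.
System.out.println(IntStream.rangeClosed(0, -1).count()); // prints 0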

From source file: com.ikanow.aleph2.shared.crud.elasticsearch.services.TestElasticsearchCrudService.java

protected static void replenishDocsForDeletion_JSON(ICrudService<JsonNode> service)
        throws InterruptedException, ExecutionException {

    final List<JsonNode> l = IntStream.rangeClosed(0, 9).boxed()
            .map(i -> BeanTemplateUtils.build(TestBean.class).with("_id", "id" + i)
                    .with("test_string", "test_string" + i).with("test_long", (Long) (long) i).done().get())
            .map(b -> BeanTemplateUtils.toJson(b)).collect(Collectors.toList());

    service.storeObjects(l, false).get();

    assertEquals(10, service.countObjects().get().intValue());
}

From source file: com.ikanow.aleph2.analytics.services.TestDeduplicationService.java

public void test_puttingItAllTogether_genericPhase() throws InterruptedException {

    // 1) Create 2 "context" buckets

    final DataBucketBean context_bucket1 = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::full_name, "/dedup/context1")
            .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                    .with(DataSchemaBean::document_schema,
                            BeanTemplateUtils.build(DataSchemaBean.DocumentSchemaBean.class).done().get())
                    .done().get())
            .done().get();

    final DataBucketBean context_bucket2 = BeanTemplateUtils.clone(context_bucket1)
            .with(DataBucketBean::full_name, "/dedup/context2").done();

    _service_context.getCoreManagementDbService().getDataBucketStore().deleteDatastore().join();
    _service_context.getCoreManagementDbService().getDataBucketStore().storeObject(context_bucket1, true)
            .join();
    _service_context.getCoreManagementDbService().getDataBucketStore().storeObject(context_bucket2, true)
            .join();
    assertEquals(2, _service_context.getCoreManagementDbService().getDataBucketStore().countObjects().join()
            .intValue());

    IDocumentService doc_service = _service_context.getDocumentService().get();

    IDataWriteService<JsonNode> write_context1 = doc_service.getDataService().get()
            .getWritableDataService(JsonNode.class, context_bucket1, Optional.empty(), Optional.empty()).get();
    IDataWriteService<JsonNode> write_context2 = doc_service.getDataService().get()
            .getWritableDataService(JsonNode.class, context_bucket2, Optional.empty(), Optional.empty()).get();

    write_context1.deleteDatastore();
    write_context2.deleteDatastore();
    for (int i = 0; i < 40; ++i) {
        Thread.sleep(250L);
        if ((write_context1.countObjects().join() + write_context2.countObjects().join()) <= 0) {
            break;
        }
        //System.out.println("?? " + store1.countObjects().join() + "..." + store2.countObjects().join());
    }
    assertEquals(0, write_context1.countObjects().join() + write_context2.countObjects().join());

    // 2) Fill with 50% duplicates, 50% random records

    List<JsonNode> objs_for_context1 = IntStream.rangeClosed(1, num_write_records).boxed().map(i -> {
        final ObjectNode obj = _mapper.createObjectNode();
        obj.put("_id", "id1_" + i);
        obj.put("dup", true);
        obj.put("dup_field", i);
        obj.put("alt_dup_field", i);
        obj.put("@timestamp", 0L);
        return (JsonNode) obj;
    }).collect(Collectors.toList());

    List<JsonNode> objs_for_context2 = IntStream.rangeClosed(1, num_write_records).boxed().map(i -> {
        final ObjectNode obj = _mapper.createObjectNode();
        obj.put("_id", "id2_" + i);
        obj.put("dup", false);
        obj.put("dup_field", i);
        obj.put("alt_dup_field", -i);
        obj.put("@timestamp", 0L);
        return (JsonNode) obj;
    }).collect(Collectors.toList());

    write_context1.storeObjects(objs_for_context1).join();
    write_context2.storeObjects(objs_for_context2).join();

    // OK wait for these writes to be complete

    for (int i = 0; i < 40; ++i) {
        Thread.sleep(250L);
        if ((write_context1.countObjects().join() >= num_write_records)
                && (write_context2.countObjects().join() >= num_write_records)) {
            break;
        }
    }
    assertEquals(500, write_context1.countObjects().join().intValue());
    assertEquals(500, write_context2.countObjects().join().intValue());

    // OK now need to create a shared library bean and insert it

    final SharedLibraryBean bean = BeanTemplateUtils.build(SharedLibraryBean.class)
            .with(SharedLibraryBean::path_name, "/app/aleph2/library/test.jar")
            .with(SharedLibraryBean::batch_enrichment_entry_point, TestDedupEnrichmentModule.class.getName())
            .done().get();

    _service_context.getService(IManagementDbService.class, Optional.empty()).get().getSharedLibraryStore()
            .storeObject(bean, true).join();
}

From source file: com.ikanow.aleph2.analytics.services.TestDeduplicationService.java

public void test_puttingItAllTogether_runTest(final DataBucketBean write_bucket,
        final IEnrichmentModuleContext enrich_context) {
    // OK now create a new batch of objects

    List<Tuple2<Long, IBatchRecord>> incoming_objects = IntStream.rangeClosed(1, 2 * num_write_records).boxed()
            .map(i -> {
                final ObjectNode obj = _mapper.createObjectNode();
                obj.put("_id", "id" + i);
                obj.put("dup", true);
                obj.put("dup_field", i);
                obj.put("alt_dup_field", i);
                obj.put("@timestamp", (0 == (i % 2)) ? 0L : 1L); // (ie alternate new with old - in some cases the old won't update)
                return (0 == (i % 10)) ? Stream.of((JsonNode) obj, (JsonNode) obj) // (every 10th emit a duplicate)
                        : Stream.of((JsonNode) obj);
            }).flatMap(s -> s).map(j -> Tuples._2T(0L, (IBatchRecord) new BatchRecordUtils.JsonBatchRecord(j)))
            .collect(Collectors.toList());

    // Other things we need:

    IEnrichmentBatchModule test_module = new DeduplicationService();

    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .with(EnrichmentControlMetadataBean::name, "custom_test").done().get();

    // Initialize

    test_module.onStageInitialize(enrich_context, write_bucket, control,
            Tuples._2T(ProcessingStage.input, ProcessingStage.output), Optional.empty());

    // Run

    test_module.onObjectBatch(incoming_objects.stream(), Optional.empty(), Optional.empty());

    // (Finish)
    test_module.onStageComplete(true);
}
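
The batch construction above leans on a flatMap trick: each index maps to a one- or two-element Stream, so every 10th record is emitted twice. The same trick isolated into a minimal runnable sketch:

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

public class DuplicateEveryTenth {
    public static void main(String[] args) {
        List<Integer> out = IntStream.rangeClosed(1, 30).boxed()
                .flatMap(i -> (0 == i % 10) ? Stream.of(i, i) : Stream.of(i))
                .collect(Collectors.toList());
        System.out.println(out.size()); // 33: 30 values plus duplicates of 10, 20 and 30
    }
}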