Example usage for java.util.stream IntStream range

List of usage examples for java.util.stream IntStream range

Introduction

In this page you can find the example usage for java.util.stream IntStream range.

Prototype

public static IntStream range(int startInclusive, int endExclusive) 

Source Link

Document

Returns a sequential ordered IntStream from startInclusive (inclusive) to endExclusive (exclusive) by an incremental step of 1.

Usage

From source file:org.lightjason.agentspeak.action.builtin.TestCActionMathBlasMatrix.java

/**
 * test normalized graph-laplacian/*from w w  w  .j  av a2s  .co  m*/
 */
@Test
public final void normalizedgraphlaplacian() {
    final List<ITerm> l_return = new ArrayList<>();

    new CNormalizedGraphLaplacian().execute(false, IContext.EMPTYPLAN,
            Stream.of(new SparseDoubleMatrix2D(new double[][] { { 0, 1, 0, 0, 1, 0 }, { 1, 0, 1, 0, 1, 0 },
                    { 0, 1, 0, 1, 0, 0 }, { 0, 0, 1, 0, 1, 1 }, { 1, 1, 0, 1, 0, 0 }, { 0, 0, 0, 1, 0, 0 } }))
                    .map(CRawTerm::from).collect(Collectors.toList()),
            l_return);

    Assert.assertEquals(l_return.size(), 1);
    final DoubleMatrix2D l_result = l_return.get(0).raw();

    IntStream.range(0, l_result.rows()).boxed()
            .forEach(i -> Assert.assertEquals(l_result.getQuick(i, i), 1, 0));
    IntStream.range(0, l_result.rows()).boxed().map(l_result::viewRow).mapToDouble(DoubleMatrix1D::zSum)
            .forEach(i -> Assert.assertEquals(i, 0, 1e-10));
}

From source file:org.apache.nifi.processors.standard.QueryDatabaseTable.java

/**
 * Builds the SQL SELECT statement for an incremental table fetch, appending a WHERE
 * clause that restricts rows to those beyond the last-observed maximum column values.
 *
 * @param dbAdapter         database-specific adapter generating the base SELECT and typed literals
 * @param tableName         name of the table to query; must not be empty
 * @param columnNames       comma-separated list of columns to select (passed through to the adapter)
 * @param maxValColumnNames columns whose maximum values are tracked between runs; may be null
 * @param stateMap          previously observed maximum values, keyed by state key; may be null or empty
 * @return the generated SQL query string
 * @throws IllegalArgumentException if the table name is empty, or a tracked column has no
 *                                  entry in {@code columnTypeMap}
 */
protected String getQuery(DatabaseAdapter dbAdapter, String tableName, String columnNames,
        List<String> maxValColumnNames, Map<String, String> stateMap) {
    if (StringUtils.isEmpty(tableName)) {
        throw new IllegalArgumentException("Table name must be specified");
    }
    final StringBuilder query = new StringBuilder(
            dbAdapter.getSelectStatement(tableName, columnNames, null, null, null, null));

    // Check state map for last max values
    if (stateMap != null && !stateMap.isEmpty() && maxValColumnNames != null) {
        List<String> whereClauses = new ArrayList<>(maxValColumnNames.size());
        IntStream.range(0, maxValColumnNames.size()).forEach((index) -> {
            String colName = maxValColumnNames.get(index);
            String maxValueKey = getStateKey(tableName, colName);
            String maxValue = stateMap.get(maxValueKey);
            if (StringUtils.isEmpty(maxValue)) {
                // If we can't find the value at the fully-qualified key name, it is possible (under a previous scheme)
                // the value has been stored under a key that is only the column name. Fall back to check the column name; either way, when a new
                // maximum value is observed, it will be stored under the fully-qualified key from then on.
                // NOTE(review): toLowerCase() uses the default locale — confirm legacy keys are ASCII-only.
                maxValue = stateMap.get(colName.toLowerCase());
            }
            if (!StringUtils.isEmpty(maxValue)) {
                Integer type = columnTypeMap.get(maxValueKey);
                if (type == null) {
                    // This shouldn't happen as we are populating columnTypeMap when the processor is scheduled.
                    throw new IllegalArgumentException("No column type found for: " + colName);
                }
                // Add a condition for the WHERE clause.
                // The first tracked column uses strict ">" (its previous max was already fetched);
                // subsequent columns use ">=" so ties on earlier columns are not skipped.
                whereClauses.add(colName + (index == 0 ? " > " : " >= ")
                        + getLiteralByType(type, maxValue, dbAdapter.getName()));
            }
        });
        if (!whereClauses.isEmpty()) {
            query.append(" WHERE ");
            query.append(StringUtils.join(whereClauses, " AND "));
        }
    }

    return query.toString();
}

From source file:org.ligoj.app.plugin.vm.aws.VmAwsSnapshotResource.java

/**
 * Convert a XML AMI mapping device to {@link VolumeSnapshot} instance.
 *///from w w  w .j a  v a2  s.  co m
/**
 * Convert a XML AMI mapping device to {@link VolumeSnapshot} instance.
 *
 * @param element XML element describing the block device mapping.
 * @return the converted {@link VolumeSnapshot}, with EBS details filled in when present.
 */
private VolumeSnapshot toVolume(final Element element) {
    final VolumeSnapshot snapshot = new VolumeSnapshot();
    snapshot.setName(xml.getTagText(element, "deviceName"));

    // EBS details are optional: only the first "ebs" child, when present, is read
    final NodeList ebs = element.getElementsByTagName("ebs");
    if (ebs.getLength() > 0) {
        final Element ebsElement = (Element) ebs.item(0);
        snapshot.setId(xml.getTagText(ebsElement, "snapshotId"));
        snapshot.setSize(
                Integer.valueOf(StringUtils.defaultString(xml.getTagText(ebsElement, "volumeSize"), "0")));
    }

    return snapshot;
}

From source file:com.ikanow.aleph2.storage_service_hdfs.services.TestMockHdfsStorageSystem.java

/**
 * Verifies age-out (time-based deletion) of stored bucket data: builds a bucket whose
 * raw/json/processed sub-schemas have different {@code exist_age_max} settings, creates
 * day-stamped directories 4..9 days old, and checks that handleAgeOutRequest deletes
 * exactly the directories older than each schema's limit. Also covers the no-op re-run,
 * a bucket without temporal settings, and an unparseable age setting.
 */
@Test
public void test_ageOut() throws IOException, InterruptedException, ExecutionException {
    // 0) Setup
    final String temp_dir = System.getProperty("java.io.tmpdir") + File.separator;

    final GlobalPropertiesBean globals = BeanTemplateUtils.build(GlobalPropertiesBean.class)
            .with(GlobalPropertiesBean::local_yarn_config_dir, temp_dir)
            .with(GlobalPropertiesBean::distributed_root_dir, temp_dir)
            .with(GlobalPropertiesBean::local_root_dir, temp_dir)
            .with(GlobalPropertiesBean::distributed_root_dir, temp_dir).done().get();

    final MockHdfsStorageService storage_service = new MockHdfsStorageService(globals);

    // 1) Set up bucket (code taken from management_db_service)
    // raw keeps 9 days, json keeps 6 days, processed keeps 1 week (7 days)
    final DataBucketBean bucket = BeanTemplateUtils.build(DataBucketBean.class)
            .with(DataBucketBean::full_name, "/test/age/out/bucket")
            .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class)
                    .with(DataSchemaBean::storage_schema, BeanTemplateUtils.build(StorageSchemaBean.class)
                            .with(StorageSchemaBean::raw,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "9 days")
                                            .done().get())
                            .with(StorageSchemaBean::json,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "6 days")
                                            .done().get())
                            .with(StorageSchemaBean::processed,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "1 week")
                                            .done().get())
                            .done().get())
                    .done().get())
            .done().get();

    // start from a clean directory tree for this bucket
    FileUtils.deleteDirectory(new File(System.getProperty("java.io.tmpdir") + File.separator + "/data/"
            + File.separator + bucket.full_name()));
    setup_bucket(storage_service, bucket, Arrays.asList("$sec_test"));
    final String bucket_path = System.getProperty("java.io.tmpdir") + File.separator + "/data/" + File.separator
            + bucket.full_name();
    assertTrue("The file path has been created", new File(bucket_path + "/managed_bucket").exists());

    // create one day-stamped directory per suffix for each age 4..9 days in the past
    final long now = new Date().getTime();
    IntStream.range(4, 10).boxed().map(i -> now - (i * 1000L * 3600L * 24L))
            .forEach(Lambdas.wrap_consumer_u(n -> {
                final String pattern = TimeUtils.getTimeBasedSuffix(TimeUtils.getTimePeriod("1 day").success(),
                        Optional.empty());
                final String dir = DateUtils.formatDate(new Date(n), pattern);

                FileUtils.forceMkdir(
                        new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_RAW + "/" + dir));
                FileUtils.forceMkdir(
                        new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_JSON + "/" + dir));
                FileUtils.forceMkdir(
                        new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED + "/" + dir));
                FileUtils.forceMkdir(new File(bucket_path + "/"
                        + IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY + "/sec_test/" + dir)); // (mini test for secondary)
            }));

    // (7 cos includes root)
    assertEquals(7,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_RAW),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(7,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_JSON),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(7,
            FileUtils.listFilesAndDirs(
                    new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(7,
            FileUtils.listFilesAndDirs(new File(
                    bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY + "/sec_test/"),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());

    // 1) Normal run:

    CompletableFuture<BasicMessageBean> cf = storage_service.getDataService().get().handleAgeOutRequest(bucket);

    BasicMessageBean res = cf.get();

    assertEquals(true, res.success());
    assertTrue("sensible message: " + res.message(), res.message().contains("raw: deleted 1 "));
    assertTrue("sensible message: " + res.message(), res.message().contains("json: deleted 4 "));
    assertTrue("sensible message: " + res.message(), res.message().contains("processed: deleted 3 "));

    assertTrue("Message marked as loggable: " + res.details(),
            Optional.ofNullable(res.details()).filter(m -> m.containsKey("loggable")).isPresent());

    System.out.println("Return from to delete: " + res.message());

    //(+1 including root)
    assertEquals(6,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_RAW),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(3,
            FileUtils.listFilesAndDirs(new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_JSON),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(4,
            FileUtils.listFilesAndDirs(
                    new File(bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());
    assertEquals(4,
            FileUtils.listFilesAndDirs(new File(
                    bucket_path + "/" + IStorageService.STORED_DATA_SUFFIX_PROCESSED_SECONDARY + "/sec_test/"),
                    DirectoryFileFilter.DIRECTORY, TrueFileFilter.INSTANCE).size());

    // 2) Run it again, returns success but not loggable:

    CompletableFuture<BasicMessageBean> cf2 = storage_service.getDataService().get()
            .handleAgeOutRequest(bucket);

    BasicMessageBean res2 = cf2.get();

    assertEquals(true, res2.success());
    assertTrue("sensible message: " + res2.message(), res2.message().contains("raw: deleted 0 "));
    assertTrue("sensible message: " + res2.message(), res2.message().contains("json: deleted 0 "));
    assertTrue("sensible message: " + res2.message(), res2.message().contains("processed: deleted 0 "));
    assertTrue("Message _not_ marked as loggable: " + res2.details(),
            !Optional.ofNullable(res2.details()).map(m -> m.get("loggable")).isPresent());

    // 3) No temporal settings

    final DataBucketBean bucket3 = BeanTemplateUtils.build(DataBucketBean.class)
            .with("full_name", "/test/handle/age/out/delete/not/temporal")
            .with(DataBucketBean::data_schema, BeanTemplateUtils.build(DataSchemaBean.class).done().get())
            .done().get();

    CompletableFuture<BasicMessageBean> cf3 = storage_service.getDataService().get()
            .handleAgeOutRequest(bucket3);
    BasicMessageBean res3 = cf3.get();
    // no temporal settings => returns success
    assertEquals(true, res3.success());

    // 4) Unparseable temporal settings (in theory won't validate but we can test here)

    final DataBucketBean bucket4 = BeanTemplateUtils.build(DataBucketBean.class)
            .with("full_name", "/test/handle/age/out/delete/temporal/malformed")
            .with(DataBucketBean::data_schema,
                    BeanTemplateUtils.build(DataSchemaBean.class).with(DataSchemaBean::storage_schema,
                            BeanTemplateUtils.build(StorageSchemaBean.class).with(StorageSchemaBean::json,
                                    BeanTemplateUtils.build(StorageSchemaBean.StorageSubSchemaBean.class)
                                            .with(StorageSchemaBean.StorageSubSchemaBean::exist_age_max,
                                                    "bananas")
                                            .done().get())
                                    .done().get())
                            .done().get())
            .done().get();

    CompletableFuture<BasicMessageBean> cf4 = storage_service.getDataService().get()
            .handleAgeOutRequest(bucket4);
    BasicMessageBean res4 = cf4.get();
    // malformed temporal settings => returns failure
    assertEquals(false, res4.success());

}

From source file:alfio.manager.AdminReservationManagerIntegrationTest.java

/**
 * Generates a list of synthetic attendees for test reservations.
 *
 * @param count number of attendees to generate.
 * @return the generated attendees, indexed 0..count-1.
 */
private List<Attendee> generateAttendees(int count) {
    return IntStream.range(0, count)
            .mapToObj(index -> new Attendee(null, "Attendee " + index, "Test" + index,
                    "attendee" + index + "@test.ch", "en", false, null, Collections.emptyMap()))
            .collect(toList());
}

From source file:com.ikanow.aleph2.management_db.mongodb.services.TestIkanowV1SyncService_TestBuckets.java

/**
 * Inserts synthetic JSON documents into the v2 output index, then pauses so the
 * (asynchronous) store can settle before the caller queries it.
 *
 * @param data_bucket the bucket under test (not read here; kept for call-site symmetry)
 */
private void insertFakeOutputData(ICrudService<JsonNode> v2_output_index, final DataBucketBean data_bucket,
        int num_objects_to_insert) throws InterruptedException, ExecutionException {
    final List<JsonNode> test_objects = IntStream.range(0, num_objects_to_insert)
            .mapToObj(n -> _mapper.createObjectNode().put("test", "test" + n).put("_id", "a" + n))
            .collect(Collectors.toList());
    v2_output_index.storeObjects(test_objects).get();
    // give the index time to absorb the writes before any follow-up reads
    Thread.sleep(10000);
    _logger.debug("Inserted: " + num_objects_to_insert + " into test output db");
}

From source file:org.ligoj.app.plugin.vm.aws.VmAwsSnapshotResource.java

/**
 * Parse <code>DescribeImagesResponse</code> response to {@link Snapshot} list.
 *
 * @param amisAsXml/*  w w w .j av a2 s  .  c  o  m*/
 *            AMI descriptions as XML.
 * @return The parsed AMI as {@link Snapshot}.
 */
/**
 * Parse <code>DescribeImagesResponse</code> response to {@link Snapshot} list.
 *
 * @param amisAsXml
 *            AMI descriptions as XML.
 * @return The parsed AMI as {@link Snapshot}.
 */
private List<Snapshot> toAmis(final String amisAsXml)
        throws XPathExpressionException, SAXException, IOException, ParserConfigurationException {
    // An empty payload is replaced by an empty "imagesSet" so the XPath evaluation still succeeds
    final String safeXml = StringUtils.defaultIfEmpty(amisAsXml,
            "<DescribeImagesResponse><imagesSet></imagesSet></DescribeImagesResponse>");
    final NodeList items = xml.getXpath(safeXml, "/DescribeImagesResponse/imagesSet/item");
    return IntStream.range(0, items.getLength())
            .mapToObj(index -> (Element) items.item(index))
            .map(this::toAmi)
            .collect(Collectors.toList());
}

From source file:com.vsthost.rnd.commons.math.ext.linear.DMatrixUtils.java

/**
 * Consumes the length of an array and returns its shuffled indices.
 *
 * @param length The length of the arrays of which indices to be shuffled.
 * @param randomGenerator Random number generator.
 * @return Shuffled indices.//from   ww w  . j a v  a 2s.c o m
 */
/**
 * Consumes the length of an array and returns its shuffled indices.
 *
 * @param length The length of the arrays of which indices to be shuffled.
 * @param randomGenerator Random number generator driving the shuffle.
 * @return Shuffled indices.
 */
public static int[] shuffleIndices(int length, RandomGenerator randomGenerator) {
    // Build the identity permutation [0, 1, ..., length - 1]:
    final int[] indices = new int[length];
    for (int position = 0; position < length; position++) {
        indices[position] = position;
    }

    // Shuffle in place with the supplied generator and return:
    MathArrays.shuffle(indices, randomGenerator);
    return indices;
}

From source file:org.lightjason.agentspeak.action.builtin.TestCActionMathBlasMatrix.java

/**
 * test identity/* w  w w .  j a  va  2s  .c  om*/
 */
@Test
public final void identity() {
    final int l_size = Math.abs(new Random().nextInt(98) + 2);
    final List<ITerm> l_return = new ArrayList<>();

    new CIdentity().execute(false, IContext.EMPTYPLAN,
            Stream.of(l_size).map(CRawTerm::from).collect(Collectors.toList()), l_return);

    Assert.assertEquals(l_return.size(), 1);
    final DoubleMatrix2D l_result = l_return.get(0).raw();

    Assert.assertTrue(IntStream.range(0, l_result.rows()).boxed()
            .flatMap(i -> IntStream.range(0, l_result.columns()).boxed()
                    .map(j -> i.equals(j) ? l_result.getQuick(i, j) == 1D : l_result.getQuick(i, j) == 0D))
            .allMatch(i -> i));
}

From source file:org.everit.json.schema.loader.SchemaLoader.java

/**
 * Attempts to interpret the current schema JSON as a combined schema
 * ("allOf" / "anyOf" / "oneOf").
 *
 * @return the combined schema builder when exactly one combining keyword is present
 *         (merged with a base schema via allOf when a "type" or sniffable properties
 *         also exist), or {@code null} when no combining keyword is present
 * @throws SchemaException if more than one combining keyword is present
 */
private CombinedSchema.Builder tryCombinedSchema() {
    // collect which of the combining keywords actually appear in the schema JSON
    List<String> presentKeys = COMBINED_SUBSCHEMA_PROVIDERS.keySet().stream().filter(schemaJson::has)
            .collect(Collectors.toList());
    if (presentKeys.size() > 1) {
        throw new SchemaException(
                String.format("expected at most 1 of 'allOf', 'anyOf', 'oneOf', %d found", presentKeys.size()));
    } else if (presentKeys.size() == 1) {
        String key = presentKeys.get(0);
        JSONArray subschemaDefs = schemaJson.getJSONArray(key);
        // load each entry of the keyword's array as a child schema
        Collection<Schema> subschemas = IntStream.range(0, subschemaDefs.length())
                .mapToObj(subschemaDefs::getJSONObject).map(this::loadChild).map(Schema.Builder::build)
                .collect(Collectors.toList());
        CombinedSchema.Builder combinedSchema = COMBINED_SUBSCHEMA_PROVIDERS.get(key).apply(subschemas);
        // a sibling "type" (or recognizable property keywords) forms a base schema
        // that must hold in addition to the combined one
        Schema.Builder<?> baseSchema;
        if (schemaJson.has("type")) {
            baseSchema = loadForType(schemaJson.get("type"));
        } else {
            baseSchema = sniffSchemaByProps();
        }
        if (baseSchema == null) {
            return combinedSchema;
        } else {
            return CombinedSchema.allOf(Arrays.asList(baseSchema.build(), combinedSchema.build()));
        }
    } else {
        return null;
    }
}