Example usage for java.util.stream Stream concat

List of usage examples for java.util.stream Stream concat

Introduction

This page collects example usages of java.util.stream Stream.concat.

Prototype

public static <T> Stream<T> concat(Stream<? extends T> a, Stream<? extends T> b) 

Document

Creates a lazily concatenated stream whose elements are all the elements of the first stream followed by all the elements of the second stream.
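
Before the real-world examples, here is a minimal sketch of the basic call. It is illustrative only; the class name, lists, and variable names are hypothetical and not taken from the examples below.

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class StreamConcatSketch {
    public static void main(String[] args) {
        List<String> first = Arrays.asList("a", "b");
        List<String> second = Arrays.asList("c", "d");

        // Lazily concatenates the two streams: all elements of 'first'
        // are emitted before any element of 'second'.
        List<String> combined = Stream.concat(first.stream(), second.stream())
                .collect(Collectors.toList());

        System.out.println(combined); // prints [a, b, c, d]
    }
}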

Usage

From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java

/** Creates a mapping for the bucket - columnar elements
 *  ALSO INCLUDES THE PER-FIELD CONFIGURATION FROM THE SEARCH_INDEX_SCHEMA AND TEMPORAL_SCHEMA
 * @param bucket
 * @return
 * @throws IOException 
 */
public static XContentBuilder getColumnarMapping(final DataBucketBean bucket,
        Optional<XContentBuilder> to_embed,
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups,
        final JsonNode enabled_not_analyzed, final JsonNode enabled_analyzed,
        final JsonNode default_not_analyzed, final JsonNode default_analyzed,
        final Optional<JsonNode> doc_schema, final SearchIndexSchemaDefaultBean search_index_schema_override,
        final ObjectMapper mapper, final String index_type) {
    try {
        final XContentBuilder start = to_embed.orElse(XContentFactory.jsonBuilder().startObject());
        final boolean columnar_enabled = Optional.ofNullable(bucket.data_schema())
                .map(DataSchemaBean::columnar_schema).filter(s -> Optional.ofNullable(s.enabled()).orElse(true))
                .isPresent();

        final Map<Either<String, Tuple2<String, String>>, String> type_override = Optionals
                .of(() -> bucket.data_schema().search_index_schema().type_override()).map(m -> buildTypeMap(m))
                .orElse(Collections.emptyMap());

        // If no columnar settings are specified then go with a sensible default
        final Optional<DataSchemaBean.ColumnarSchemaBean> maybe_user_columnar_schema = Optionals
                .of(() -> bucket.data_schema().columnar_schema());
        final DataSchemaBean.ColumnarSchemaBean columnar_schema = maybe_user_columnar_schema
                .filter(__ -> columnar_enabled).filter(schema -> (null == schema.field_include_list()) && // ie the entire thing is empty
                        (null == schema.field_exclude_list()) && (null == schema.field_include_pattern_list())
                        && (null == schema.field_type_include_list())
                        && (null == schema.field_exclude_pattern_list())
                        && (null == schema.field_type_exclude_list()))
                .map(schema -> BeanTemplateUtils.clone(schema)
                        .with(DataSchemaBean.ColumnarSchemaBean::field_type_include_list,
                                Arrays.asList("string", "number", "date"))
                        .done())
                .orElseGet(() -> maybe_user_columnar_schema.orElse(null)) // (NOTE: can only be null if columnar_enabled is false)
        ;

        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups_pretypes = Stream
                .of(columnar_enabled
                        ? createFieldIncludeLookups(
                                Optionals.ofNullable(columnar_schema.field_include_list()).stream(),
                                fn -> getKey(fn), field_lookups, enabled_not_analyzed, enabled_analyzed, true,
                                search_index_schema_override, type_override, mapper, index_type)
                        : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldExcludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_exclude_list()).stream(),
                                        fn -> getKey(fn), field_lookups, search_index_schema_override,
                                        type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldIncludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_include_pattern_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                                        enabled_not_analyzed, enabled_analyzed, true,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldIncludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_type_include_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T("*", fn)), field_lookups,
                                        enabled_not_analyzed, enabled_analyzed, true,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldExcludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_exclude_pattern_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T(fn, "*")), field_lookups,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),
                        columnar_enabled
                                ? createFieldExcludeLookups(
                                        Optionals.ofNullable(columnar_schema.field_type_exclude_list())
                                                .stream(),
                                        fn -> Either.right(Tuples._2T("*", fn)), field_lookups,
                                        search_index_schema_override, type_override, mapper, index_type)
                                : Stream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>empty(),

                        // Finally add the default columnar lookups to the unmentioned strings (ensures that *_* is at the end)

                        field_lookups.entrySet().stream()
                                .flatMap(kv -> createFieldIncludeLookups(Stream.of(kv.getKey().toString()),
                                        __ -> kv.getKey(), field_lookups, default_not_analyzed,
                                        default_analyzed, false, search_index_schema_override, type_override,
                                        mapper, index_type)))
                .flatMap(x -> x).collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2(), (v1, v2) -> v1, // (ie ignore duplicates)
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));

        // Also any types that didn't map onto one of the fields or tokens:
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups_types = type_override
                .entrySet().stream()
                // (filter - convert name/* to name/type and check if I've already created such an entry using the type map)
                .filter(kv -> !column_lookups_pretypes
                        .containsKey(kv.getKey().either(s -> s, t2 -> Tuples._2T(t2._1(), kv.getValue()))))
                .flatMap(kv -> createFieldIncludeLookups(Stream.of(kv.getKey().toString()),
                        __ -> kv.getKey().<Either<String, Tuple2<String, String>>>either(s -> Either.left(s),
                                t2 -> Either.right(Tuples._2T(t2._1(), kv.getValue()))),
                        field_lookups, default_not_analyzed, default_analyzed, false,
                        search_index_schema_override, type_override, mapper, index_type))
                .collect(Collectors.toMap(t2 -> t2._1(), t2 -> t2._2(), (v1, v2) -> v1,
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));

        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> column_lookups = Stream
                .concat(column_lookups_pretypes.entrySet().stream(), column_lookups_types.entrySet().stream())
                .sorted((a, b) -> Integer.compare(sortKey(a.getKey()), sortKey(b.getKey())))
                .collect(Collectors.toMap(t2 -> t2.getKey(), t2 -> t2.getValue(), (v1, v2) -> v1,
                        () -> new LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode>()));

        final XContentBuilder properties = column_lookups.entrySet().stream()
                // properties not dynamic_templates
                .filter(kv -> kv.getKey().isLeft())
                // overwrite with version built using columns if it exists
                .map(kv -> Tuples._2T(kv.getKey(),
                        column_lookups.getOrDefault(kv.getKey(), kv.getValue())))
                .reduce(Optional.of(start.startObject("properties")) // add doc_schema if it exists
                        .map(props -> doc_schema
                                .map(ds -> Optionals.streamOf(ds.fields(), false)
                                        .reduce(props,
                                                Lambdas.wrap_u((acc, kv) -> acc.rawField(kv.getKey(),
                                                        kv.getValue().toString().getBytes())),
                                                (acc1, acc2) -> acc1 // shouldn't be possible
                                )).orElse(props)).get(),
                        Lambdas.wrap_u((acc, t2) -> acc.rawField(t2._1().left().value(),
                                t2._2().toString().getBytes())), // (left by construction) 
                        (acc1, acc2) -> acc1) // (not actually possible)
                .endObject();

        final XContentBuilder templates = column_lookups.entrySet().stream()
                // properties not dynamic_templates
                .filter(kv -> kv.getKey().isRight())
                // overwrite with version built using columns if it exists
                .map(kv -> Tuples._2T(kv.getKey(), column_lookups.getOrDefault(kv.getKey(), kv.getValue())))
                .reduce(properties.startArray("dynamic_templates"),
                        Lambdas.wrap_u((acc, t2) -> acc.startObject()
                                .rawField(getFieldNameFromMatchPair(t2._1().right().value()),
                                        t2._2().toString().getBytes()) // (right by construction)
                                .endObject()),
                        (acc1, acc2) -> acc1) // (not actually possible)
                .endArray();

        return templates;
    } catch (IOException e) {
        // Handle the in-practice-impossible IOException
        return null;
    }
}

From source file:com.ikanow.aleph2.storage_service_hdfs.services.TestMockHdfsStorageSystem.java

/**
 * @param storage_service
 * @param bucket
 * @param extra_suffixes - entries starting with $ indicate a new secondary buffer; otherwise each is a normal suffix
 */
protected void setup_bucket(MockHdfsStorageService storage_service, final DataBucketBean bucket,
        List<String> extra_suffixes) {
    final FileContext dfs = storage_service.getUnderlyingPlatformDriver(FileContext.class, Optional.empty())
            .get();

    final String bucket_root = storage_service.getBucketRootPath() + "/" + bucket.full_name();

    //(first delete root path)
    try {
        dfs.delete(new Path(bucket_root), true);
    } catch (Exception e) {
        // ignore - the root path may not exist yet
    }

    Stream.concat(
            Arrays.asList("/managed_bucket", "/managed_bucket/logs", "/managed_bucket/logs/harvest",
                    "/managed_bucket/logs/enrichment", "/managed_bucket/logs/storage", "/managed_bucket/assets",
                    "/managed_bucket/import", "/managed_bucket/import/stored",
                    "/managed_bucket/import/stored/raw/current", "/managed_bucket/import/stored/json/current",
                    "/managed_bucket/import/stored/processed/current",
                    "/managed_bucket/import/transient/current", "/managed_bucket/import/ready",
                    "/managed_bucket/import/temp").stream(),
            extra_suffixes.stream()
                    .flatMap(
                            s -> s.startsWith("$")
                                    ? Stream.of("/managed_bucket/import/stored/raw/" + s.substring(1),
                                            "/managed_bucket/import/stored/json/" + s.substring(1),
                                            "/managed_bucket/import/stored/processed/" + s.substring(1),
                                            "/managed_bucket/import/transient/" + s.substring(1))
                                    : Stream.of(s)))
            .map(s -> new Path(bucket_root + s))
            .forEach(Lambdas.wrap_consumer_u(p -> dfs.mkdir(p, FsPermission.getDefault(), true)));
}

From source file:org.hawkular.metrics.core.service.MetricsServiceITest.java

@Test
public void findSimpleGaugeStatsByTags() {
    NumericDataPointCollector.createPercentile = InMemoryPercentileWrapper::new;

    String tenantId = "findGaugeStatsByTags";
    DateTime start = now().minusMinutes(10);

    Metric<Double> m1 = new Metric<>(new MetricId<>(tenantId, GAUGE, "M1"),
            asList(new DataPoint<>(start.getMillis(), 12.23),
                    new DataPoint<>(start.plusMinutes(1).getMillis(), 9.745),
                    new DataPoint<>(start.plusMinutes(2).getMillis(), 14.01),
                    new DataPoint<>(start.plusMinutes(3).getMillis(), 16.18),
                    new DataPoint<>(start.plusMinutes(4).getMillis(), 18.94)));
    doAction(() -> metricsService.addDataPoints(GAUGE, Observable.just(m1)));
    doAction(() -> metricsService.addTags(m1, ImmutableMap.of("type", "cpu_usage", "node", "server1")));

    Metric<Double> m2 = new Metric<>(new MetricId<>(tenantId, GAUGE, "M2"),
            asList(new DataPoint<>(start.getMillis(), 15.47),
                    new DataPoint<>(start.plusMinutes(1).getMillis(), 8.08),
                    new DataPoint<>(start.plusMinutes(2).getMillis(), 14.39),
                    new DataPoint<>(start.plusMinutes(3).getMillis(), 17.76),
                    new DataPoint<>(start.plusMinutes(4).getMillis(), 17.502)));
    doAction(() -> metricsService.addDataPoints(GAUGE, Observable.just(m2)));
    doAction(() -> metricsService.addTags(m2, ImmutableMap.of("type", "cpu_usage", "node", "server2")));

    Metric<Double> m3 = new Metric<>(new MetricId<>(tenantId, GAUGE, "M3"),
            asList(new DataPoint<>(start.getMillis(), 11.456),
                    new DataPoint<>(start.plusMinutes(1).getMillis(), 18.32)));
    doAction(() -> metricsService.addDataPoints(GAUGE, Observable.just(m3)));
    doAction(() -> metricsService.addTags(m3, ImmutableMap.of("type", "cpu_usage", "node", "server3")));

    Buckets buckets = Buckets.fromCount(start.getMillis(), start.plusMinutes(5).getMillis(), 1);
    Map<String, String> tagFilters = ImmutableMap.of("type", "cpu_usage", "node", "server1|server2");

    List<List<NumericBucketPoint>> actual = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.GAUGE, tagFilters, start.getMillis(),
                    start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(), false));

    assertEquals(actual.size(), 1);

    List<NumericBucketPoint> expected = Arrays
            .asList(createSingleBucket(Stream.concat(m1.getDataPoints().stream(), m2.getDataPoints().stream())
                    .collect(Collectors.toList()), start, start.plusMinutes(5)));

    assertNumericBucketsEquals(actual.get(0), expected);
}

From source file:org.jamocha.dn.compiler.pathblocks.PathBlocks.java

protected static List<PathRule> createOutput(final List<Either<Rule, ExistentialProxy>> rules,
        final PathBlockSet resultBlockSet) {
    final Function<? super Block, ? extends Integer> characteristicNumber = block -> block
            .getFlatFilterInstances().size() / block.getRulesOrProxies().size();
    final TreeMap<Integer, CursorableLinkedList<Block>> blockMap = resultBlockSet.getBlocks().stream()
            .collect(groupingBy(characteristicNumber, TreeMap::new, toCollection(CursorableLinkedList::new)));
    // iterate over all the filter proxies ever used
    for (final FilterProxy filterProxy : FilterProxy.getFilterProxies()) {
        final Set<ExistentialProxy> existentialProxies = filterProxy.getProxies();
        // determine the largest characteristic number of the blocks containing filter instances
        // of one of the existential proxies (choice is arbitrary, since the filters and the
        // conflicts are identical if they belong to the same filter).
        final OptionalInt optMax = resultBlockSet.getRuleInstanceToBlocks()
                .computeIfAbsent(Either.right(existentialProxies.iterator().next()), newHashSet()).stream()
                .mapToInt(composeToInt(characteristicNumber, Integer::intValue)).max();
        if (!optMax.isPresent())
            continue;
        final int eCN = optMax.getAsInt();
        // get the list to append the blocks using the existential closure filter INSTANCE to
        final CursorableLinkedList<Block> targetList = blockMap.get(eCN);
        // for every existential part
        for (final ExistentialProxy existentialProxy : existentialProxies) {
            final FilterInstance exClosure = existentialProxy.getExistentialClosure();
            // create a list storing the blocks to move
            final List<Block> toMove = new ArrayList<>();
            for (final CursorableLinkedList<Block> blockList : blockMap.headMap(eCN, true).values()) {
                // iterate over the blocks in the current list
                for (final ListIterator<Block> iterator = blockList.listIterator(); iterator.hasNext();) {
                    final Block current = iterator.next();
                    // if the current block uses the current existential closure filter
                    // INSTANCE, it has to be moved
                    if (current.getFlatFilterInstances().contains(exClosure)) {
                        iterator.remove();
                        toMove.add(current);
                    }
                }
            }
            // append the blocks to be moved (they were only removed so far)
            targetList.addAll(toMove);
        }
    }
    final Set<FilterInstance> constructedFIs = new HashSet<>();
    final Map<Either<Rule, ExistentialProxy>, Map<FilterInstance, Set<FilterInstance>>> ruleToJoinedWith = new HashMap<>();
    final Map<Set<FilterInstance>, PathFilterList> joinedWithToComponent = new HashMap<>();
    // at this point, the network can be constructed
    for (final CursorableLinkedList<Block> blockList : blockMap.values()) {
        for (final Block block : blockList) {
            final List<Either<Rule, ExistentialProxy>> blockRules = Lists
                    .newArrayList(block.getRulesOrProxies());
            final Set<List<FilterInstance>> filterInstanceColumns = Block
                    .getFilterInstanceColumns(block.getFilters(), block.getRuleToFilterToRow(), blockRules);
            // since we are considering blocks, it is either the case that all filter
            // instances of the column have been constructed or none of them have
            final PathSharedListWrapper sharedListWrapper = new PathSharedListWrapper(blockRules.size());
            final Map<Either<Rule, ExistentialProxy>, PathSharedList> ruleToSharedList = IntStream
                    .range(0, blockRules.size()).boxed()
                    .collect(toMap(blockRules::get, sharedListWrapper.getSharedSiblings()::get));
            final List<List<FilterInstance>> columnsToConstruct, columnsAlreadyConstructed;
            {
                final Map<Boolean, List<List<FilterInstance>>> partition = filterInstanceColumns.stream()
                        .collect(partitioningBy(column -> Collections.disjoint(column, constructedFIs)));
                columnsAlreadyConstructed = partition.get(Boolean.FALSE);
                columnsToConstruct = partition.get(Boolean.TRUE);
            }

            if (!columnsAlreadyConstructed.isEmpty()) {
                final Map<PathSharedList, LinkedHashSet<PathFilterList>> sharedPart = new HashMap<>();
                for (final List<FilterInstance> column : columnsAlreadyConstructed) {
                    for (final FilterInstance fi : column) {
                        sharedPart
                                .computeIfAbsent(ruleToSharedList.get(fi.getRuleOrProxy()), newLinkedHashSet())
                                .add(joinedWithToComponent
                                        .get(ruleToJoinedWith.get(fi.getRuleOrProxy()).get(fi)));
                    }
                }
                sharedListWrapper.addSharedColumns(sharedPart);
            }

            for (final List<FilterInstance> column : columnsToConstruct) {
                sharedListWrapper.addSharedColumn(column.stream().collect(
                        toMap(fi -> ruleToSharedList.get(fi.getRuleOrProxy()), FilterInstance::convert)));
            }
            constructedFIs.addAll(block.getFlatFilterInstances());
            for (final Entry<Either<Rule, ExistentialProxy>, Map<Filter, FilterInstancesSideBySide>> entry : block
                    .getRuleToFilterToRow().entrySet()) {
                final Either<Rule, ExistentialProxy> rule = entry.getKey();
                final Set<FilterInstance> joined = entry.getValue().values().stream()
                        .flatMap(sbs -> sbs.getInstances().stream()).collect(toSet());
                final Map<FilterInstance, Set<FilterInstance>> joinedWithMapForThisRule = ruleToJoinedWith
                        .computeIfAbsent(rule, newHashMap());
                joined.forEach(fi -> joinedWithMapForThisRule.put(fi, joined));
                joinedWithToComponent.put(joined, ruleToSharedList.get(rule));
            }
        }
    }
    final List<PathRule> pathRules = new ArrayList<>();
    for (final Either<Rule, ExistentialProxy> either : rules) {
        if (either.isRight()) {
            continue;
        }
        final List<PathFilterList> pathFilterLists = Stream
                .concat(either.left().get().existentialProxies.values().stream().map(p -> Either.right(p)),
                        Stream.of(either))
                .flatMap(e -> ruleToJoinedWith.getOrDefault(e, Collections.emptyMap()).values().stream()
                        .distinct())
                .map(joinedWithToComponent::get).collect(toList());
        pathRules.add(either.left().get().getOriginal().toPathRule(PathFilterList.toSimpleList(pathFilterLists),
                pathFilterLists.size() > 1 ? InitialFactPathsFinder.gather(pathFilterLists)
                        : Collections.emptySet()));
    }
    return pathRules;
}

From source file:com.ikanow.aleph2.analytics.services.AnalyticsContext.java

@Override
public CompletableFuture<Map<String, String>> getAnalyticsLibraries(final Optional<DataBucketBean> bucket,
        final Collection<AnalyticThreadJobBean> jobs) {
    if (_state_name == State.IN_TECHNOLOGY) {

        final String name_or_id = jobs.stream().findFirst().get().analytic_technology_name_or_id();

        final SingleQueryComponent<SharedLibraryBean> tech_query = CrudUtils.anyOf(SharedLibraryBean.class)
                .when(SharedLibraryBean::_id, name_or_id).when(SharedLibraryBean::path_name, name_or_id);

        final List<SingleQueryComponent<SharedLibraryBean>> other_libs = Stream
                .concat(Optional.ofNullable(jobs.stream().findFirst().get().module_name_or_id()).map(Stream::of)
                        .orElseGet(Stream::empty),
                        Optionals.ofNullable(jobs).stream()
                                .flatMap(job -> Optionals.ofNullable(job.library_names_or_ids()).stream()))
                .map(name -> {
                    return CrudUtils.anyOf(SharedLibraryBean.class).when(SharedLibraryBean::_id, name)
                            .when(SharedLibraryBean::path_name, name);
                }).collect(Collector.of(LinkedList::new, LinkedList::add, (left, right) -> {
                    left.addAll(right);
                    return left;
                }));

        @SuppressWarnings("unchecked")
        final MultiQueryComponent<SharedLibraryBean> spec = CrudUtils.<SharedLibraryBean>anyOf(tech_query,
                other_libs.toArray(new SingleQueryComponent[other_libs.size()]));

        // Get the names or ids, get the shared libraries, get the cached ids (must be present)

        return this._core_management_db.readOnlyVersion().getSharedLibraryStore()
                .getObjectsBySpec(spec, Arrays.asList(JsonUtils._ID, "path_name"), true).thenApply(cursor -> {
                    return StreamSupport.stream(cursor.spliterator(), false)
                            .collect(Collectors.<SharedLibraryBean, String, String>toMap(lib -> lib.path_name(),
                                    lib -> _globals.local_cached_jar_dir() + "/"
                                            + JarCacheUtils.buildCachedJarName(lib)));
                });
    } else {
        throw new RuntimeException(ErrorUtils.TECHNOLOGY_NOT_MODULE);
    }
}

From source file:org.hawkular.metrics.core.service.MetricsServiceITest.java

@Test
public void findSimpleCounterStats() {
    //Setup the counter data
    NumericDataPointCollector.createPercentile = InMemoryPercentileWrapper::new;

    Random r = new Random(123);
    List<Long> randomList = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        randomList.add((long) r.nextInt(100));
    }
    Collections.sort(randomList);
    Iterator<Long> randoms = randomList.iterator();

    String tenantId = "findCounterStats";
    DateTime start = now().minusMinutes(10);

    Metric<Long> c1 = new Metric<>(new MetricId<>(tenantId, COUNTER, "C1"),
            asList(new DataPoint<>(start.getMillis(), 222L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(1).getMillis(), 224L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(2).getMillis(), 226L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(3).getMillis(), 228L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(4).getMillis(), 229L + randoms.next())));
    doAction(() -> metricsService.addDataPoints(COUNTER, Observable.just(c1)));
    doAction(() -> metricsService.addTags(c1, ImmutableMap.of("type", "counter_cpu_usage", "node", "server1")));

    Metric<Long> c2 = new Metric<>(new MetricId<>(tenantId, COUNTER, "C2"),
            asList(new DataPoint<>(start.getMillis(), 150L),
                    new DataPoint<>(start.plusMinutes(1).getMillis(), 153L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(2).getMillis(), 156L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(3).getMillis(), 159L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(4).getMillis(), 162L + randoms.next())));
    doAction(() -> metricsService.addDataPoints(COUNTER, Observable.just(c2)));
    doAction(() -> metricsService.addTags(c2, ImmutableMap.of("type", "counter_cpu_usage", "node", "server2")));

    Metric<Long> c3 = new Metric<>(new MetricId<>(tenantId, COUNTER, "C3"),
            asList(new DataPoint<>(start.getMillis(), 11456L + randoms.next()),
                    new DataPoint<>(start.plusMinutes(1).getMillis(), 183332L + randoms.next())));
    doAction(() -> metricsService.addDataPoints(COUNTER, Observable.just(c3)));
    doAction(() -> metricsService.addTags(c3, ImmutableMap.of("type", "counter_cpu_usage", "node", "server3")));

    Buckets buckets = Buckets.fromCount(start.getMillis(), start.plusMinutes(5).getMillis(), 1);
    Map<String, String> tagFilters = ImmutableMap.of("type", "counter_cpu_usage", "node", "server1|server2");

    List<DataPoint<Double>> c1Rate = getOnNextEvents(() -> metricsService.findRateData(c1.getMetricId(),
            start.getMillis(), start.plusMinutes(5).getMillis()));

    List<DataPoint<Double>> c2Rate = getOnNextEvents(() -> metricsService.findRateData(c2.getMetricId(),
            start.getMillis(), start.plusMinutes(5).getMillis()));

    //Test simple counter stats
    List<List<NumericBucketPoint>> actualCounterStatsByTag = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER, tagFilters, start.getMillis(),
                    start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(), false));
    assertEquals(actualCounterStatsByTag.size(), 1);

    List<List<NumericBucketPoint>> actualCounterStatsById = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER, asList("C1", "C2"),
                    start.getMillis(), start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(),
                    false));
    assertEquals(actualCounterStatsById.size(), 1);

    List<NumericBucketPoint> expectedCounterStats = Arrays
            .asList(createSingleBucket(Stream.concat(c1.getDataPoints().stream(), c2.getDataPoints().stream())
                    .collect(Collectors.toList()), start, start.plusMinutes(5)));

    assertNumericBucketsEquals(actualCounterStatsByTag.get(0), expectedCounterStats);
    assertNumericBucketsEquals(actualCounterStatsById.get(0), expectedCounterStats);

    //Test stacked counter stats
    List<List<NumericBucketPoint>> actualStackedCounterStatsByTag = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER, tagFilters, start.getMillis(),
                    start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(), true));
    assertEquals(actualStackedCounterStatsByTag.size(), 1);

    List<List<NumericBucketPoint>> actualStackedCounterStatsById = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER, asList("C1", "C2"),
                    start.getMillis(), start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(),
                    true));
    assertEquals(actualStackedCounterStatsByTag.size(), 1);

    NumericBucketPoint collectorC1 = createSingleBucket(c1.getDataPoints(), start, start.plusMinutes(5));
    NumericBucketPoint collectorC2 = createSingleBucket(c2.getDataPoints(), start, start.plusMinutes(5));

    final Sum min = new Sum();
    final Sum average = new Sum();
    final Sum median = new Sum();
    final Sum max = new Sum();
    Observable.just(collectorC1, collectorC2).forEach(d -> {
        min.increment(d.getMin());
        max.increment(d.getMax());
        average.increment(d.getAvg());
        median.increment(d.getMedian());
    });
    NumericBucketPoint expectedStackedRateBucketPoint = new NumericBucketPoint.Builder(start.getMillis(),
            start.plusMinutes(5).getMillis()).setMin(min.getResult()).setMax(max.getResult())
                    .setAvg(average.getResult()).setMedian(median.getResult()).setSamples(2).build();
    List<NumericBucketPoint> expectedStackedCounterStatsList = new ArrayList<NumericBucketPoint>();
    expectedStackedCounterStatsList.add(expectedStackedRateBucketPoint);

    assertNumericBucketsEquals(actualStackedCounterStatsByTag.get(0), expectedStackedCounterStatsList);
    assertNumericBucketsEquals(actualStackedCounterStatsById.get(0), expectedStackedCounterStatsList);

    //Test simple counter rate stats
    List<List<NumericBucketPoint>> actualCounterRateStatsByTag = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER_RATE, tagFilters,
                    start.getMillis(), start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(),
                    false));
    assertEquals(actualCounterRateStatsByTag.size(), 1);

    List<List<NumericBucketPoint>> actualCounterRateStatsById = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER_RATE, asList("C1", "C2"),
                    start.getMillis(), start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(),
                    false));
    assertEquals(actualCounterRateStatsById.size(), 1);

    List<NumericBucketPoint> expectedCounterRateStats = Arrays.asList(
            createSingleBucket(Stream.concat(c1Rate.stream(), c2Rate.stream()).collect(Collectors.toList()),
                    start, start.plusMinutes(5)));

    assertNumericBucketsEquals(actualCounterRateStatsByTag.get(0), expectedCounterRateStats);
    assertNumericBucketsEquals(actualCounterRateStatsById.get(0), expectedCounterRateStats);

    //Test stacked counter rate stats
    List<List<NumericBucketPoint>> actualStackedCounterRateStatsByTag = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER_RATE, tagFilters,
                    start.getMillis(), start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(),
                    true));
    assertEquals(actualStackedCounterStatsByTag.size(), 1);

    List<List<NumericBucketPoint>> actualStackedCounterRateStatsById = getOnNextEvents(
            () -> metricsService.findNumericStats(tenantId, MetricType.COUNTER_RATE, asList("C1", "C2"),
                    start.getMillis(), start.plusMinutes(5).getMillis(), buckets, Collections.emptyList(),
                    true));
    assertEquals(actualStackedCounterStatsByTag.size(), 1);

    NumericBucketPoint collectorC1Rate = createSingleBucket(c1Rate, start, start.plusMinutes(5));
    NumericBucketPoint collectorC2Rate = createSingleBucket(c2Rate, start, start.plusMinutes(5));

    final Sum counterRateMin = new Sum();
    final Sum counterRateMax = new Sum();
    final Sum counterRateAverage = new Sum();
    final Sum counterRateMedian = new Sum();
    Observable.just(collectorC1Rate, collectorC2Rate).forEach(d -> {
        counterRateMin.increment(d.getMin());
        counterRateMax.increment(d.getMax());
        counterRateAverage.increment(d.getAvg());
        counterRateMedian.increment(d.getMedian());
    });
    NumericBucketPoint expectedStackedCounterRateBucketPoint = new NumericBucketPoint.Builder(start.getMillis(),
            start.plusMinutes(5).getMillis()).setMin(counterRateMin.getResult())
                    .setMax(counterRateMax.getResult()).setAvg(counterRateAverage.getResult())
                    .setMedian(counterRateMedian.getResult()).setSamples(2).build();
    List<NumericBucketPoint> expectedStackedCounterRateStatsList = new ArrayList<NumericBucketPoint>();
    expectedStackedCounterRateStatsList.add(expectedStackedCounterRateBucketPoint);

    assertNumericBucketsEquals(actualStackedCounterRateStatsByTag.get(0), expectedStackedCounterRateStatsList);
    assertNumericBucketsEquals(actualStackedCounterRateStatsById.get(0), expectedStackedCounterRateStatsList);
}

From source file:com.uber.hoodie.TestCleaner.java

private Stream<Pair<String, String>> convertPathToFileIdWithCommitTime(final HoodieTableMetaClient metaClient,
        List<String> paths) {
    Predicate<String> roFilePredicate = path -> path
            .contains(metaClient.getTableConfig().getROFileFormat().getFileExtension());
    Predicate<String> rtFilePredicate = path -> path
            .contains(metaClient.getTableConfig().getRTFileFormat().getFileExtension());
    Stream<Pair<String, String>> stream1 = paths.stream().filter(roFilePredicate).map(fullPath -> {
        String fileName = Paths.get(fullPath).getFileName().toString();
        return Pair.of(FSUtils.getFileId(fileName), FSUtils.getCommitTime(fileName));
    });
    Stream<Pair<String, String>> stream2 = paths.stream().filter(rtFilePredicate).map(path -> {
        return Pair.of(FSUtils.getFileIdFromLogPath(new Path(path)),
                FSUtils.getBaseCommitTimeFromLogPath(new Path(path)));
    });
    return Stream.concat(stream1, stream2);
}

From source file:org.hawkular.metrics.core.service.MetricsServiceITest.java

@Test
public void findSimpleGaugeStatsByMetricNames() {
    NumericDataPointCollector.createPercentile = InMemoryPercentileWrapper::new;

    String tenantId = "findGaugeStatsByMetricNames";
    DateTime start = now().minusMinutes(10);

    Metric<Double> m1 = new Metric<>(new MetricId<>(tenantId, GAUGE, "M1"), getDataPointList("M1", start));
    doAction(() -> metricsService.addDataPoints(GAUGE, Observable.just(m1)));

    Metric<Double> m2 = new Metric<>(new MetricId<>(tenantId, GAUGE, "M2"), getDataPointList("M2", start));
    doAction(() -> metricsService.addDataPoints(GAUGE, Observable.just(m2)));

    Metric<Double> m3 = new Metric<>(new MetricId<>(tenantId, GAUGE, "M3"), getDataPointList("M3", start));
    doAction(() -> metricsService.addDataPoints(GAUGE, Observable.just(m3)));

    Buckets buckets = Buckets.fromCount(start.getMillis(), start.plusMinutes(5).getMillis(), 1);

    List<List<NumericBucketPoint>> actual = getOnNextEvents(() -> metricsService.findNumericStats(tenantId,
            MetricType.GAUGE, asList("M1", "M2"), start.getMillis(), start.plusMinutes(5).getMillis(), buckets,
            Collections.emptyList(), false));

    assertEquals(actual.size(), 1);

    List<NumericBucketPoint> expected = Arrays
            .asList(createSingleBucket(Stream.concat(m1.getDataPoints().stream(), m2.getDataPoints().stream())
                    .collect(Collectors.toList()), start, start.plusMinutes(5)));

    assertNumericBucketsEquals(actual.get(0), expected);
}

From source file:io.druid.indexing.jdbc.supervisor.JDBCSupervisor.java

private void updateCurrentOffsets() throws InterruptedException, ExecutionException, TimeoutException {
    log.info("updateCurrentOffsets called");
    final List<ListenableFuture<Void>> futures = Stream
            .concat(taskGroups.values().stream().flatMap(taskGroup -> taskGroup.tasks.entrySet().stream()),
                    pendingCompletionTaskGroups.values().stream().flatMap(List::stream)
                            .flatMap(taskGroup -> taskGroup.tasks.entrySet().stream()))
            .map(task -> Futures.transform(taskClient.getCurrentOffsetsAsync(task.getKey(), false),
                    (Function<Map<Integer, Long>, Void>) (currentOffsets) -> {
                        log.info("TaskClient currentOffsets is [%s]", currentOffsets);
                        if (currentOffsets != null && !currentOffsets.isEmpty()) {
                            task.getValue().currentOffsets = currentOffsets;
                            log.info("task.getValue().currentOffsets  is " + task.getValue().currentOffsets);
                        }
                        return null;
                    }))
            .collect(Collectors.toList());
    log.info("CurrentOffsets size is " + taskGroups.values().size());

    Futures.successfulAsList(futures).get(futureTimeoutInSeconds, TimeUnit.SECONDS);
}