Example usage for java.util.stream Collectors summingLong

Introduction

On this page you can find example usages of java.util.stream.Collectors.summingLong, collected from open source projects.

Prototype

public static <T> Collector<T, ?, Long> summingLong(ToLongFunction<? super T> mapper) 

Document

Returns a Collector that produces the sum of a long-valued function applied to the input elements.
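
For reference, the collector yields 0 when the stream is empty. A minimal, self-contained sketch of the collector in isolation (the list contents here are purely illustrative):

import java.util.List;
import java.util.stream.Collectors;

public class SummingLongDemo {
    public static void main(String[] args) {
        List<String> words = List.of("stream", "collector", "sum");
        // Sum a long-valued function over the elements; String::length
        // returns an int, which widens to long for the ToLongFunction.
        long totalLength = words.stream()
                .collect(Collectors.summingLong(String::length));
        System.out.println(totalLength); // 18
    }
}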

Usage

From source file: com.ikanow.aleph2.analytics.hadoop.assets.SampleReduceEnrichmentModule.java

@Override
public void onObjectBatch(Stream<Tuple2<Long, IBatchRecord>> batch, Optional<Integer> batch_size,
        Optional<JsonNode> grouping_key) {

    // Just to make it simple 

    // 2 different cases:

    // 1) If I'm a combiner or a single-step reducer, then count the batches
    //    and emit (key, count)
    // 2) If I'm the second stage of a combine-reduce then sum the counts

    Patterns.match(_stage.get()).andAct().when(s -> s == Stage.map, __ -> {
        batch.forEach(obj -> {

            final JsonNode new_grouping_key = _key_fields.get().stream().reduce(_mapper.createObjectNode(),
                    (acc, v) -> {
                        final Optional<String> key_field = JsonUtils.getProperty(v, obj._2().getJson())
                                .filter(j -> j.isTextual()).map(j -> j.asText());
                        return key_field.map(kf -> acc.put(v.replaceAll("__+", "_").replace(".", "__"), kf))
                                .orElse(acc);
                    }, (acc1, acc2) -> acc1); // combiner is never invoked: the stream is sequential

            final ObjectNode to_output = _mapper.createObjectNode().put("count", 1);

            _logger.info("OUTPUT FROM MAP = " + to_output + " key " + new_grouping_key);

            _context.get().emitMutableObject(obj._1(), to_output, Optional.empty(),
                    Optional.of(new_grouping_key));

        });
    }).otherwise(s -> { // combine or reduce

        final long count = batch.map(b -> Optional.ofNullable(b._2().getJson().get("count"))
                .filter(j -> j.isNumber()).map(j -> j.asLong()).orElse(0L))
                .collect(Collectors.summingLong(l -> l));

        final ObjectNode to_output = ((s == Stage.reduce) ? ((ObjectNode) grouping_key.get().deepCopy())
                : _mapper.createObjectNode()).put("count", count);

        _logger.info("OUTPUT FROM COMBINE/REDUCE = " + to_output + " (stage=" + s + " key " + grouping_key);

        _context.get().emitMutableObject(0L, to_output, Optional.empty(),
                (s == Stage.reduce) ? Optional.empty() : grouping_key);
    });
}
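
Since the mapped values here are already longs, Collectors.summingLong(l -> l) is an identity sum. A sketch of an equivalent formulation over the same batch stream, using mapToLong to avoid boxing the per-record counts (assuming the same fields as above):

final long count = batch.mapToLong(b -> Optional.ofNullable(b._2().getJson().get("count"))
        .filter(j -> j.isNumber()).map(j -> j.asLong()).orElse(0L)).sum();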

From source file: org.apache.hadoop.hbase.quotas.FileArchiverNotifierImpl.java

/**
 * Computes the size of each store file in {@code storeFileNames}
 */
long getSizeOfStoreFiles(TableName tn, Set<StoreFileReference> storeFileNames) {
    return storeFileNames.stream().collect(Collectors.summingLong((sfr) -> getSizeOfStoreFile(tn, sfr)));
}

From source file: org.apache.hadoop.hbase.quotas.FileArchiverNotifierImpl.java

/**
 * Computes the size of the store files for a single region.
 */
long getSizeOfStoreFile(TableName tn, StoreFileReference storeFileName) {
    String regionName = storeFileName.getRegionName();
    return storeFileName.getFamilyToFilesMapping().entries().stream().collect(
            Collectors.summingLong((e) -> getSizeOfStoreFile(tn, regionName, e.getKey(), e.getValue())));
}

From source file: org.apache.hadoop.hbase.quotas.SnapshotQuotaObserverChore.java

/**
 * Sums the snapshot sizes for each namespace.
 */
Map<String, Long> groupSnapshotSizesByNamespace(Multimap<TableName, SnapshotWithSize> snapshotsWithSize) {
    return snapshotsWithSize.entries().stream().collect(Collectors.groupingBy(
            // Convert TableName into the namespace string
            (e) -> e.getKey().getNamespaceAsString(),
            // Sum the values for namespace
            Collectors.mapping(Map.Entry::getValue, Collectors.summingLong((sws) -> sws.getSize()))));
}
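
Because summingLong already accepts a mapper, the Collectors.mapping step can be folded into it. A sketch of an equivalent downstream collector (same types as the method above):

snapshotsWithSize.entries().stream().collect(Collectors.groupingBy(
        (e) -> e.getKey().getNamespaceAsString(),
        // mapping + summingLong collapsed into a single summingLong
        Collectors.summingLong((e) -> e.getValue().getSize())));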

From source file: pl.otros.logview.api.gui.LogViewPanelWrapper.java

private void createReadingProgressBar() {
    progressBar = new JProgressBar(0, 100);
    progressBar.setStringPainted(true);
    progressBar.setString("Processed ? of ? [?%]");
    final Timer t = new Timer(500, e -> {
        LOGGER.trace("Updating reading progress");
        final LoadingDetails loadingDetails = logLoader.getLoadingDetails(dataTableModel);
        final List<LogLoadingSession> logLoadingSessions = loadingDetails.getLogLoadingSessions();
        final List<LoadStatistic> statistics = logLoadingSessions.stream().map(logLoader::getLoadStatistic)
                .collect(Collectors.toList());
        final Long position = statistics.stream().collect(Collectors.summingLong(LoadStatistic::getPosition));
        final Long total = statistics.stream().collect(Collectors.summingLong(LoadStatistic::getTotal));
        final float percent = (100f) * ((float) position / total);
        progressBar.setValue((int) percent);
        final String msg = String.format("Processed %s of %s [%.2f%%]",
                FileSize.convertToStringRepresentation(position), FileSize.convertToStringRepresentation(total),
                percent);
        LOGGER.trace("Updating progress bar with message {}", msg);
        progressBar.setString(msg);

        final String tooltip = "<HTML>" + statistics.stream()
                .map(s -> String.format("Processed %s of %s [%.2f%%]  - %s",
                        FileSize.convertToStringRepresentation(s.getPosition()),
                        FileSize.convertToStringRepresentation(s.getTotal()), s.getPercent(),
                        s.getSource().stringForm()))
                .collect(Collectors.joining("<BR/>")) + "</HTML>";
        progressBar.setToolTipText(tooltip);
    });
    t.setRepeats(true);
    t.setInitialDelay(1000);
    t.start();
    timer = Optional.of(t);
}
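
The two summingLong collects above traverse the statistics list twice, which is harmless for a small list; on Java 12+ they could be fused into a single pass with Collectors.teeing. A sketch reusing the LoadStatistic getters from the code above:

// Hypothetical one-pass variant (requires Java 12+ for Collectors.teeing):
final Map.Entry<Long, Long> sums = statistics.stream().collect(Collectors.teeing(
        Collectors.summingLong(LoadStatistic::getPosition),
        Collectors.summingLong(LoadStatistic::getTotal),
        Map::entry)); // key = summed position, value = summed total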