Example usage for org.apache.commons.lang3.mutable.MutableLong.longValue()

Introduction

On this page you can find example usage for org.apache.commons.lang3.mutable.MutableLong.longValue().

Prototype

@Override
public long longValue() 

Document

Returns the value of this MutableLong as a long.
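
A minimal, self-contained sketch of the call (the names and values here are illustrative, not taken from the examples below): longValue() unwraps the mutable holder to a primitive long, typically after the value has been built up with increment() or add().

import org.apache.commons.lang3.mutable.MutableLong;

public class MutableLongDemo {
    public static void main(String[] args) {
        MutableLong counter = new MutableLong(); // starts at 0
        counter.increment();                     // 1
        counter.add(41);                         // 42

        long value = counter.longValue();        // unwrap to a primitive long
        System.out.println(value);               // prints 42
    }
}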

Usage

From source file:com.hazelcast.jet.benchmark.trademonitor.FlinkTradeMonitor.java

public static void main(String[] args) throws Exception {
    if (args.length != 13) {
        System.err.println("Usage:");
        System.err.println("  " + FlinkTradeMonitor.class.getSimpleName()
                + " <bootstrap.servers> <topic> <offset-reset> <maxLagMs> <windowSizeMs> <slideByMs> <outputPath> <checkpointInterval> <checkpointUri> <doAsyncSnapshot> <stateBackend> <kafkaParallelism> <windowParallelism>");
        System.err.println("<stateBackend> - fs | rocksDb");
        System.exit(1);
    }
    String brokerUri = args[0];
    String topic = args[1];
    String offsetReset = args[2];
    int lagMs = Integer.parseInt(args[3]);
    int windowSize = Integer.parseInt(args[4]);
    int slideBy = Integer.parseInt(args[5]);
    String outputPath = args[6];
    int checkpointInt = Integer.parseInt(args[7]);
    String checkpointUri = args[8];
    boolean doAsyncSnapshot = Boolean.parseBoolean(args[9]);
    String stateBackend = args[10];
    int kafkaParallelism = Integer.parseInt(args[11]);
    int windowParallelism = Integer.parseInt(args[12]);

    System.out.println("bootstrap.servers: " + brokerUri);
    System.out.println("topic: " + topic);
    System.out.println("offset-reset: " + offsetReset);
    System.out.println("lag: " + lagMs);
    System.out.println("windowSize: " + windowSize);
    System.out.println("slideBy: " + slideBy);
    System.out.println("outputPath: " + outputPath);
    System.out.println("checkpointInt: " + checkpointInt);
    System.out.println("checkpointUri: " + checkpointUri);
    System.out.println("doAsyncSnapshot: " + doAsyncSnapshot);
    System.out.println("stateBackend: " + stateBackend);
    System.out.println("kafkaParallelism: " + kafkaParallelism);
    System.out.println("windowParallelism: " + windowParallelism);

    // set up the execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    if (checkpointInt > 0) {
        env.enableCheckpointing(checkpointInt);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(checkpointInt);
    }
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 5000));
    if ("fs".equalsIgnoreCase(stateBackend)) {
        env.setStateBackend(new FsStateBackend(checkpointUri, doAsyncSnapshot));
    } else if ("rocksDb".equalsIgnoreCase(stateBackend)) {
        env.setStateBackend(new RocksDBStateBackend(checkpointUri));
    } else {
        System.err.println("Bad value for stateBackend: " + stateBackend);
        System.exit(1);
    }

    DeserializationSchema<Trade> schema = new AbstractDeserializationSchema<Trade>() {
        TradeDeserializer deserializer = new TradeDeserializer();

        @Override
        public Trade deserialize(byte[] message) throws IOException {
            return deserializer.deserialize(null, message);
        }
    };

    DataStreamSource<Trade> trades = env
            .addSource(new FlinkKafkaConsumer010<>(topic, schema, getKafkaProperties(brokerUri, offsetReset)))
            .setParallelism(kafkaParallelism);
    AssignerWithPeriodicWatermarks<Trade> timestampExtractor = new BoundedOutOfOrdernessTimestampExtractor<Trade>(
            Time.milliseconds(lagMs)) {
        @Override
        public long extractTimestamp(Trade element) {
            return element.getTime();
        }
    };

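    // When windowSize == slideBy, a sliding window degenerates into a tumbling
    // one, so the cheaper tumbling assigner is used.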
    WindowAssigner<Object, TimeWindow> window = windowSize == slideBy
            ? TumblingEventTimeWindows.of(Time.milliseconds(windowSize))
            : SlidingEventTimeWindows.of(Time.milliseconds(windowSize), Time.milliseconds(slideBy));

    trades.assignTimestampsAndWatermarks(timestampExtractor).keyBy((Trade t) -> t.getTicker()).window(window)
            .aggregate(new AggregateFunction<Trade, MutableLong, Long>() {

                @Override
                public MutableLong createAccumulator() {
                    return new MutableLong();
                }

                @Override
                public MutableLong add(Trade value, MutableLong accumulator) {
                    accumulator.increment();
                    return accumulator;
                }

                @Override
                public MutableLong merge(MutableLong a, MutableLong b) {
                    a.setValue(Math.addExact(a.longValue(), b.longValue()));
                    return a;
                }

                @Override
                public Long getResult(MutableLong accumulator) {
                    return accumulator.longValue();
                }
            }, new WindowFunction<Long, Tuple5<String, String, Long, Long, Long>, String, TimeWindow>() {
                @Override
                public void apply(String key, TimeWindow window, Iterable<Long> input,
                        Collector<Tuple5<String, String, Long, Long, Long>> out) throws Exception {
                    long timeMs = System.currentTimeMillis();
                    long count = input.iterator().next();
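                    // Latency relative to the earliest possible emission time
                    // (window end + allowed lag), assuming event timestamps
                    // track wall-clock time.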
                    long latencyMs = timeMs - window.getEnd() - lagMs;
                    out.collect(
                            new Tuple5<>(Instant.ofEpochMilli(window.getEnd()).atZone(ZoneId.systemDefault())
                                    .toLocalTime().toString(), key, count, timeMs, latencyMs));
                }
            }).setParallelism(windowParallelism).writeAsCsv(outputPath, WriteMode.OVERWRITE);

    env.execute("Trade Monitor Example");
}
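
Usage note: MutableLong is a natural accumulator type here because Flink calls add() once per element; mutating one holder in place avoids boxing a fresh Long per trade. merge() combines partial counts via longValue() with Math.addExact, so an overflow fails fast instead of wrapping silently, and getResult() unwraps the final count just once per window.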

From source file:com.cg.mapreduce.fpgrowth.mahout.fpm.fpgrowth2.FPGrowthIds.java

/** 
 * Run FP Growth recursively on tree, for the given target attribute
 */
private static FrequentPatternMaxHeap growth(FPTree tree, MutableLong minSupportMutable, int k,
        int currentAttribute, StatusUpdater updater) {

    long currentAttributeCount = tree.headerCount(currentAttribute);

    if (currentAttributeCount < minSupportMutable.longValue()) {
        return new FrequentPatternMaxHeap(k, true);
    }

    FPTree condTree = tree.createMoreFreqConditionalTree(currentAttribute);

    Pair<FPTree, FPTree> pAndQ = condTree.splitSinglePrefix();
    FPTree p = pAndQ.getFirst();
    FPTree q = pAndQ.getSecond();

    FrequentPatternMaxHeap prefixPats = null;
    if (p != null) {
        prefixPats = mineSinglePrefix(p, k);
    }

    FrequentPatternMaxHeap suffixPats = new FrequentPatternMaxHeap(k, true);

    Pattern thisPat = new Pattern();
    thisPat.add(currentAttribute, currentAttributeCount);
    suffixPats.insert(thisPat);

    for (int attr : q.attrIterableRev()) {
        mergeHeap(suffixPats, growth(q, minSupportMutable, k, attr, updater), currentAttribute,
                currentAttributeCount, true);
    }

    if (prefixPats != null) {
        return cross(prefixPats, suffixPats, k);
    }

    return suffixPats;
}

From source file:com.cg.mapreduce.fpgrowth.mahout.fpm.fpgrowth2.FPGrowthIds.java

/**
 * Top K FpGrowth Algorithm
 *
 * @param tree
 *          to be mined
 * @param minSupportValue
 *          minimum support of the pattern to keep
 * @param k
 *          Number of top frequent patterns to keep
 * @param requiredFeatures
 *          Set of integer id's of features to mine
 * @param outputCollector
 *          the collector which converts the mined frequent patterns from
 *          integer ids into output form
 * @return Top K Frequent Patterns for each feature and their support
 */
private static Map<Integer, FrequentPatternMaxHeap> fpGrowth(FPTree tree, long minSupportValue, int k,
        IntArrayList requiredFeatures, TopKPatternsOutputConverter<Integer> outputCollector,
        StatusUpdater updater) throws IOException {

    Map<Integer, FrequentPatternMaxHeap> patterns = Maps.newHashMap();
    requiredFeatures.sort();
    for (int attribute : tree.attrIterableRev()) {
        if (requiredFeatures.binarySearch(attribute) >= 0) {
            log.info("Mining FTree Tree for all patterns with {}", attribute);
            MutableLong minSupport = new MutableLong(minSupportValue);
            FrequentPatternMaxHeap frequentPatterns = growth(tree, minSupport, k, attribute, updater);
            patterns.put(attribute, frequentPatterns);
            outputCollector.collect(attribute, frequentPatterns);

            minSupportValue = Math.max(minSupportValue, minSupport.longValue() / 2);
            //System.out.println( patterns.get(attribute).count()+": "+patterns.get(attribute).leastSupport());
            log.info("Found {} Patterns with Least Support {}", patterns.get(attribute).count(),
                    patterns.get(attribute).leastSupport());
        }
    }
    return patterns;
}
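
Note the feedback loop around longValue() at the end of each iteration: growth() may raise the shared minimum support while mining, and the caller keeps half of the raised value (never less than the original) as the starting threshold for the next feature, so later searches begin with a tighter pruning bound.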

From source file:info.mikaelsvensson.devtools.analysis.db2eventlog.QueryStatistics.java

long getAccumulatedExecutionTime() {
    long sum = 0;
    for (MutableLong mutableLong : operationsTime.values()) {
        sum += mutableLong.longValue();
    }
    return sum;
}
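
The loop above reads each per-operation total with longValue(). A plausible sketch of the write side of this pattern (the class shape, key type, and method names below are assumptions for illustration, not the project's actual code):

import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.mutable.MutableLong;

class OperationTimes {
    private final Map<String, MutableLong> operationsTime = new HashMap<>();

    // Accumulate elapsed time per operation; reusing one MutableLong per key
    // avoids re-boxing a Long on every update.
    void record(String operation, long elapsedMs) {
        operationsTime.computeIfAbsent(operation, k -> new MutableLong()).add(elapsedMs);
    }

    long getAccumulatedExecutionTime() {
        long sum = 0;
        for (MutableLong time : operationsTime.values()) {
            sum += time.longValue();
        }
        return sum;
    }
}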

From source file:com.cg.mapreduce.fpgrowth.mahout.fpm.fpgrowth.FPGrowth.java

private static FrequentPatternMaxHeap growth(FPTree tree, MutableLong minSupportMutable, int k,
        FPTreeDepthCache treeCache, int level, int currentAttribute, StatusUpdater updater) {

    FrequentPatternMaxHeap frequentPatterns = new FrequentPatternMaxHeap(k, true);

    int i = Arrays.binarySearch(tree.getHeaderTableAttributes(), currentAttribute);
    if (i < 0) {
        return frequentPatterns;
    }

    int headerTableCount = tree.getHeaderTableCount();

    while (i < headerTableCount) {
        int attribute = tree.getAttributeAtIndex(i);
        long count = tree.getHeaderSupportCount(attribute);
        if (count < minSupportMutable.longValue()) {
            i++;
            continue;
        }
        updater.update("FPGrowth Algorithm for a given feature: " + attribute);
        FPTree conditionalTree = treeCache.getFirstLevelTree(attribute);
        if (conditionalTree.isEmpty()) {
            traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
                    conditionalTree, tree);
            // printTree(conditionalTree);

        }

        FrequentPatternMaxHeap returnedPatterns;
        if (attribute == currentAttribute) {

            returnedPatterns = growthTopDown(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
                    currentAttribute, updater);

            frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
        } else {
            returnedPatterns = growthTopDown(conditionalTree, minSupportMutable, k, treeCache, level + 1, false,
                    currentAttribute, updater);
            frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, false);
        }
        if (frequentPatterns.isFull() && minSupportMutable.longValue() < frequentPatterns.leastSupport()) {
            minSupportMutable.setValue(frequentPatterns.leastSupport());
        }
        i++;
    }

    return frequentPatterns;
}
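
In all three traversals of this class (growth, growthBottomUp and growthTopDown), minSupportMutable is shared by reference so that recursive calls can raise the pruning threshold in place once the top-k heap fills up. Each support comparison unwraps the current value with longValue() at the moment of the check, so it immediately sees any increase made deeper in the recursion.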

From source file:com.cg.mapreduce.fpgrowth.mahout.fpm.fpgrowth.FPGrowth.java

private static FrequentPatternMaxHeap growthBottomUp(FPTree tree, MutableLong minSupportMutable, int k,
        FPTreeDepthCache treeCache, int level, boolean conditionalOfCurrentAttribute, int currentAttribute,
        StatusUpdater updater) {

    FrequentPatternMaxHeap frequentPatterns = new FrequentPatternMaxHeap(k, false);

    if (!conditionalOfCurrentAttribute) {
        int index = Arrays.binarySearch(tree.getHeaderTableAttributes(), currentAttribute);
        if (index < 0) {
            return frequentPatterns;
        } else {
            int attribute = tree.getAttributeAtIndex(index);
            long count = tree.getHeaderSupportCount(attribute);
            if (count < minSupportMutable.longValue()) {
                return frequentPatterns;
            }
        }
    }

    if (tree.singlePath()) {
        return generateSinglePathPatterns(tree, k, minSupportMutable.longValue());
    }

    updater.update("Bottom Up FP Growth");
    for (int i = tree.getHeaderTableCount() - 1; i >= 0; i--) {
        int attribute = tree.getAttributeAtIndex(i);
        long count = tree.getHeaderSupportCount(attribute);
        if (count < minSupportMutable.longValue()) {
            continue;
        }
        FPTree conditionalTree = treeCache.getTree(level);

        FrequentPatternMaxHeap returnedPatterns;
        if (conditionalOfCurrentAttribute) {
            traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
                    conditionalTree, tree);
            returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
                    currentAttribute, updater);

            frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
        } else {
            if (attribute == currentAttribute) {
                traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute),
                        minSupportMutable.longValue(), conditionalTree, tree);
                returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1,
                        true, currentAttribute, updater);

                frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
            } else if (attribute > currentAttribute) {
                traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute),
                        minSupportMutable.longValue(), conditionalTree, tree);
                returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1,
                        false, currentAttribute, updater);
                frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, false);
            }
        }

        if (frequentPatterns.isFull() && minSupportMutable.longValue() < frequentPatterns.leastSupport()) {
            minSupportMutable.setValue(frequentPatterns.leastSupport());
        }
    }

    return frequentPatterns;
}

From source file:com.cg.mapreduce.fpgrowth.mahout.fpm.fpgrowth.FPGrowth.java

private static FrequentPatternMaxHeap growthTopDown(FPTree tree, MutableLong minSupportMutable, int k,
        FPTreeDepthCache treeCache, int level, boolean conditionalOfCurrentAttribute, int currentAttribute,
        StatusUpdater updater) {

    FrequentPatternMaxHeap frequentPatterns = new FrequentPatternMaxHeap(k, true);

    if (!conditionalOfCurrentAttribute) {
        int index = Arrays.binarySearch(tree.getHeaderTableAttributes(), currentAttribute);
        if (index < 0) {
            return frequentPatterns;
        } else {
            int attribute = tree.getAttributeAtIndex(index);
            long count = tree.getHeaderSupportCount(attribute);
            if (count < minSupportMutable.longValue()) {
                return frequentPatterns;
            }
        }
    }

    if (tree.singlePath()) {
        return generateSinglePathPatterns(tree, k, minSupportMutable.longValue());
    }

    updater.update("Top Down Growth:");

    for (int i = 0; i < tree.getHeaderTableCount(); i++) {
        int attribute = tree.getAttributeAtIndex(i);
        long count = tree.getHeaderSupportCount(attribute);
        if (count < minSupportMutable.longValue()) {
            continue;
        }

        FPTree conditionalTree = treeCache.getTree(level);

        FrequentPatternMaxHeap returnedPatterns;
        if (conditionalOfCurrentAttribute) {
            traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
                    conditionalTree, tree);

            returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
                    currentAttribute, updater);
            frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);

        } else {
            if (attribute == currentAttribute) {
                traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute),
                        minSupportMutable.longValue(), conditionalTree, tree);
                returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1,
                        true, currentAttribute, updater);
                frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);

            } else if (attribute > currentAttribute) {
                traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute),
                        minSupportMutable.longValue(), conditionalTree, tree);
                returnedPatterns = growthBottomUp(conditionalTree, minSupportMutable, k, treeCache, level + 1,
                        false, currentAttribute, updater);
                frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, false);

            }
        }
        if (frequentPatterns.isFull() && minSupportMutable.longValue() < frequentPatterns.leastSupport()) {
            minSupportMutable.setValue(frequentPatterns.leastSupport());
        }
    }

    return frequentPatterns;
}

From source file:fr.duminy.jbackup.core.archive.FileCollector.java

public void collectFiles(List<SourceWithPath> collectedFiles, ArchiveParameters archiveParameters,
        TaskListener listener, Cancellable cancellable) throws ArchiveException {
    MutableLong totalSize = new MutableLong();
    try {
        collectFilesImpl(collectedFiles, archiveParameters.getSources(), totalSize, cancellable);
    } catch (IOException ioe) {
        throw new ArchiveException(ioe);
    }
    if (listener != null) {
        listener.totalSizeComputed(totalSize.longValue());
    }
}

From source file:com.norconex.committer.core.AbstractFileQueueCommitter.java

@Override
protected long getInitialQueueDocCount() {
    final MutableLong fileCount = new MutableLong();

    // --- Additions and Deletions ---
    FileUtil.visitAllFiles(new File(queue.getDirectory()), new IFileVisitor() {
        @Override
        public void visit(File file) {
            fileCount.increment();
        }
    }, REF_FILTER);
    return fileCount.longValue();
}
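
Here MutableLong doubles as a mutable counter that the anonymous visitor is allowed to capture: the reference is effectively final while its value is not. A minimal sketch of the same idiom with java.nio (independent of Norconex's FileUtil; the class and method below are illustrative):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Stream;
import org.apache.commons.lang3.mutable.MutableLong;

class FileCounter {
    static long countFiles(Path dir) throws IOException {
        final MutableLong fileCount = new MutableLong();
        try (Stream<Path> paths = Files.walk(dir)) {
            // The lambda cannot assign to a captured local, but it can
            // mutate the holder it captures.
            paths.filter(Files::isRegularFile).forEach(p -> fileCount.increment());
        }
        return fileCount.longValue();
    }
}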

From source file:com.cg.mapreduce.fpgrowth.mahout.fpm.fpgrowth2.FPGrowthObj.java

/**
 * Top K FpGrowth Algorithm
 *
 * @param tree
 *          to be mined
 * @param minSupportValue
 *          minimum support of the pattern to keep
 * @param k
 *          Number of top frequent patterns to keep
 * @param requiredFeatures
 *          Set of integer id's of features to mine
 * @param outputCollector
 *          the collector which converts the mined frequent patterns from
 *          integer ids to type A
 * @return Top K Frequent Patterns for each feature and their support
 */
private Map<Integer, FrequentPatternMaxHeap> fpGrowth(FPTree tree, long minSupportValue, int k,
        Collection<Integer> requiredFeatures, TopKPatternsOutputConverter<A> outputCollector,
        StatusUpdater updater) throws IOException {

    Map<Integer, FrequentPatternMaxHeap> patterns = Maps.newHashMap();
    for (int attribute : tree.attrIterableRev()) {
        if (requiredFeatures.contains(attribute)) {
            log.info("Mining FTree Tree for all patterns with {}", attribute);
            MutableLong minSupport = new MutableLong(minSupportValue);
            FrequentPatternMaxHeap frequentPatterns = growth(tree, minSupport, k, attribute, updater);
            patterns.put(attribute, frequentPatterns);
            outputCollector.collect(attribute, frequentPatterns);

            minSupportValue = Math.max(minSupportValue, minSupport.longValue() / 2);
            log.info("Found {} Patterns with Least Support {}", patterns.get(attribute).count(),
                    patterns.get(attribute).leastSupport());
        }
    }
    return patterns;
}