Example usage for org.apache.commons.lang3.mutable MutableLong setValue

List of usage examples for org.apache.commons.lang3.mutable MutableLong setValue

Introduction

On this page you can find example usages of org.apache.commons.lang3.mutable MutableLong setValue.

Prototype

@Override
public void setValue(final Number value) 

Document

Sets the value from any Number instance.
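
For orientation, here is a minimal self-contained sketch (not taken from any of the projects below; the class name is invented) showing that setValue accepts any Number subtype and stores its longValue():

import org.apache.commons.lang3.mutable.MutableLong;

public class SetValueSketch {
    public static void main(String[] args) {
        MutableLong value = new MutableLong();
        value.setValue(42L);                    // a Long
        value.setValue(Integer.valueOf(7));     // any Number is accepted
        value.setValue(3.9d);                   // a Double is truncated to its longValue(), not rounded
        System.out.println(value.longValue());  // prints 3
    }
}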

Usage

From source file:com.hazelcast.jet.benchmark.trademonitor.FlinkTradeMonitor.java

public static void main(String[] args) throws Exception {
    if (args.length != 13) {
        System.err.println("Usage:");
        System.err.println("  " + FlinkTradeMonitor.class.getSimpleName()
                + " <bootstrap.servers> <topic> <offset-reset> <maxLagMs> <windowSizeMs> <slideByMs> <outputPath> <checkpointInterval> <checkpointUri> <doAsyncSnapshot> <stateBackend> <kafkaParallelism> <windowParallelism>");
        System.err.println("<stateBackend> - fs | rocksDb");
        System.exit(1);
    }
    String brokerUri = args[0];
    String topic = args[1];
    String offsetReset = args[2];
    int lagMs = Integer.parseInt(args[3]);
    int windowSize = Integer.parseInt(args[4]);
    int slideBy = Integer.parseInt(args[5]);
    String outputPath = args[6];
    int checkpointInt = Integer.parseInt(args[7]);
    String checkpointUri = args[8];
    boolean doAsyncSnapshot = Boolean.parseBoolean(args[9]);
    String stateBackend = args[10];
    int kafkaParallelism = Integer.parseInt(args[11]);
    int windowParallelism = Integer.parseInt(args[12]);

    System.out.println("bootstrap.servers: " + brokerUri);
    System.out.println("topic: " + topic);
    System.out.println("offset-reset: " + offsetReset);
    System.out.println("lag: " + lagMs);
    System.out.println("windowSize: " + windowSize);
    System.out.println("slideBy: " + slideBy);
    System.out.println("outputPath: " + outputPath);
    System.out.println("checkpointInt: " + checkpointInt);
    System.out.println("checkpointUri: " + checkpointUri);
    System.out.println("doAsyncSnapshot: " + doAsyncSnapshot);
    System.out.println("stateBackend: " + stateBackend);
    System.out.println("kafkaParallelism: " + kafkaParallelism);
    System.out.println("windowParallelism: " + windowParallelism);

    // set up the execution environment
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
    if (checkpointInt > 0) {
        env.enableCheckpointing(checkpointInt);
        env.getCheckpointConfig().setMinPauseBetweenCheckpoints(checkpointInt);
    }
    env.setRestartStrategy(RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 5000));
    if ("fs".equalsIgnoreCase(stateBackend)) {
        env.setStateBackend(new FsStateBackend(checkpointUri, doAsyncSnapshot));
    } else if ("rocksDb".equalsIgnoreCase(stateBackend)) {
        env.setStateBackend(new RocksDBStateBackend(checkpointUri));
    } else {
        System.err.println("Bad value for stateBackend: " + stateBackend);
        System.exit(1);
    }

    DeserializationSchema<Trade> schema = new AbstractDeserializationSchema<Trade>() {
        TradeDeserializer deserializer = new TradeDeserializer();

        @Override
        public Trade deserialize(byte[] message) throws IOException {
            return deserializer.deserialize(null, message);
        }
    };

    DataStreamSource<Trade> trades = env
            .addSource(new FlinkKafkaConsumer010<>(topic, schema, getKafkaProperties(brokerUri, offsetReset)))
            .setParallelism(kafkaParallelism);
    AssignerWithPeriodicWatermarks<Trade> timestampExtractor = new BoundedOutOfOrdernessTimestampExtractor<Trade>(
            Time.milliseconds(lagMs)) {
        @Override
        public long extractTimestamp(Trade element) {
            return element.getTime();
        }
    };

    WindowAssigner<Object, TimeWindow> window = windowSize == slideBy
            ? TumblingEventTimeWindows.of(Time.milliseconds(windowSize))
            : SlidingEventTimeWindows.of(Time.milliseconds(windowSize), Time.milliseconds(slideBy));

    trades.assignTimestampsAndWatermarks(timestampExtractor).keyBy((Trade t) -> t.getTicker()).window(window)
            .aggregate(new AggregateFunction<Trade, MutableLong, Long>() {

                @Override
                public MutableLong createAccumulator() {
                    return new MutableLong();
                }

                @Override
                public MutableLong add(Trade value, MutableLong accumulator) {
                    accumulator.increment();
                    return accumulator;
                }

                @Override
                public MutableLong merge(MutableLong a, MutableLong b) {
                    a.setValue(Math.addExact(a.longValue(), b.longValue()));
                    return a;
                }

                @Override
                public Long getResult(MutableLong accumulator) {
                    return accumulator.longValue();
                }
            }, new WindowFunction<Long, Tuple5<String, String, Long, Long, Long>, String, TimeWindow>() {
                @Override
                public void apply(String key, TimeWindow window, Iterable<Long> input,
                        Collector<Tuple5<String, String, Long, Long, Long>> out) throws Exception {
                    long timeMs = System.currentTimeMillis();
                    long count = input.iterator().next();
                    long latencyMs = timeMs - window.getEnd() - lagMs;
                    out.collect(
                            new Tuple5<>(Instant.ofEpochMilli(window.getEnd()).atZone(ZoneId.systemDefault())
                                    .toLocalTime().toString(), key, count, timeMs, latencyMs));
                }
            }).setParallelism(windowParallelism).writeAsCsv(outputPath, WriteMode.OVERWRITE);

    env.execute("Trade Monitor Example");
}
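
The aggregate above uses MutableLong as a per-window counter: add() increments it per trade, and merge() combines two partial counts. As a standalone sketch (the class and method below are invented for illustration, not part of the Flink job), the merge step uses Math.addExact so that a long overflow fails loudly instead of wrapping silently:

import org.apache.commons.lang3.mutable.MutableLong;

public class MergeSketch {
    // Combine two partial counts into a; throws ArithmeticException on long overflow.
    static MutableLong mergeCounts(MutableLong a, MutableLong b) {
        a.setValue(Math.addExact(a.longValue(), b.longValue()));
        return a;
    }

    public static void main(String[] args) {
        System.out.println(mergeCounts(new MutableLong(40), new MutableLong(2))); // prints 42
    }
}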

From source file:fr.duminy.jbackup.core.archive.FileCollector.java

private void collectFilesImpl(List<SourceWithPath> collectedFiles, Collection<ArchiveParameters.Source> sources,
        MutableLong totalSize, Cancellable cancellable) throws IOException {
    totalSize.setValue(0L);

    for (ArchiveParameters.Source source : sources) {
        Path sourcePath = source.getSource();
        if (!sourcePath.isAbsolute()) {
            throw new IllegalArgumentException(String.format("The file '%s' is relative.", sourcePath));
        }

        long size;

        if (Files.isDirectory(sourcePath)) {
            size = collect(collectedFiles, sourcePath, source.getDirFilter(), source.getFileFilter(),
                    cancellable);
        } else {
            collectedFiles.add(new SourceWithPath(sourcePath, sourcePath));
            size = Files.size(sourcePath);
        }

        totalSize.add(size);
    }
}
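
Here MutableLong acts as an out-parameter: the caller's running total is reset with setValue(0L) and then grown with add(). A minimal sketch of the same reset-then-accumulate pattern (class and method names invented for illustration):

import java.util.List;
import org.apache.commons.lang3.mutable.MutableLong;

public class TotalSizeSketch {
    // Sums sizes into a caller-supplied accumulator, as collectFilesImpl does above.
    static void sumSizes(List<Long> sizes, MutableLong total) {
        total.setValue(0L);        // reset before accumulating
        for (long size : sizes) {
            total.add(size);       // MutableLong also offers add(long)
        }
    }

    public static void main(String[] args) {
        MutableLong total = new MutableLong();
        sumSizes(List.of(10L, 20L, 12L), total);
        System.out.println(total); // prints 42
    }
}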

From source file:com.thinkbiganalytics.nifi.v2.ingest.StripHeader.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final StripHeaderSupport headerSupport = new StripHeaderSupport();
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final boolean isEnabled = context.getProperty(ENABLED).evaluateAttributeExpressions(flowFile).asBoolean();
    final int headerCount = context.getProperty(HEADER_LINE_COUNT).evaluateAttributeExpressions(flowFile)
            .asInteger();

    // Empty files and no work to do will simply pass along content
    if (!isEnabled || headerCount == 0 || flowFile.getSize() == 0L) {
        final FlowFile contentFlowFile = session.clone(flowFile);
        session.transfer(contentFlowFile, REL_CONTENT);
        session.transfer(flowFile, REL_ORIGINAL);
        return;
    }

    final MutableLong headerBoundaryInBytes = new MutableLong(-1);

    session.read(flowFile, false, rawIn -> {
        try {
            // Identify the byte boundary of the header
            long bytes = headerSupport.findHeaderBoundary(headerCount, rawIn);
            headerBoundaryInBytes.setValue(bytes);

            if (bytes < 0) {
                getLog().error("Unable to strip header {} expecting at least {} lines in file",
                        new Object[] { flowFile, headerCount });
            }

        } catch (IOException e) {
            getLog().error("Unable to strip header {} due to {}; routing to failure",
                    new Object[] { flowFile, e.getLocalizedMessage() }, e);
        }

    });

    long headerBytes = headerBoundaryInBytes.getValue();
    if (headerBytes < 0) {
        session.transfer(flowFile, REL_FAILURE);
    } else {
        // Transfer header
        final FlowFile headerFlowFile = session.clone(flowFile, 0, headerBytes);
        session.transfer(headerFlowFile, REL_HEADER);

        // Transfer content
        long contentBytes = flowFile.getSize() - headerBytes;
        final FlowFile contentFlowFile = session.clone(flowFile, headerBytes, contentBytes);
        session.transfer(contentFlowFile, REL_CONTENT);

        session.transfer(flowFile, REL_ORIGINAL);
    }
}
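
The MutableLong here works around the effectively-final restriction on lambda captures: the session.read callback cannot reassign a local long, but it can call setValue on a captured MutableLong. A minimal sketch of that idiom (unrelated to NiFi, class name invented):

import java.util.stream.LongStream;
import org.apache.commons.lang3.mutable.MutableLong;

public class LambdaCaptureSketch {
    public static void main(String[] args) {
        // A plain local long could not be reassigned inside the lambda below.
        MutableLong max = new MutableLong(Long.MIN_VALUE);
        LongStream.of(3, 9, 4).forEach(v -> {
            if (v > max.longValue()) {
                max.setValue(v);
            }
        });
        System.out.println(max); // prints 9
    }
}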

From source file:de.biomedical_imaging.ij.steger.Convol.java

public double[] compute_gauss_mask_1(MutableLong num, double sigma) {
    int i, n;
    double limit;
    double[] h;

    limit = LinesUtil.MASK_SIZE(LinesUtil.MAX_SIZE_MASK_1, sigma); /* Error < 0.001 on each side */
    n = (int) limit;
    h = new double[2 * n + 1];

    for (i = -n + 1; i <= n - 1; i++)
        h[n + i] = phi1(-i + 0.5, sigma) - phi1(-i - 0.5, sigma);
    h[0] = -phi1(n - 0.5, sigma);
    h[2 * n] = phi1(-n + 0.5, sigma);
    num.setValue(n);
    return h;
}

From source file:de.biomedical_imaging.ij.steger.Convol.java

public double[] compute_gauss_mask_2(MutableLong num, double sigma) {
    int i, n;
    double limit;
    double[] h;

    limit = LinesUtil.MASK_SIZE(LinesUtil.MAX_SIZE_MASK_2, sigma); /* Error < 0.001 on each side */
    n = (int) limit;
    h = new double[2 * n + 1];

    for (i = -n + 1; i <= n - 1; i++)
        h[n + i] = phi2(-i + 0.5, sigma) - phi2(-i - 0.5, sigma);
    h[0] = -phi2(n - 0.5, sigma);
    h[2 * n] = phi2(-n + 0.5, sigma);
    num.setValue(n);
    return h;
}

From source file:de.biomedical_imaging.ij.steger.Convol.java

public double[] compute_gauss_mask_0(MutableLong num, double sigma) {

    int i, n;
    double limit;
    double[] h;

    limit = LinesUtil.MASK_SIZE(LinesUtil.MAX_SIZE_MASK_0, sigma); /* Error < 0.001 on each side */
    n = (int) limit;
    h = new double[2 * n + 1];
    for (i = -n + 1; i <= n - 1; i++)
        h[n + i] = phi0(-i + 0.5, sigma) - phi0(-i - 0.5, sigma);
    h[0] = 1.0 - phi0(n - 0.5, sigma);
    h[2 * n] = phi0(-n + 0.5, sigma);
    num.setValue(n);
    return h;
}
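
All three compute_gauss_mask_* variants use the MutableLong argument as an out-parameter: the method returns the mask array and additionally reports its half-width n through num.setValue(n), so the caller knows the mask spans indices -n..n. A self-contained sketch of the idiom (the stand-in method and its size rule are invented, not Steger's):

import org.apache.commons.lang3.mutable.MutableLong;

public class MaskSketch {
    // Stand-in for compute_gauss_mask_*: returns an array and reports
    // its half-width through the MutableLong out-parameter.
    static double[] makeMask(MutableLong num, double sigma) {
        int n = (int) Math.ceil(3 * sigma); // illustrative size rule only
        num.setValue(n);
        return new double[2 * n + 1];
    }

    public static void main(String[] args) {
        MutableLong num = new MutableLong();
        double[] mask = makeMask(num, 2.0);
        System.out.println("n = " + num + ", mask length = " + mask.length); // n = 6, mask length = 13
    }
}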

From source file:io.codis.nedis.handler.RedisResponseDecoder.java

private Long decodeLong(ByteBuf in) throws ProtocolException {
    byte sign = in.readByte();
    final MutableLong l;
    boolean negative;
    if (sign == '-') {
        negative = true;
        l = new MutableLong(0);
    } else {
        negative = false;
        l = new MutableLong(toDigit(sign));
    }
    final MutableBoolean reachCR = new MutableBoolean(false);
    setReaderIndex(in, in.forEachByte(new ByteBufProcessor() {

        @Override
        public boolean process(byte value) throws Exception {
            if (value == '\r') {
                reachCR.setTrue();
                return false;
            } else {
                if (value >= '0' && value <= '9') {
                    l.setValue(l.longValue() * 10 + toDigit(value));
                } else {
                    throw new ProtocolException("Response is not ended by CRLF");
                }
                return true;
            }
        }
    }));
    if (!reachCR.booleanValue()) {
        return null;
    }
    if (!in.isReadable()) {
        return null;
    }
    if (in.readByte() != '\n') {
        throw new ProtocolException("Response is not ended by CRLF");
    }
    return negative ? -l.longValue() : l.longValue();
}
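
Inside the ByteBufProcessor, each ASCII digit is folded into the running value with l.setValue(l.longValue() * 10 + toDigit(value)). The same digit-folding step in isolation (toDigit is assumed to map '0'..'9' to 0..9, matching the listing's usage; class name invented):

import org.apache.commons.lang3.mutable.MutableLong;

public class DigitFoldSketch {
    static int toDigit(byte b) {
        return b - '0'; // assumed contract of the toDigit used above
    }

    public static void main(String[] args) {
        MutableLong l = new MutableLong(0);
        for (byte b : "123".getBytes()) {
            l.setValue(l.longValue() * 10 + toDigit(b));
        }
        System.out.println(l); // prints 123
    }
}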

From source file:com.romeikat.datamessie.core.sync.service.template.withIdAndVersion.EntityWithIdAndVersionSynchronizer.java

private boolean deleteBatch(final TaskExecution taskExecution, final MutableLong firstId)
        throws TaskCancelledException {
    final TaskExecutionWork work = taskExecution.startWork();

    // Load RHS
    final List<Long> rhsIds = dao.getIds(rhsSessionProvider.getStatelessSession(), firstId.getValue(),
            batchSizeIds);
    if (rhsIds.isEmpty()) {
        rhsSessionProvider.closeStatelessSession();
        return false;
    }

    // Feedback
    final long lastId = rhsIds.get(rhsIds.size() - 1);
    final String msg = String.format("Processing batch %s to %s",
            LongConverter.INSTANCE.convertToString(firstId.getValue()),
            LongConverter.INSTANCE.convertToString(lastId));
    taskExecution.reportWorkStart(work, msg);

    // Delete RHS
    delete(rhsIds);
    firstId.setValue(lastId + 1);

    rhsSessionProvider.closeStatelessSession();
    taskExecution.reportWorkEnd(work);
    taskExecution.checkpoint();
    return true;
}

From source file:com.romeikat.datamessie.core.sync.service.template.withIdAndVersion.EntityWithIdAndVersionSynchronizer.java

private boolean createOrUpdateBatch(final TaskExecution taskExecution, final MutableLong firstId)
        throws TaskCancelledException {
    final TaskExecutionWork work = taskExecution.startWork();

    // Load LHS
    final TreeMap<Long, Long> lhsIdsWithVersion = dao
            .getIdsWithVersion(lhsSessionProvider.getStatelessSession(), firstId.getValue(), batchSizeIds);
    if (lhsIdsWithVersion.isEmpty()) {
        lhsSessionProvider.closeStatelessSession();
        rhsSessionProvider.closeStatelessSession();
        return false;
    }

    // Feedback
    final long lastId = lhsIdsWithVersion.lastKey();
    final String msg = String.format("Processing batch from %s to %s",
            LongConverter.INSTANCE.convertToString(firstId.getValue()),
            LongConverter.INSTANCE.convertToString(lastId));
    taskExecution.reportWorkStart(work, msg);

    // Create or update RHS
    createOrUpdate(lhsIdsWithVersion, lhsSessionProvider.getStatelessSession(),
            rhsSessionProvider.getStatelessSession(), taskExecution);
    firstId.setValue(lastId + 1);

    lhsSessionProvider.closeStatelessSession();
    rhsSessionProvider.closeStatelessSession();
    taskExecution.reportWorkEnd(work);
    taskExecution.checkpoint();
    return true;
}
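
In both batch methods the MutableLong firstId is a cursor that survives across calls: after each batch, firstId.setValue(lastId + 1) moves it past the last processed id, so the next invocation resumes there. A minimal sketch of the cursor idiom with the batch loading simulated (all names invented):

import org.apache.commons.lang3.mutable.MutableLong;

public class BatchCursorSketch {
    public static void main(String[] args) {
        MutableLong firstId = new MutableLong(0);
        final int batchSize = 100;
        while (firstId.longValue() < 1000) {      // 1000 simulates the end of the data
            long lastId = firstId.longValue() + batchSize - 1;
            // ... process ids firstId..lastId here ...
            firstId.setValue(lastId + 1);         // resume after the last processed id
        }
        System.out.println(firstId); // prints 1000
    }
}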

From source file:com.cg.mapreduce.fpgrowth.mahout.fpm.fpgrowth.FPGrowth.java

private static FrequentPatternMaxHeap growth(FPTree tree, MutableLong minSupportMutable, int k,
        FPTreeDepthCache treeCache, int level, int currentAttribute, StatusUpdater updater) {

    FrequentPatternMaxHeap frequentPatterns = new FrequentPatternMaxHeap(k, true);

    int i = Arrays.binarySearch(tree.getHeaderTableAttributes(), currentAttribute);
    if (i < 0) {
        return frequentPatterns;
    }

    int headerTableCount = tree.getHeaderTableCount();

    while (i < headerTableCount) {
        int attribute = tree.getAttributeAtIndex(i);
        long count = tree.getHeaderSupportCount(attribute);
        if (count < minSupportMutable.longValue()) {
            i++;
            continue;
        }
        updater.update("FPGrowth Algorithm for a given feature: " + attribute);
        FPTree conditionalTree = treeCache.getFirstLevelTree(attribute);
        if (conditionalTree.isEmpty()) {
            traverseAndBuildConditionalFPTreeData(tree.getHeaderNext(attribute), minSupportMutable.longValue(),
                    conditionalTree, tree);
            // printTree(conditionalTree);

        }

        FrequentPatternMaxHeap returnedPatterns;
        if (attribute == currentAttribute) {

            returnedPatterns = growthTopDown(conditionalTree, minSupportMutable, k, treeCache, level + 1, true,
                    currentAttribute, updater);

            frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, true);
        } else {
            returnedPatterns = growthTopDown(conditionalTree, minSupportMutable, k, treeCache, level + 1, false,
                    currentAttribute, updater);
            frequentPatterns = mergeHeap(frequentPatterns, returnedPatterns, attribute, count, false);
        }
        if (frequentPatterns.isFull() && minSupportMutable.longValue() < frequentPatterns.leastSupport()) {
            minSupportMutable.setValue(frequentPatterns.leastSupport());
        }
        i++;
    }

    return frequentPatterns;
}
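
Here minSupportMutable is a shared threshold that is tightened while mining: once the pattern heap is full, setValue raises the minimum support to the heap's least support, so weaker candidates are pruned earlier. A sketch of that raise-the-bar idiom using a plain top-k min-heap (all names invented, unrelated to Mahout's heap class):

import java.util.PriorityQueue;
import org.apache.commons.lang3.mutable.MutableLong;

public class RaiseTheBarSketch {
    public static void main(String[] args) {
        final int k = 3;
        PriorityQueue<Long> topK = new PriorityQueue<>(); // min-heap of supports
        MutableLong minSupport = new MutableLong(1);
        for (long support : new long[] { 5, 2, 9, 7, 4 }) {
            if (support < minSupport.longValue()) {
                continue;                                 // pruned by the current bar
            }
            topK.offer(support);
            if (topK.size() > k) {
                topK.poll();                              // drop the weakest pattern
            }
            if (topK.size() == k && minSupport.longValue() < topK.peek()) {
                minSupport.setValue(topK.peek());         // raise the bar
            }
        }
        System.out.println(topK + ", minSupport = " + minSupport); // [5, 7, 9], minSupport = 5
    }
}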