Example usage for org.joda.time Duration ZERO

Introduction

This page collects example usages of the org.joda.time Duration.ZERO constant from open-source projects; each entry names the source file and its license.

Prototype

public static final Duration ZERO

Document

Constant representing a zero-millisecond duration.
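
A minimal, self-contained sketch (not taken from the projects below) of the two most common uses of Duration.ZERO: as the identity value when accumulating durations, and as the reference point for sign checks:

import org.joda.time.Duration;

public class DurationZeroDemo {
    public static void main(String[] args) {
        // Duration.ZERO is the natural starting value when summing durations.
        Duration total = Duration.ZERO;
        total = total.plus(Duration.standardMinutes(90));
        total = total.plus(Duration.standardSeconds(30));
        System.out.println(total.getMillis()); // 5430000

        // Comparing against Duration.ZERO answers "is this duration negative?"
        Duration diff = total.minus(Duration.standardHours(2));
        System.out.println(diff.isShorterThan(Duration.ZERO)); // true
    }
}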

Usage

From source file: au.id.hazelwood.xmltvguidebuilder.postprocessor.ListingVerifier.java

License: Apache License

public void verifyListing(ChannelListings listings, DateTime from, DateTime to, DateTime subsetTo) {
    Duration listingDurationTotal = new Interval(from, to).toDuration();
    Duration listingDurationSubset = new Interval(from, subsetTo).toDuration();
    LOGGER.info(repeat("-", 100));
    for (ChannelDetail channelDetail : listings.getChannels()) {
        Duration missingDurationTotal = Duration.ZERO;
        Duration missingDurationSubset = Duration.ZERO;
        StringBuilder allMissingIntervalDetails = new StringBuilder();
        for (Interval missing : findMissingIntervals(listings, from, to, channelDetail.getId())) {
            missingDurationTotal = missingDurationTotal.plus(missing.toDuration());
            if (missing.getStart().isBefore(subsetTo)) {
                if (missing.getEnd().isBefore(subsetTo)) {
                    missingDurationSubset = missingDurationSubset.plus(missing.toDuration());
                } else {
                    missingDurationSubset = missingDurationSubset
                            .plus(new Duration(missing.getStart(), subsetTo));
                }
            }
            allMissingIntervalDetails.append(allMissingIntervalDetails.length() == 0 ? "missing " : ", ");
            allMissingIntervalDetails.append(
                    format("{0}-{1}", toISODateTime(missing.getStart()), toISODateTime(missing.getEnd())));
        }
        Duration availableDurationTotal = listingDurationTotal.minus(missingDurationTotal);
        Duration availableDurationSubset = listingDurationSubset.minus(missingDurationSubset);
        Integer availablePercentageTotal = getPercentage(availableDurationTotal, listingDurationTotal);
        Integer availablePercentageSubset = getPercentage(availableDurationSubset, listingDurationSubset);
        LOGGER.info("{} {} [{}|{}] {}", rightPad(channelDetail.getId() + " - " + channelDetail.getName(), 42),
                formatDurationDHM(availableDurationTotal.getMillis()),
                leftPad(availablePercentageSubset + "%", 4), leftPad(availablePercentageTotal + "%", 4),
                allMissingIntervalDetails.toString());
    }
    LOGGER.info(repeat("-", 100));
}
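
In this example Duration.ZERO is the identity element for the running sums: each channel starts with zero missing time and accumulates gap durations via plus(...), so a channel with no gaps reports the full listing duration as available.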

From source file: ch.eitchnet.android.util.JodaHelper.java

License: Open Source License

public static String toHourMinute(Duration duration) {
    PeriodFormatterBuilder builder = new PeriodFormatterBuilder();
    builder.printZeroAlways();
    if (duration.isShorterThan(Duration.ZERO))
        builder.appendLiteral("-");
    builder.minimumPrintedDigits(2).appendHours().appendLiteral(":").minimumPrintedDigits(2).appendMinutes();
    return builder.toFormatter().print(new Period(Math.abs(duration.getMillis())));
}
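
The Duration.ZERO comparison supplies the sign handling here: the formatter only ever sees the absolute value (wrapped in a Period), so a leading minus is prepended manually for negative durations. A quick usage sketch (hypothetical input, assuming the method above):

// -90 minutes renders as "-01:30".
String formatted = JodaHelper.toHourMinute(new Duration(-90 * 60 * 1000L));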

From source file: ch.oakmountain.tpa.solver.TrainPathApplicationStatistics.java

License: Apache License

List<String> compileAndGetTrainPathApplicationListRow() throws IOException {
    HashMap<TrainPathSlot, Vertex> slotVertexHashMap = new HashMap<>();
    HashMap<SystemNode, Set<TrainPathSlot>> systemNodeTrainPathSlotHashMap = new HashMap<>();
    HashMap<SystemNode, Set<Pair<TrainPathSlot, TrainPathSlot>>> connectionsThroughSystemNode = new HashMap<>();
    for (Vertex vertex : dag.getVerticies()) {
        if (vertex.isLeaf() || vertex.isRoot()) {
            continue;
        }
        TrainPathSlot trainPathSlot = dag.getSlotFromVertex(vertex.getLabel());
        slotVertexHashMap.put(trainPathSlot, vertex);
        SystemNode from = trainPathSlot.getFrom();
        SystemNode to = trainPathSlot.getTo();
        initSystemNodeInMaps(systemNodeTrainPathSlotHashMap, connectionsThroughSystemNode, to);
        initSystemNodeInMaps(systemNodeTrainPathSlotHashMap, connectionsThroughSystemNode, from);
        systemNodeTrainPathSlotHashMap.get(from).add(trainPathSlot);

        for (Vertex child : vertex.getChildren()) {
            // Skip sentinel root/leaf children.
            if (child.isLeaf() || child.isRoot()) {
                continue;
            }
            TrainPathSlot childSlot = dag.getSlotFromVertex(child.getLabel());
            Pair<TrainPathSlot, TrainPathSlot> connection = new Pair<TrainPathSlot, TrainPathSlot>(
                    trainPathSlot, childSlot);

            connectionsThroughSystemNode.get(to).add(connection);
        }
    }
    int minSlotsPerSystemNode = Integer.MAX_VALUE;
    int maxSlotsPerSystemNode = Integer.MIN_VALUE;

    for (SystemNode systemNode : systemNodeTrainPathSlotHashMap.keySet()) {

        Set<TrainPathSlot> succSlots = systemNodeTrainPathSlotHashMap.get(systemNode);
        int nbSuccSlots = succSlots.size();
        maxSlotsPerSystemNode = Math.max(nbSuccSlots, maxSlotsPerSystemNode);
        minSlotsPerSystemNode = Math.min(nbSuccSlots, minSlotsPerSystemNode);

        Duration minDwellTime = new Duration(Long.MAX_VALUE);
        Duration maxDwellTime = Duration.ZERO;
        Duration totalDwellTime = Duration.ZERO;

        Set<Pair<TrainPathSlot, TrainPathSlot>> connections = connectionsThroughSystemNode.get(systemNode);
        String dwellStats = "--";
        if (!systemNode.equals(simpleTrainPathApplication.getTo())
                && !systemNode.equals(simpleTrainPathApplication.getFrom())) {
            for (Pair<TrainPathSlot, TrainPathSlot> trainPathSlotTrainPathSlotPair : connections) {
                Duration dwell = trainPathSlotTrainPathSlotPair.second.getStartTime()
                        .distanceAfter(trainPathSlotTrainPathSlotPair.first.getEndTime());
                if (dwell.isShorterThan(Duration.ZERO)) {
                    throw new IllegalStateException("Negative dwell time between connected slots");
                }
                if (dwell.isLongerThan(maxDwellTime)) {
                    maxDwellTime = dwell;
                }
                if (dwell.isShorterThan(minDwellTime)) {
                    minDwellTime = dwell;
                }
                totalDwellTime = totalDwellTime.plus(dwell);
            }
            dwellStats = PeriodicalTimeFrame.formatDuration(minDwellTime) + "/"
                    + PeriodicalTimeFrame.formatDuration(maxDwellTime) + "/"
                    + PeriodicalTimeFrame.formatDuration(
                            totalDwellTime.dividedBy(connectionsThroughSystemNode.get(systemNode).size()));
        }

        String timeWindow;
        if (systemNode.equals(simpleTrainPathApplication.getFrom())) {
            timeWindow = "[" + simpleTrainPathApplication.getParams().getDepartureLowerBound().toString() + ","
                    + simpleTrainPathApplication.getParams().getDepartureUpperBound().toString() + "]";
        } else if (systemNode.equals(simpleTrainPathApplication.getTo())) {
            timeWindow = "[" + simpleTrainPathApplication.getParams().getArrivalLowerBound().toString() + ","
                    + simpleTrainPathApplication.getParams().getArrivalUpperBound().toString() + "]";
        } else {
            timeWindow = "[arr+ "
                    + PeriodicalTimeFrame.formatDuration(simpleTrainPathApplication.getParams()
                            .getMINIMUM_DWELL_TIME())
                    + ", arr+"
                    + PeriodicalTimeFrame.formatDuration(simpleTrainPathApplication.getParams()
                            .getHARD_MINIMUM_DWELL_TIME().plus(simpleTrainPathApplication.getParams()
                                    .getMAXIMUM_ADDITIONAL_DWELL_TIME(systemNode)))
                    + "]";
        }
        table.writeRow(Arrays.asList(
                systemNode.getName(), String.valueOf(nbSuccSlots), timeWindow, "["
                        + PeriodicalTimeFrame
                                .formatDuration(simpleTrainPathApplication.getParams().getMINIMUM_DWELL_TIME())
                        + ","
                        + PeriodicalTimeFrame.formatDuration(simpleTrainPathApplication.getParams()
                                .getMAXIMUM_ADDITIONAL_DWELL_TIME(systemNode))
                        + "]",
                "Min/max/average slots", dwellStats));
    }

    List<String> data = Arrays.asList(simpleTrainPathApplication.getName(),
            simpleTrainPathApplication.getFrom().getName(), simpleTrainPathApplication.getTo().getName(),
            simpleTrainPathApplication.getStartTime().toString(),
            simpleTrainPathApplication.getEndTime().toString(),
            PeriodicalTimeFrame
                    .formatDuration(simpleTrainPathApplication.getParams().getHARD_MAXIMUM_EARLIER_DEPARTURE()),
            PeriodicalTimeFrame
                    .formatDuration(simpleTrainPathApplication.getParams().getHARD_MAXIMUM_LATER_ARRIVAL()),
            PeriodicalTimeFrame
                    .formatDuration(simpleTrainPathApplication.getParams().getHARD_MINIMUM_DWELL_TIME()),
            String.valueOf(dag.nbPaths()), String.valueOf(dag.getCyclomaticComplexity()));

    table.finishTable();
    return data;
}

From source file: com.almende.dsol.example.datacenters.FederationModelSimulationGUI.java

License: Apache License

private void simStart() {
    this.startButton.setText("Running");

    disableGUI(true);
    doChartUpdate(this.sac.getData());

    // clear current indicators and chart series data
    Datacenter.ID_COUNT = 0;
    synchronized (this.sac.getData()) {
        this.sac.getData().clear();
        this.indicators.clear();
    }

    this.model = new FederationModel(FEDERATION_SIZE, !AllocationPolicy.LOCAL.equals(policySelect.getValue()),
            AllocationPolicy.BROKERED.equals(policySelect.getValue()));

    // listen for new data centers to synchronize update graph time series
    this.model.addListener(new EventListenerInterface() {

        @Override
        public void notify(final EventInterface event) throws RemoteException {
            final Datacenter dc = (Datacenter) event.getContent();

            setupSeries(chooseIndicator(dc));
        }
    }, FederationModelComponent.NEW_DC);

    this.endReport = false;
    try {

        LOG.trace("Replication initializing...");
        final DEVSSimulatorInterface sim = new DEVSSimulator();
        final Context ctx = new InitialContext();
        final short mode = Treatment.REPLICATION_MODE_TERMINATING;
        final Experiment exp = new Experiment(ctx.createSubcontext("/exp"));
        final TimeUnitInterface timeUnit = TimeUnitInterface.DAY;

        exp.setSimulator(sim);
        exp.setModel(this.model);
        exp.setAnalyst("A4G");
        exp.setDescription("WP5 Phase 2");
        exp.setTreatment(new Treatment(exp, mode));
        exp.getTreatment().setStartTime(DateTime.now().withDayOfMonth(1).withTimeAtStartOfDay().getMillis());
        exp.getTreatment().setTimeUnit(timeUnit);
        exp.getTreatment().setWarmupPeriod(Double.valueOf(this.warmUpField.getText()));
        exp.getTreatment().setRunLength(Double.valueOf(this.lengthField.getText()) + .000001);
        final Replication repl = new Replication(exp.getContext().createSubcontext("/rep"), exp);
        final long seed = Long.valueOf(this.seedStartField.getText());
        this.seedStartField.setText(Long.toString(seed + 1));
        repl.setStreams(
                Collections.singletonMap(FederationModel.RNG_ID, (StreamInterface) new MersenneTwister()));
        sim.initialize(repl, mode);

        // start statistics flow to listen and draw
        scheduleStats(Duration.ZERO);

        ((NumberAxis) this.sac.getXAxis()).setLowerBound(sim.getReplication().getTreatment().getWarmupPeriod());
        ((NumberAxis) this.sac.getXAxis()).setUpperBound(sim.getReplication().getTreatment().getRunLength());

        // listen for simulation start/resume
        sim.addListener(new EventListenerInterface() {
            @Override
            public void notify(final EventInterface event) {
                LOG.trace("Sim started/resumed, t= " + model.getDateTime());
            }
        }, Simulator.START_EVENT);

        // listen for simulation ended
        sim.addListener(new EventListenerInterface() {
            @Override
            public void notify(final EventInterface event) {
                simEnded();
            }
        }, Simulator.END_OF_REPLICATION_EVENT);

        LOG.trace("Replication initialized, starting...");
        sim.start();
    } catch (final Exception e) {
        LOG.error("Problem reaching/starting sim", e);
    }
}

From source file: com.arpnetworking.metrics.mad.PeriodWorker.java

License: Apache License

/**
 * {@inheritDoc}
 */
@Override
public void run() {
    Thread.currentThread()
            .setUncaughtExceptionHandler((thread, throwable) -> LOGGER.error().setMessage("Unhandled exception")
                    .addData("periodWorker", PeriodWorker.this).setThrowable(throwable).log());

    while (_isRunning) {
        try {
            DateTime now = DateTime.now();
            final DateTime rotateAt = getRotateAt(now);
            Duration timeToRotate = new Duration(now, rotateAt);
            while (_isRunning && timeToRotate.isLongerThan(Duration.ZERO)) {
                // Process records or sleep
                Record recordToProcess = _recordQueue.poll();
                if (recordToProcess != null) {
                    while (recordToProcess != null) {
                        process(recordToProcess);
                        recordToProcess = _recordQueue.poll();
                    }
                } else {
                    Thread.sleep(Math.min(timeToRotate.getMillis(), 100));
                }
                // Recompute time to close
                now = DateTime.now();
                timeToRotate = new Duration(now, rotateAt);
            }
            // Drain the record queue before rotating
            final List<Record> recordsToProcess = Lists.newArrayList();
            _recordQueue.drainTo(recordsToProcess);
            for (final Record recordToProcess : recordsToProcess) {
                process(recordToProcess);
            }
            // Rotate
            rotate(now);
        } catch (final InterruptedException e) {
            Thread.interrupted();
            LOGGER.warn().setMessage("Interrupted waiting to close buckets").setThrowable(e).log();
            // CHECKSTYLE.OFF: IllegalCatch - Top level catch to prevent thread death
        } catch (final Exception e) {
            // CHECKSTYLE.ON: IllegalCatch
            LOGGER.error().setMessage("Aggregator failure").addData("periodWorker", this).setThrowable(e).log();
        }
    }
}
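
Here Duration.ZERO anchors a countdown idiom: timeToRotate is recomputed from the current time on each pass, and isLongerThan(Duration.ZERO) keeps the loop alive only while the rotation deadline lies in the future, so the worker drains its queue and rotates as soon as the deadline passes.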

From source file: com.buabook.exacttarget.fuelsdk.AutoRefreshETClient.java

License: GNU General Public License

private void initialiseRefreshTracking(Duration refreshTokenLife) throws IllegalArgumentException {
    if (refreshTokenLife == null || refreshTokenLife.isEqual(Duration.ZERO))
        throw new IllegalArgumentException("Invalid refresh token life duration");

    this.refreshTokenLife = refreshTokenLife;
    this.lastRefreshTime = DateTime.now();

    log.info("ExactTarget client with auto-refresh of refresh token created [ Refresh Duration: "
            + refreshTokenLife + " ]");
}
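
Note that isEqual(Duration.ZERO) compares millisecond lengths, so the guard rejects only null and exactly-zero lifetimes; rejecting negative lifetimes as well would need an extra check such as refreshTokenLife.isShorterThan(Duration.ZERO) (a hypothetical hardening, not part of the original code).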

From source file: com.dataartisans.flink.dataflow.examples.streaming.AutoComplete.java

License: Apache License

public static void main(String[] args) throws IOException {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    options.setStreaming(true);
    options.setCheckpointingInterval(1000L);
    options.setNumberOfExecutionRetries(5);
    options.setExecutionRetryDelay(3000L);
    options.setRunner(FlinkPipelineRunner.class);

    PTransform<? super PBegin, PCollection<String>> readSource = Read
            .from(new UnboundedSocketSource<>("localhost", 9999, '\n', 3)).named("WordStream");
    WindowFn<Object, ?> windowFn = FixedWindows.of(Duration.standardSeconds(options.getWindowSize()));

    // Create the pipeline.
    Pipeline p = Pipeline.create(options);
    PCollection<KV<String, List<CompletionCandidate>>> toWrite = p.apply(readSource)
            .apply(ParDo.of(new ExtractWordsFn()))
            .apply(Window.<String>into(windowFn).triggering(AfterWatermark.pastEndOfWindow())
                    .withAllowedLateness(Duration.ZERO).discardingFiredPanes())
            .apply(ComputeTopCompletions.top(10, options.getRecursive()));

    toWrite.apply(ParDo.named("FormatForPerTaskFile").of(new FormatForPerTaskLocalFile()))
            .apply(TextIO.Write.to("./outputAutoComplete.txt"));

    p.run();
}
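
In this and the following windowing examples, Duration.ZERO is passed to withAllowedLateness(...): no lateness is tolerated, so once the watermark passes the end of a window, late-arriving elements for that window are dropped rather than firing additional panes.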

From source file: com.dataartisans.flink.dataflow.examples.streaming.JoinExamples.java

License: Apache License

public static void main(String[] args) throws Exception {
    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    options.setStreaming(true);
    options.setCheckpointingInterval(1000L);
    options.setNumberOfExecutionRetries(5);
    options.setExecutionRetryDelay(3000L);
    options.setRunner(FlinkPipelineRunner.class);

    PTransform<? super PBegin, PCollection<String>> readSourceA = Read
            .from(new UnboundedSocketSource<>("localhost", 9999, '\n', 3)).named("FirstStream");
    PTransform<? super PBegin, PCollection<String>> readSourceB = Read
            .from(new UnboundedSocketSource<>("localhost", 9998, '\n', 3)).named("SecondStream");

    WindowFn<Object, ?> windowFn = FixedWindows.of(Duration.standardSeconds(options.getWindowSize()));

    Pipeline p = Pipeline.create(options);

    // The following two 'apply' calls create multiple inputs to our pipeline,
    // one for each of our two input sources.
    PCollection<String> streamA = p.apply(readSourceA)
            .apply(Window.<String>into(windowFn).triggering(AfterWatermark.pastEndOfWindow())
                    .withAllowedLateness(Duration.ZERO).discardingFiredPanes());
    PCollection<String> streamB = p.apply(readSourceB)
            .apply(Window.<String>into(windowFn).triggering(AfterWatermark.pastEndOfWindow())
                    .withAllowedLateness(Duration.ZERO).discardingFiredPanes());

    PCollection<String> formattedResults = joinEvents(streamA, streamB);
    formattedResults.apply(TextIO.Write.to("./outputJoin.txt"));
    p.run();
}

From source file: com.dataartisans.flink.dataflow.examples.streaming.KafkaWindowedWordCountExample.java

License: Apache License

public static void main(String[] args) {
    PipelineOptionsFactory.register(KafkaStreamingWordCountOptions.class);
    KafkaStreamingWordCountOptions options = PipelineOptionsFactory.fromArgs(args)
            .as(KafkaStreamingWordCountOptions.class);
    options.setJobName("KafkaExample - WindowSize: " + options.getWindowSize() + " seconds");
    options.setStreaming(true);
    options.setCheckpointingInterval(1000L);
    options.setNumberOfExecutionRetries(5);
    options.setExecutionRetryDelay(3000L);
    options.setRunner(FlinkPipelineRunner.class);

    System.out.println(options.getKafkaTopic() + " " + options.getZookeeper() + " " + options.getBroker() + " "
            + options.getGroup());
    Pipeline pipeline = Pipeline.create(options);

    Properties p = new Properties();
    p.setProperty("zookeeper.connect", options.getZookeeper());
    p.setProperty("bootstrap.servers", options.getBroker());
    p.setProperty("group.id", options.getGroup());

    // this is the Flink consumer that reads the input to
    // the program from a kafka topic.
    FlinkKafkaConsumer08<String> kafkaConsumer = new FlinkKafkaConsumer08<>(options.getKafkaTopic(),
            new SimpleStringSchema(), p);

    PCollection<String> words = pipeline
            .apply(Read.from(new UnboundedFlinkSource<>(kafkaConsumer)).named("StreamingWordCount"))
            .apply(ParDo.of(new ExtractWordsFn()))
            .apply(Window.<String>into(FixedWindows.of(Duration.standardSeconds(options.getWindowSize())))
                    .triggering(AfterWatermark.pastEndOfWindow()).withAllowedLateness(Duration.ZERO)
                    .discardingFiredPanes());

    PCollection<KV<String, Long>> wordCounts = words.apply(Count.<String>perElement());

    wordCounts.apply(ParDo.of(new FormatAsStringFn())).apply(TextIO.Write.to("./outputKafka.txt"));

    pipeline.run();
}

From source file: com.dataartisans.flink.dataflow.examples.streaming.WindowedWordCount.java

License: Apache License

public static void main(String[] args) throws IOException {
    StreamingWordCountOptions options = PipelineOptionsFactory.fromArgs(args).withValidation()
            .as(StreamingWordCountOptions.class);
    options.setStreaming(true);
    options.setWindowSize(10L);
    options.setSlide(5L);
    options.setCheckpointingInterval(1000L);
    options.setNumberOfExecutionRetries(5);
    options.setExecutionRetryDelay(3000L);
    options.setRunner(FlinkPipelineRunner.class);

    LOG.info("Windpwed WordCount with Sliding Windows of " + options.getWindowSize() + " sec. and a slide of "
            + options.getSlide());

    Pipeline pipeline = Pipeline.create(options);

    PCollection<String> words = pipeline
            .apply(Read
                    .from(new UnboundedSocketSource<>("localhost", 9999, '\n', 3)).named("StreamingWordCount"))
            .apply(ParDo.of(new ExtractWordsFn()))
            .apply(Window
                    .<String>into(SlidingWindows.of(Duration.standardSeconds(options.getWindowSize()))
                            .every(Duration.standardSeconds(options.getSlide())))
                    .triggering(AfterWatermark.pastEndOfWindow()).withAllowedLateness(Duration.ZERO)
                    .discardingFiredPanes());

    PCollection<KV<String, Long>> wordCounts = words.apply(Count.<String>perElement());

    wordCounts.apply(ParDo.of(new FormatAsStringFn())).apply(TextIO.Write.to("./outputWordCount.txt"));

    pipeline.run();
}