Example usage for org.joda.time Duration standardMinutes

List of usage examples for org.joda.time Duration standardMinutes

Introduction

On this page you can find example usage for org.joda.time Duration standardMinutes.

Prototype

public static Duration standardMinutes(long minutes) 

Document

Create a duration with the specified number of minutes assuming that there are the standard number of milliseconds in a minute.
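As a quick illustration of that contract, here is a minimal sketch (the class and variable names are ours, not taken from any of the examples below):

import org.joda.time.Duration;

public class StandardMinutesDemo {
    public static void main(String[] args) {
        // A standard minute is exactly 60,000 milliseconds; no calendar
        // or time-zone arithmetic is involved.
        Duration fiveMinutes = Duration.standardMinutes(5);
        System.out.println(fiveMinutes.getMillis());          // 300000
        System.out.println(fiveMinutes.getStandardSeconds()); // 300
    }
}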

Usage

From source file:$.WindowedWordCount.java

License:Apache License

public static void main(String[] args) throws IOException {
        Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
        options.setBigQuerySchema(getSchema());
        // ExampleUtils creates the necessary input sources to simplify execution of this Pipeline.
        ExampleUtils exampleUtils = new ExampleUtils(options);
        exampleUtils.setup();

        Pipeline pipeline = Pipeline.create(options);

        /**
         * Concept #1: the Beam SDK lets us run the same pipeline with either a bounded or
         * unbounded input source.
         */
        PCollection<String> input = pipeline
                /** Read from the GCS file. */
                .apply(TextIO.Read.from(options.getInputFile()))
                // Concept #2: Add an element timestamp, using an artificial time just to show windowing.
                // See AddTimestampFn for more detail on this.
                .apply(ParDo.of(new AddTimestampFn()));

        /**
         * Concept #3: Window into fixed windows. The fixed window size for this example defaults to 1
         * minute (you can change this with a command-line option). See the documentation for more
         * information on how fixed windows work, and for information on the other types of windowing
         * available (e.g., sliding windows).
         */
        PCollection<String> windowedWords = input
                .apply(Window.<String>into(FixedWindows.of(Duration.standardMinutes(options.getWindowSize()))));

        /**
         * Concept #4: Re-use our existing CountWords transform that does not have knowledge of
         * windows over a PCollection containing windowed values.
         */
        PCollection<KV<String, Long>> wordCounts = windowedWords.apply(new WordCount.CountWords());

        /**
         * Concept #5: Format the results for a BigQuery table, then write to BigQuery.
         * The BigQuery output source supports both bounded and unbounded data.
         */
        wordCounts.apply(ParDo.of(new FormatAsTableRowFn()))
                .apply(BigQueryIO.Write.to(getTableReference(options)).withSchema(getSchema())
                        .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED)
                        .withWriteDisposition(BigQueryIO.Write.WriteDisposition.WRITE_APPEND));

        PipelineResult result = pipeline.run();

        // ExampleUtils will try to cancel the pipeline before the program exits.
        exampleUtils.waitToFinish(result);
    }

From source file:com.brighttag.agathon.dao.zerg.ZergDaoModule.java

License:Apache License

@Override
protected void configure() {
    bind(Gson.class).in(Singleton.class);
    bindConstant().annotatedWith(Names.named(ZERG_MANIFEST_URL_PROPERTY))
            .to(System.getProperty(ZERG_MANIFEST_URL_PROPERTY, ZERG_MANIFEST_URL_DEFAULT));
    bindConstant().annotatedWith(Names.named(ZERG_REGION_PROPERTY))
            .to(checkNotNull(System.getProperty(ZERG_REGION_PROPERTY), "Zerg region not specified"));
    bindConstant().annotatedWith(Names.named(ZERG_RING_CONFIG_PROPERTY)).to(
            checkNotNull(System.getProperty(ZERG_RING_CONFIG_PROPERTY), "Zerg ring config file not specified"));
    // Yes, Zerg is THIS SLOW when the manifest isn't cached, especially in AWS.
    bind(Duration.class).annotatedWith(Names.named(ZERG_REQUEST_TIMEOUT_PROPERTY))
            .toInstance(Duration.standardSeconds(Long.getLong(ZERG_REQUEST_TIMEOUT_PROPERTY, 20)));
    bind(Duration.class).annotatedWith(Names.named(ZERG_CONNECTION_TIMEOUT_PROPERTY))
            .toInstance(Duration.standardSeconds(Long.getLong(ZERG_CONNECTION_TIMEOUT_PROPERTY, 20)));
    bind(Duration.class).annotatedWith(Names.named(ZERG_CACHE_TIMEOUT_PROPERTY))
            .toInstance(Duration.standardMinutes(Long.getLong(ZERG_CACHE_TIMEOUT_PROPERTY, 1)));
    bind(ZergConnector.class).to(ZergConnectorImpl.class).in(Singleton.class);
    bind(CassandraRingDao.class).to(ZergCassandraRingDao.class).in(Singleton.class);
    bind(CassandraInstanceDao.class).to(ZergCassandraInstanceDao.class).in(Singleton.class);
    expose(CassandraRingDao.class);
    expose(CassandraInstanceDao.class);
}

From source file:com.cubeia.games.poker.tournament.PokerTournament.java

License:Open Source License

private void scheduleNextBlindsLevel() {
    Duration levelDuration = Duration
            .standardMinutes(pokerState.getCurrentBlindsLevel().getDurationInMinutes());
    long millisecondsToNextLevel = levelDuration.getMillis();
    pokerState.setNextLevelStartTime(dateFetcher.date().plus(levelDuration));
    log.debug("Scheduling next blinds level in " + millisecondsToNextLevel + " millis, for tournament "
            + instance);
    instance.getScheduler().scheduleAction(
            new MttObjectAction(instance.getId(), TournamentTrigger.INCREASE_LEVEL), millisecondsToNextLevel);
}

From source file:com.dataartisans.flink.dataflow.examples.streaming.WindowWordCount.java

License:Apache License

public static void main(String[] args) {

    Options options = PipelineOptionsFactory.fromArgs(args).as(Options.class);
    options.setRunner(FlinkPipelineRunner.class);

    options.setStreaming(true);

    Pipeline p = Pipeline.create(options);

    p.apply(TextIO.Read.named("ReadLines").from(options.getInput())).apply(ParDo.of(new Tokenizer()))
            .apply(Window.<String>into(FixedWindows.of(Duration.standardMinutes(1))))
            .apply(Count.<String>perElement()).apply(ParDo.of(new FormatCountsFn())).apply(TextIO.Write
                    .named("WriteCounts").to(options.getOutput()).withNumShards(options.getNumShards()));

    p.run();
}

From source file:com.dataartisans.flink.dataflow.streaming.TopWikipediaSessionsITCase.java

License:Apache License

@Override
protected void testProgram() throws Exception {

    Pipeline p = FlinkTestPipeline.createForStreaming();

    Long now = (System.currentTimeMillis() + 10000) / 1000;

    PCollection<KV<String, Long>> output = p
            .apply(Create
                    .of(Arrays.asList(new TableRow().set("timestamp", now).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now + 10).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now + 2).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 1).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 5).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 7).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 8).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 200).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 230).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now + 230).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 240).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 245).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 235).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 236).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 237).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 238).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 239).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 240).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 241).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user3"))))

            .apply(ParDo.of(new DoFn<TableRow, String>() {
                @Override
                public void processElement(ProcessContext c) throws Exception {
                    TableRow row = c.element();
                    long timestamp = (Integer) row.get("timestamp");
                    String userName = (String) row.get("contributor_username");
                    if (userName != null) {
                        // Sets the timestamp field to be used in windowing.
                        c.outputWithTimestamp(userName, new Instant(timestamp * 1000L));
                    }
                }
            }))

            .apply(Window.<String>into(Sessions.withGapDuration(Duration.standardMinutes(1))))

            .apply(Count.<String>perElement());

    PCollection<String> format = output.apply(ParDo.of(new DoFn<KV<String, Long>, String>() {
        @Override
        public void processElement(ProcessContext c) throws Exception {
            KV<String, Long> el = c.element();
            String out = "user: " + el.getKey() + " value:" + el.getValue();
            c.output(out);
        }
    }));

    format.apply(TextIO.Write.to(resultPath));

    p.run();
}

From source file:com.dataartisans.flink.dataflow.streaming.WindowWordCountITCase.java

License:Apache License

@Override
protected void testProgram() throws Exception {

    Pipeline p = FlinkStreamingTestPipeline.create();

    PCollection<String> input = p.apply(Create.of(WORDS)).setCoder(StringUtf8Coder.of());

    PCollection<String> output = input.apply(ParDo.of(new WindowWordCount.Tokenizer()))
            .apply(Window.<String>into(FixedWindows.of(Duration.standardMinutes(1))))
            .apply(Count.<String>perElement()).apply(ParDo.of(new WindowWordCount.FormatCountsFn()));

    output.apply(TextIO.Write.to(resultPath));
    output.apply(TextIO.Write.to("testoutput"));

    p.run();
}

From source file:com.dataartisans.flink.dataflow.TopWikipediaSessionsITCase.java

License:Apache License

@Override
protected void testProgram() throws Exception {

    Pipeline p = FlinkTestPipeline.createStreaming();

    long now = System.currentTimeMillis() + 10000;
    System.out.println((now + 5000) / 1000);

    PCollection<KV<String, Long>> output = p
            .apply(Create
                    .of(Arrays.asList(new TableRow().set("timestamp", now).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now + 10).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now + 2).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 1).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 5).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 7).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 8).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 200).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 230).set("contributor_username", "user1"),
                            new TableRow().set("timestamp", now + 230).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 240).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now + 245).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 235).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 236).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 237).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 238).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 239).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 240).set("contributor_username", "user3"),
                            new TableRow().set("timestamp", now + 241).set("contributor_username", "user2"),
                            new TableRow().set("timestamp", now).set("contributor_username", "user3"))))

            .apply(ParDo.of(new DoFn<TableRow, String>() {
                @Override
                public void processElement(ProcessContext c) throws Exception {
                    TableRow row = c.element();
                    long timestamp = (Long) row.get("timestamp");
                    String userName = (String) row.get("contributor_username");
                    if (userName != null) {
                        // Sets the timestamp field to be used in windowing.
                        c.outputWithTimestamp(userName, new Instant(timestamp * 1000L));
                    }
                }
            }))

            .apply(ParDo.named("SampleUsers").of(new DoFn<String, String>() {
                private static final long serialVersionUID = 0;

                @Override
                public void processElement(ProcessContext c) {
                    if (Math.abs(c.element().hashCode()) <= Integer.MAX_VALUE * 1.0) {
                        c.output(c.element());
                    }
                }
            }))

            .apply(Window.<String>into(Sessions.withGapDuration(Duration.standardMinutes(1))))
            .apply(Count.<String>perElement());

    PCollection<String> format = output.apply(ParDo.of(new DoFn<KV<String, Long>, String>() {
        @Override
        public void processElement(ProcessContext c) throws Exception {
            KV<String, Long> el = c.element();
            String out = "user: " + el.getKey() + " value:" + el.getValue();
            System.out.println(out);
            c.output(out);
        }
    }));

    format.apply(TextIO.Write.to(resultPath));

    p.run();
}

From source file:com.dataradiant.beam.examples.StreamWordCount.java

License:Apache License

public static void main(String[] args) throws Exception {

    Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
    options.setRunner(FlinkRunner.class);

    Pipeline p = Pipeline.create(options);

    KafkaIO.Read<byte[], String> kafkaIOReader = KafkaIO.read().withBootstrapServers("192.168.99.100:32771")
            .withTopics(Arrays.asList("beam".split(",")))
            .updateConsumerProperties(ImmutableMap.of("auto.offset.reset", (Object) "earliest"))
            .withValueCoder(StringUtf8Coder.of());

    p.apply(kafkaIOReader.withoutMetadata()).apply(Values.<String>create())
            .apply(Window.<String>into(FixedWindows.of(Duration.standardMinutes(options.getWindowSize()))))
            .apply(new CountWords()).apply(MapElements.via(new FormatAsTextFn()))
            .apply("WriteCounts", TextIO.Write.to(options.getOutput()));

    p.run();
}

From source file:com.datatorrent.benchmark.window.AbstractWindowedOperatorBenchmarkApp.java

License:Apache License

protected O createWindowedOperator(Configuration conf) {
    SpillableStateStore store = createStore(conf);
    try {
        O windowedOperator = this.windowedOperatorClass.newInstance();
        SpillableComplexComponentImpl sccImpl = new SpillableComplexComponentImpl(store);
        windowedOperator.addComponent("SpillableComplexComponent", sccImpl);

        windowedOperator.setDataStorage(createDataStorage(sccImpl));
        windowedOperator.setRetractionStorage(createRetractionStorage(sccImpl));
        windowedOperator.setWindowStateStorage(new InMemoryWindowedStorage());
        setUpdatedKeyStorage(windowedOperator, conf, sccImpl);
        windowedOperator.setAccumulation(createAccumulation());

        windowedOperator.setAllowedLateness(Duration.millis(ALLOWED_LATENESS));
        windowedOperator.setWindowOption(new WindowOption.TimeWindows(Duration.standardMinutes(1)));
        //accumulating mode
        windowedOperator.setTriggerOption(
                TriggerOption.AtWatermark().withEarlyFiringsAtEvery(Duration.standardSeconds(1))
                        .accumulatingFiredPanes().firingOnlyUpdatedPanes());
        windowedOperator.setFixedWatermark(30000);
        //windowedOperator.setTriggerOption(TriggerOption.AtWatermark());

        return windowedOperator;
    } catch (Exception e) {
        throw Throwables.propagate(e);
    }
}

From source file:com.example.dataflow.TrafficMaxLaneFlow.java

License:Apache License

/**
 * Sets up and starts streaming pipeline.
 *
 * @throws IOException if there is a problem setting up resources
 */
public static void main(String[] args) throws IOException {
    TrafficMaxLaneFlowOptions options = PipelineOptionsFactory.fromArgs(args).withValidation()
            .as(TrafficMaxLaneFlowOptions.class);
    if (options.isStreaming()) {
        // In order to cancel the pipelines automatically,
        // {@literal DataflowRunner} is forced to be used.
        options.setRunner(DataflowRunner.class);
    }
    options.setBigQuerySchema(FormatMaxesFn.getSchema());
    // Using DataflowExampleUtils to set up required resources.
    DataflowExampleUtils dataflowUtils = new DataflowExampleUtils(options);
    dataflowUtils.setup();

    Pipeline pipeline = Pipeline.create(options);
    TableReference tableRef = new TableReference();
    tableRef.setProjectId(options.getProject());
    tableRef.setDatasetId(options.getBigQueryDataset());
    tableRef.setTableId(options.getBigQueryTable());

    PCollection<String> rawInput;
    PCollection<KV<String, LaneInfo>> input;

    if (options.isStreaming()) {
        rawInput = pipeline.apply("StreamFromPubsub",
                PubsubIO.readStrings().fromTopic(options.getPubsubTopic()));
    } else {
        rawInput = pipeline.apply("ReadFromFile", TextIO.read().from(options.getInputFile()));
    }

    // row... => <stationId, LaneInfo> ...
    // if isStreaming == true, do not emit timestamps. If false, emit timestamps.
    input = rawInput.apply("ExtractFlowInfo", ParDo.of(new ExtractFlowInfoFn(!options.isStreaming())));

    // map the incoming data stream into sliding windows.
    input.apply(Window
            .<KV<String, LaneInfo>>into(SlidingWindows.of(Duration.standardMinutes(options.getWindowDuration()))
                    .every(Duration.standardMinutes(options.getWindowSlideEvery()))))
            .apply(new MaxLaneFlow())
            .apply(BigQueryIO.<TableRow>writeTableRows().to(tableRef).withSchema(FormatMaxesFn.getSchema()));

    PipelineResult result = pipeline.run();
    // dataflowUtils will try to cancel the pipeline and the injector before the program exits.
    // dataflowUtils.waitToFinish(result);
}