List of usage examples for org.joda.time Duration standardDays
public static Duration standardDays(long days)
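Before the project-specific examples below, a minimal standalone sketch of the factory itself (the wrapper class and the values printed are illustrative, not taken from any of the listed sources): standardDays creates a Duration of exactly days × 24 hours, i.e. days × 86,400,000 milliseconds, with no calendar or daylight-saving adjustment.

import org.joda.time.Duration;
import org.joda.time.Instant;

public class StandardDaysSketch {
  public static void main(String[] args) {
    // One standard day is exactly 24 hours = 86,400,000 ms; no time-zone or DST handling.
    Duration oneDay = Duration.standardDays(1);
    System.out.println(oneDay.getMillis());        // 86400000
    System.out.println(oneDay.getStandardHours()); // 24

    // A Duration is a fixed millisecond length, so it can be added to any instant.
    Instant now = Instant.now();
    Instant aWeekLater = now.plus(Duration.standardDays(7));
    System.out.println(aWeekLater.isAfter(now));   // true
  }
}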
From source file:org.apache.beam.learning.katas.triggers.windowaccummode.Task.java
License:Apache License
static PCollection<Long> applyTransform(PCollection<String> events) {
  return events
      .apply(Window.<String>into(FixedWindows.of(Duration.standardDays(1)))
          .triggering(AfterWatermark.pastEndOfWindow()
              .withEarlyFirings(AfterProcessingTime.pastFirstElementInPane()))
          .withAllowedLateness(Duration.ZERO)
          .accumulatingFiredPanes())
      .apply(Combine.globally(Count.<String>combineFn()).withoutDefaults());
}
From source file:org.apache.beam.learning.katas.windowing.fixedwindow.Task.java
License:Apache License
static PCollection<KV<String, Long>> applyTransform(PCollection<String> events) {
  return events
      .apply(Window.into(FixedWindows.of(Duration.standardDays(1))))
      .apply(Count.perElement());
}
From source file:org.apache.beam.sdk.extensions.sql.BeamSqlDslBase.java
License:Apache License
private PCollection<Row> prepareUnboundedPCollection1() {
  TestStream.Builder<Row> values = TestStream.create(
      schemaInTableA, SerializableFunctions.identity(), SerializableFunctions.identity());
  for (Row row : rowsInTableA) {
    values = values.advanceWatermarkTo(new Instant(row.getDateTime("f_timestamp")));
    values = values.addElements(row);
  }
  return PBegin.in(pipeline)
      .apply("unboundedInput1", values.advanceWatermarkToInfinity())
      .apply("unboundedInput1.fixedWindow1year",
          Window.into(FixedWindows.of(Duration.standardDays(365))));
}
From source file:org.apache.beam.sdk.extensions.sql.BeamSqlDslBase.java
License:Apache License
private PCollection<Row> prepareUnboundedPCollection2() {
  TestStream.Builder<Row> values = TestStream.create(
      schemaInTableA, SerializableFunctions.identity(), SerializableFunctions.identity());
  Row row = rowsInTableA.get(0);
  values = values.advanceWatermarkTo(new Instant(row.getDateTime("f_timestamp")));
  values = values.addElements(row);
  return PBegin.in(pipeline)
      .apply("unboundedInput2", values.advanceWatermarkToInfinity())
      .apply("unboundedInput2.fixedWindow1year",
          Window.into(FixedWindows.of(Duration.standardDays(365))));
}
From source file:org.apache.beam.sdk.nexmark.NexmarkUtils.java
License:Apache License
/** Return a transform to reduce a stream to a single, order-invariant long hash. */
public static <T> PTransform<PCollection<T>, PCollection<Long>> hash(final long numEvents, String name) {
  return new PTransform<PCollection<T>, PCollection<Long>>(name) {
    @Override
    public PCollection<Long> expand(PCollection<T> input) {
      return input
          .apply(Window.<T>into(new GlobalWindows())
              .triggering(AfterPane.elementCountAtLeast((int) numEvents))
              .withAllowedLateness(Duration.standardDays(1))
              .discardingFiredPanes())
          .apply(name + ".Hash", ParDo.of(new DoFn<T, Long>() {
            @ProcessElement
            public void processElement(ProcessContext c) {
              long hash = Hashing.murmur3_128().newHasher()
                  .putLong(c.timestamp().getMillis())
                  .putString(c.element().toString(), StandardCharsets.UTF_8)
                  .hash()
                  .asLong();
              c.output(hash);
            }
          }))
          .apply(Combine.globally(new Combine.BinaryCombineFn<Long>() {
            @Override
            public Long apply(Long left, Long right) {
              return left ^ right;
            }
          }));
    }
  };
}
From source file:org.apache.beam.sdk.nexmark.queries.Query10.java
License:Apache License
@Override
public PCollection<Done> expand(PCollection<Event> events) {
  final int numLogShards = maxNumWorkers * NUM_SHARDS_PER_WORKER;

  return events
      .apply(name + ".ShardEvents", ParDo.of(new DoFn<Event, KV<String, Event>>() {
        private final Counter lateCounter = Metrics.counter(name, "actuallyLateEvent");
        private final Counter onTimeCounter = Metrics.counter(name, "onTimeCounter");

        @ProcessElement
        public void processElement(ProcessContext c) {
          if (c.element().hasAnnotation("LATE")) {
            lateCounter.inc();
            LOG.info("Observed late: %s", c.element());
          } else {
            onTimeCounter.inc();
          }
          int shardNum = (int) Math.abs((long) c.element().hashCode() % numLogShards);
          String shard = String.format("shard-%05d-of-%05d", shardNum, numLogShards);
          c.output(KV.of(shard, c.element()));
        }
      }))
      .apply(name + ".WindowEvents", Window
          .<KV<String, Event>>into(FixedWindows.of(Duration.standardSeconds(configuration.windowSizeSec)))
          .triggering(AfterEach.inOrder(
              Repeatedly.forever(AfterPane.elementCountAtLeast(configuration.maxLogEvents))
                  .orFinally(AfterWatermark.pastEndOfWindow()),
              Repeatedly.forever(AfterFirst.of(AfterPane.elementCountAtLeast(configuration.maxLogEvents),
                  AfterProcessingTime.pastFirstElementInPane().plusDelayOf(LATE_BATCHING_PERIOD)))))
          .discardingFiredPanes()
          // Use a 1 day allowed lateness so that any forgotten hold will stall the
          // pipeline for that period and be very noticeable.
          .withAllowedLateness(Duration.standardDays(1)))
      .apply(name + ".GroupByKey", GroupByKey.create())
      .apply(name + ".CheckForLateEvents", ParDo.of(
          new DoFn<KV<String, Iterable<Event>>, KV<String, Iterable<Event>>>() {
            private final Counter earlyCounter = Metrics.counter(name, "earlyShard");
            private final Counter onTimeCounter = Metrics.counter(name, "onTimeShard");
            private final Counter lateCounter = Metrics.counter(name, "lateShard");
            private final Counter unexpectedLatePaneCounter =
                Metrics.counter(name, "ERROR_unexpectedLatePane");
            private final Counter unexpectedOnTimeElementCounter =
                Metrics.counter(name, "ERROR_unexpectedOnTimeElement");

            @ProcessElement
            public void processElement(ProcessContext c, BoundedWindow window) {
              int numLate = 0;
              int numOnTime = 0;
              for (Event event : c.element().getValue()) {
                if (event.hasAnnotation("LATE")) {
                  numLate++;
                } else {
                  numOnTime++;
                }
              }
              String shard = c.element().getKey();
              LOG.info(String.format(
                  "%s with timestamp %s has %d actually late and %d on-time "
                      + "elements in pane %s for window %s",
                  shard, c.timestamp(), numLate, numOnTime, c.pane(), window.maxTimestamp()));
              if (c.pane().getTiming() == PaneInfo.Timing.LATE) {
                if (numLate == 0) {
                  LOG.error("ERROR! No late events in late pane for %s", shard);
                  unexpectedLatePaneCounter.inc();
                }
                if (numOnTime > 0) {
                  LOG.error("ERROR! Have %d on-time events in late pane for %s", numOnTime, shard);
                  unexpectedOnTimeElementCounter.inc();
                }
                lateCounter.inc();
              } else if (c.pane().getTiming() == PaneInfo.Timing.EARLY) {
                if (numOnTime + numLate < configuration.maxLogEvents) {
                  LOG.error("ERROR! Only have %d events in early pane for %s", numOnTime + numLate, shard);
                }
                earlyCounter.inc();
              } else {
                onTimeCounter.inc();
              }
              c.output(c.element());
            }
          }))
      .apply(name + ".UploadEvents", ParDo.of(
          new DoFn<KV<String, Iterable<Event>>, KV<Void, OutputFile>>() {
            private final Counter savedFileCounter = Metrics.counter(name, "savedFile");
            private final Counter writtenRecordsCounter = Metrics.counter(name, "writtenRecords");

            @ProcessElement
            public void processElement(ProcessContext c, BoundedWindow window) throws IOException {
              String shard = c.element().getKey();
              GcsOptions options = c.getPipelineOptions().as(GcsOptions.class);
              OutputFile outputFile = outputFileFor(window, shard, c.pane());
              LOG.info(String.format(
                  "Writing %s with record timestamp %s, window timestamp %s, pane %s",
                  shard, c.timestamp(), window.maxTimestamp(), c.pane()));
              if (outputFile.filename != null) {
                LOG.info("Beginning write to '%s'", outputFile.filename);
                int n = 0;
                try (OutputStream output = Channels
                    .newOutputStream(openWritableGcsFile(options, outputFile.filename))) {
                  for (Event event : c.element().getValue()) {
                    Event.CODER.encode(event, output, Coder.Context.OUTER);
                    writtenRecordsCounter.inc();
                    if (++n % 10000 == 0) {
                      LOG.info("So far written %d records to '%s'", n, outputFile.filename);
                    }
                  }
                }
                LOG.info("Written all %d records to '%s'", n, outputFile.filename);
              }
              savedFileCounter.inc();
              c.output(KV.of(null, outputFile));
            }
          }))
      // Clear fancy triggering from above.
      .apply(name + ".WindowLogFiles", Window
          .<KV<Void, OutputFile>>into(FixedWindows.of(Duration.standardSeconds(configuration.windowSizeSec)))
          .triggering(AfterWatermark.pastEndOfWindow())
          // We expect no late data here, but we'll assume the worst so we can detect any.
          .withAllowedLateness(Duration.standardDays(1))
          .discardingFiredPanes())
      // this GroupByKey allows to have one file per window
      .apply(name + ".GroupByKey2", GroupByKey.create())
      .apply(name + ".Index", ParDo.of(new DoFn<KV<Void, Iterable<OutputFile>>, Done>() {
        private final Counter unexpectedLateCounter = Metrics.counter(name, "ERROR_unexpectedLate");
        private final Counter unexpectedEarlyCounter = Metrics.counter(name, "ERROR_unexpectedEarly");
        private final Counter unexpectedIndexCounter = Metrics.counter(name, "ERROR_unexpectedIndex");
        private final Counter finalizedCounter = Metrics.counter(name, "indexed");

        @ProcessElement
        public void processElement(ProcessContext c, BoundedWindow window) throws IOException {
          if (c.pane().getTiming() == Timing.LATE) {
            unexpectedLateCounter.inc();
            LOG.error("ERROR! Unexpected LATE pane: %s", c.pane());
          } else if (c.pane().getTiming() == Timing.EARLY) {
            unexpectedEarlyCounter.inc();
            LOG.error("ERROR! Unexpected EARLY pane: %s", c.pane());
          } else if (c.pane().getTiming() == Timing.ON_TIME && c.pane().getIndex() != 0) {
            unexpectedIndexCounter.inc();
            LOG.error("ERROR! Unexpected ON_TIME pane index: %s", c.pane());
          } else {
            GcsOptions options = c.getPipelineOptions().as(GcsOptions.class);
            LOG.info("Index with record timestamp %s, window timestamp %s, pane %s",
                c.timestamp(), window.maxTimestamp(), c.pane());

            @Nullable String filename = indexPathFor(window);
            if (filename != null) {
              LOG.info("Beginning write to '%s'", filename);
              int n = 0;
              try (OutputStream output = Channels
                  .newOutputStream(openWritableGcsFile(options, filename))) {
                for (OutputFile outputFile : c.element().getValue()) {
                  output.write(outputFile.toString().getBytes(StandardCharsets.UTF_8));
                  n++;
                }
              }
              LOG.info("Written all %d lines to '%s'", n, filename);
            }
            c.output(new Done("written for timestamp " + window.maxTimestamp()));
            finalizedCounter.inc();
          }
        }
      }));
}
From source file:org.apache.beam.sdk.schemas.utils.AvroUtils.java
License:Apache License
private static Object convertDateStrict(Integer epochDays, Schema.FieldType fieldType) {
  checkTypeName(fieldType.getTypeName(), TypeName.DATETIME, "date");
  return Instant.EPOCH.plus(Duration.standardDays(epochDays));
}
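The helper above converts an Avro date, stored as days since the Unix epoch, into a Joda Instant by adding that many standard days to Instant.EPOCH. A minimal sketch of the same conversion (the class name and the value 18262 are illustrative):

import org.joda.time.Duration;
import org.joda.time.Instant;

public class EpochDaysSketch {
  public static void main(String[] args) {
    // 18262 days after 1970-01-01 (50 * 365 days + 12 leap days) is 2020-01-01.
    Instant converted = Instant.EPOCH.plus(Duration.standardDays(18262));
    System.out.println(converted); // 2020-01-01T00:00:00.000Z
  }
}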
From source file:org.apache.beam.sdk.testutils.metrics.MetricsReader.java
License:Apache License
/**
 * Timestamp metrics are used to monitor the time of execution of transforms. If the result
 * timestamp metric is too far from now, consider that the metric is erroneous.
 */
private boolean isCredible(long value) {
  return (Math.abs(value - now) <= Duration.standardDays(10000).getMillis());
}
From source file:org.apache.druid.server.log.FileRequestLogger.java
License:Apache License
@LifecycleStart
@Override
public void start() {
  try {
    baseDir.mkdirs();

    MutableDateTime mutableDateTime = DateTimes.nowUtc().toMutableDateTime(ISOChronology.getInstanceUTC());
    mutableDateTime.setMillisOfDay(0);
    synchronized (lock) {
      currentDay = mutableDateTime.toDateTime(ISOChronology.getInstanceUTC());
      fileWriter = getFileWriter();
    }
    long nextDay = currentDay.plusDays(1).getMillis();
    Duration initialDelay = new Duration(nextDay - System.currentTimeMillis());

    ScheduledExecutors.scheduleWithFixedDelay(exec, initialDelay, Duration.standardDays(1),
        new Callable<ScheduledExecutors.Signal>() {
          @Override
          public ScheduledExecutors.Signal call() {
            try {
              synchronized (lock) {
                currentDay = currentDay.plusDays(1);
                CloseQuietly.close(fileWriter);
                fileWriter = getFileWriter();
              }
            } catch (Exception e) {
              throw new RuntimeException(e);
            }
            return ScheduledExecutors.Signal.REPEAT;
          }
        });
  } catch (IOException e) {
    throw new RuntimeException(e);
  }
}
From source file:org.graylog2.rest.resources.system.TrafficResource.java
License:Open Source License
@GET
@ApiOperation(value = "Get the cluster traffic stats")
public TrafficCounterService.TrafficHistogram get(
    @ApiParam(name = "days", value = "For how many days the traffic stats should be returned")
    @QueryParam("days") @DefaultValue("30") int days,
    @ApiParam(name = "daily", value = "Whether the traffic should be aggregated to daily values")
    @QueryParam("daily") @DefaultValue("false") boolean daily) {
  final TrafficCounterService.TrafficHistogram trafficHistogram = trafficCounterService
      .clusterTrafficOfLastDays(Duration.standardDays(days),
          daily ? TrafficCounterService.Interval.DAILY : TrafficCounterService.Interval.HOURLY);
  return trafficHistogram;
}