List of usage examples for org.joda.time Duration standardHours
public static Duration standardHours(long hours)
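Before the collected examples, a minimal self-contained sketch of the method itself (the class name and values here are illustrative, not taken from any of the sources below): standardHours treats an hour as exactly 60 minutes, so the returned Duration is a fixed number of milliseconds with no daylight-saving or calendar adjustment.

import org.joda.time.Duration;

public class StandardHoursDemo {
    public static void main(String[] args) {
        // One standard hour is exactly 60 * 60 * 1000 milliseconds.
        Duration twoHours = Duration.standardHours(2);
        System.out.println(twoHours.getMillis());          // 7200000
        System.out.println(twoHours.getStandardMinutes()); // 120
    }
}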
From source file: kr.debop4j.timeperiod.tools.Durations.java
License: Apache License
/**
 * Hours duration.
 *
 * @param hours the hours
 * @return the duration
 */
public static Duration hours(int hours) {
    return Duration.standardHours(hours);
}
From source file: news.NewsExtensionController.java
License: Apache License
/**
 * Generate a list of articles from the database. Listed in descending (newest to oldest) order.
 *
 * @param genNum required parameter used to limit the results returned. Can be larger than
 *               the actual number of objects in the database.
 * @return the list of objects found as a json structure.
 */
@Route(method = HttpMethod.GET, uri = "/news/list/generated/{genNum}")
public Result generate(@Parameter("genNum") int genNum) {
    String uri = context().request().uri();
    Result cached = (Result) cache.get(uri);
    if (cached != null
            && !HeaderNames.NOCACHE_VALUE.equalsIgnoreCase(context().header(HeaderNames.CACHE_CONTROL))) {
        return cached;
    }
    List<NewsArticle> list = new LinkedList<NewsArticle>();
    List<NewsArticle> query = newsArticleCrud.query(new OSQLSynchQuery<NewsArticle>(
            "select * from NewsArticle order by dateModified DESC limit " + genNum));
    list.addAll(query);
    Result result = ok(list).json();
    cache.set(uri, result, Duration.standardHours(1));
    return result;
}
From source file: org.apache.beam.examples.snippets.Snippets.java
License: Apache License
public static void fileProcessPattern() throws Exception {
    Pipeline p = Pipeline.create();

    // [START FileProcessPatternProcessNewFilesSnip1]
    // This produces PCollection<MatchResult.Metadata>
    p.apply(FileIO.match()
            .filepattern("...")
            .continuously(Duration.standardSeconds(30),
                    Watch.Growth.afterTimeSinceNewOutput(Duration.standardHours(1))));
    // [END FileProcessPatternProcessNewFilesSnip1]

    // [START FileProcessPatternProcessNewFilesSnip2]
    // This produces PCollection<String>
    p.apply(TextIO.read()
            .from("<path-to-files>/*")
            .watchForNewFiles(
                    // Check for new files every minute.
                    Duration.standardMinutes(1),
                    // Stop watching the file pattern if no new files appear for an hour.
                    Watch.Growth.afterTimeSinceNewOutput(Duration.standardHours(1))));
    // [END FileProcessPatternProcessNewFilesSnip2]

    // [START FileProcessPatternAccessMetadataSnip1]
    p.apply(FileIO.match().filepattern("hdfs://path/to/*.gz"))
            // The withCompression method is optional. By default, the Beam SDK detects compression from
            // the filename.
            .apply(FileIO.readMatches().withCompression(Compression.GZIP))
            .apply(ParDo.of(new DoFn<FileIO.ReadableFile, String>() {
                @ProcessElement
                public void process(@Element FileIO.ReadableFile file) {
                    // We can now access the file and its metadata.
                    LOG.info("File Metadata resourceId is {} ", file.getMetadata().resourceId());
                }
            }));
    // [END FileProcessPatternAccessMetadataSnip1]
}
From source file: org.gravidence.gravifon.schedule.RemoveUsersFailedToCompleteRegistrationTask.java
License: Open Source License
/**
 * Performs task job.
 */
@Scheduled(cron = "${org.gravidence.gravifon.schedule.removeUsersFailedToCompleteRegistration.cron}")
public void run() {
    Date start = new Date();
    int i = 0;
    try {
        List<UserDocument> users = usersDbClient
                .retrieveUsersFailedToCompleteRegistration(Duration.standardHours(threshold));
        if (CollectionUtils.isNotEmpty(users)) {
            for (; i < users.size(); i++) {
                UserDocument user = users.get(i);
                usersDbClient.delete(user);
                LOGGER.trace("{} user removed", user);
            }
        }
    } catch (Exception e) {
        LOGGER.warn("Exception occurred during task execution", e);
    }
    Date end = new Date();
    LOGGER.info("Task executed (duration: {} ms, users removed: {})", end.getTime() - start.getTime(), i);
}
From source file: org.hawkular.metrics.core.jobs.TempDataCompressor.java
License: Apache License
@Override
public Completable call(JobDetails jobDetails) {
    Duration runtimeBlockSize = Duration.standardHours(2);

    Trigger trigger = jobDetails.getTrigger();
    DateTime timeSliceInclusive = new DateTime(trigger.getTriggerTime(), DateTimeZone.UTC)
            .minus(runtimeBlockSize);

    // Rewind to previous timeslice
    DateTime timeSliceStart = DateTimeService.getTimeSlice(timeSliceInclusive, runtimeBlockSize);
    long startOfSlice = timeSliceStart.getMillis();

    Stopwatch stopwatch = Stopwatch.createStarted();
    logger.infof("Starting to process temp table for starting time of %s", timeSliceStart.toString());

    // TODO Optimization - new worker per token - use parallelism in Cassandra (with configured parallelism)
    return metricsService.compressBlock(startOfSlice, pageSize, maxReadConcurrency).doOnCompleted(() -> {
        stopwatch.stop();
        logger.info("Finished processing data in " + stopwatch.elapsed(TimeUnit.MILLISECONDS) + " ms");
    });
}
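The Hawkular job above relies on its project-specific DateTimeService.getTimeSlice to rewind to the start of the previous two-hour block. As a rough sketch of that kind of time-slice flooring (not the Hawkular implementation; the helper name and dates below are hypothetical), an instant can be rounded down to a fixed block size with plain millisecond arithmetic:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;

public class TimeSliceSketch {
    // Hypothetical helper: floor an instant to the start of its fixed-size block.
    static DateTime floorToBlock(DateTime instant, Duration blockSize) {
        long block = blockSize.getMillis();
        long floored = (instant.getMillis() / block) * block;
        return new DateTime(floored, DateTimeZone.UTC);
    }

    public static void main(String[] args) {
        Duration blockSize = Duration.standardHours(2);
        DateTime trigger = new DateTime(2024, 1, 15, 13, 37, DateTimeZone.UTC);
        // 13:37 minus one block is 11:37, which floors to the 10:00-12:00 block.
        System.out.println(floorToBlock(trigger.minus(blockSize), blockSize)); // 2024-01-15T10:00:00.000Z
    }
}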
From source file: org.springframework.analytics.metrics.redis.RedisAggregateCounterRepository.java
License: Apache License
/**
 * For each query, we need to convert the interval into two variations. One is the start and end points rounded to
 * the resolution (used to calculate the number of entries to be returned from the query). The second is the start
 * and end buckets we have to retrieve which may contain entries for the interval. For example, when querying
 * at day resolution, the number of entries is the number of Joda-Time days between the start (rounded down to a
 * day boundary) and the end plus one day (also rounded down). However, we need to load the data from the buckets
 * from the month the start day occurs in to the month the end day occurs in. These are then concatenated, using
 * the start day as the start index into the first array, and writing the total number of entries in sequence from
 * that point into the combined result counts array.
 */
@Override
public AggregateCounter getCounts(String name, Interval interval, AggregateCounterResolution resolution) {
    DateTime end = interval.getEnd();
    Chronology c = interval.getChronology();
    long[] counts;
    if (resolution == AggregateCounterResolution.minute) {
        // Iterate through each hour in the interval and load the minutes for it
        MutableDateTime dt = new MutableDateTime(interval.getStart());
        dt.setRounding(c.hourOfDay());
        Duration step = Duration.standardHours(1);
        List<long[]> hours = new ArrayList<long[]>();
        while (dt.isBefore(end) || dt.isEqual(end)) {
            hours.add(getMinCountsForHour(name, dt));
            dt.add(step);
        }
        counts = MetricUtils.concatArrays(hours, interval.getStart().getMinuteOfHour(),
                interval.toPeriod().toStandardMinutes().getMinutes() + 1);
    }
    else if (resolution == AggregateCounterResolution.hour) {
        DateTime cursor = new DateTime(c.dayOfMonth().roundFloor(interval.getStart().getMillis()));
        List<long[]> days = new ArrayList<long[]>();
        Duration step = Duration.standardHours(24);
        while (cursor.isBefore(end)) {
            days.add(getHourCountsForDay(name, cursor));
            cursor = cursor.plus(step);
        }
        counts = MetricUtils.concatArrays(days, interval.getStart().getHourOfDay(),
                interval.toPeriod().toStandardHours().getHours() + 1);
    }
    else if (resolution == AggregateCounterResolution.day) {
        DateTime startDay = new DateTime(c.dayOfYear().roundFloor(interval.getStart().getMillis()));
        DateTime endDay = new DateTime(c.dayOfYear().roundFloor(end.plusDays(1).getMillis()));
        int nDays = Days.daysBetween(startDay, endDay).getDays();
        DateTime cursor = new DateTime(c.monthOfYear().roundFloor(interval.getStart().getMillis()));
        List<long[]> months = new ArrayList<long[]>();
        DateTime endMonth = new DateTime(
                c.monthOfYear().roundCeiling(interval.getEnd().plusMonths(1).getMillis()));
        while (cursor.isBefore(endMonth)) {
            months.add(getDayCountsForMonth(name, cursor));
            cursor = cursor.plusMonths(1);
        }
        counts = MetricUtils.concatArrays(months, interval.getStart().getDayOfMonth() - 1, nDays);
    }
    else if (resolution == AggregateCounterResolution.month) {
        DateTime startMonth = new DateTime(c.monthOfYear().roundFloor(interval.getStartMillis()));
        DateTime endMonth = new DateTime(c.monthOfYear().roundFloor(end.plusMonths(1).getMillis()));
        int nMonths = Months.monthsBetween(startMonth, endMonth).getMonths();
        DateTime cursor = new DateTime(c.year().roundFloor(interval.getStartMillis()));
        List<long[]> years = new ArrayList<long[]>();
        DateTime endYear = new DateTime(c.year().roundCeiling(interval.getEnd().plusYears(1).getMillis()));
        while (cursor.isBefore(endYear)) {
            years.add(getMonthCountsForYear(name, cursor));
            cursor = cursor.plusYears(1);
        }
        counts = MetricUtils.concatArrays(years, interval.getStart().getMonthOfYear() - 1, nMonths);
    }
    else if (resolution == AggregateCounterResolution.year) {
        DateTime startYear = new DateTime(interval.getStart().getYear(), 1, 1, 0, 0);
        DateTime endYear = new DateTime(end.getYear() + 1, 1, 1, 0, 0);
        int nYears = Years.yearsBetween(startYear, endYear).getYears();
        Map<String, Long> yearCounts = getYearCounts(name);
        counts = new long[nYears];
        for (int i = 0; i < nYears; i++) {
            int year = startYear.plusYears(i).getYear();
            Long count = yearCounts.get(Integer.toString(year));
            if (count == null) {
                count = 0L;
            }
            counts[i] = count;
        }
    }
    else {
        throw new IllegalStateException("Shouldn't happen. Unhandled resolution: " + resolution);
    }
    return new AggregateCounter(name, interval, counts, resolution);
}
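The Javadoc above describes walking fixed-size buckets and then copying a window out of the concatenated arrays. The following minimal sketch of the hour-resolution branch uses only Joda-Time; getHourCountsForDay and MetricUtils.concatArrays are repository-specific, so the bucket load is replaced with a trivial stand-in and only the stepping with Duration.standardHours(24) plus the offset/length arithmetic are shown. The interval and class name are illustrative.

import java.util.ArrayList;
import java.util.List;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
import org.joda.time.Hours;
import org.joda.time.Interval;

public class HourResolutionSketch {
    public static void main(String[] args) {
        // Hypothetical interval: 2024-01-01T22:00 to 2024-01-02T03:00 UTC.
        Interval interval = new Interval(new DateTime(2024, 1, 1, 22, 0, DateTimeZone.UTC),
                new DateTime(2024, 1, 2, 3, 0, DateTimeZone.UTC));

        // Load one 24-slot bucket per day covered by the interval, stepping by 24 standard hours.
        List<long[]> days = new ArrayList<long[]>();
        DateTime cursor = interval.getStart().dayOfMonth().roundFloorCopy();
        Duration step = Duration.standardHours(24);
        while (cursor.isBefore(interval.getEnd())) {
            days.add(new long[24]); // stand-in for getHourCountsForDay(name, cursor)
            cursor = cursor.plus(step);
        }

        // The combined result starts at the first bucket's start hour and spans hours + 1 entries,
        // mirroring the concatArrays(days, startHour, totalEntries) call in the repository.
        int startHour = interval.getStart().getHourOfDay();        // 22
        int totalEntries = Hours.hoursIn(interval).getHours() + 1; // 6
        System.out.println(days.size() + " buckets, offset " + startHour + ", " + totalEntries + " entries");
    }
}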
From source file: org.springframework.xd.analytics.metrics.redis.RedisAggregateCounterRepository.java
License: Apache License
/**
 * For each query, we need to convert the interval into two variations. One is the start and end points rounded to
 * the resolution (used to calculate the number of entries to be returned from the query). The second is the start
 * and end buckets we have to retrieve which may contain entries for the interval. For example, when querying
 * at day resolution, the number of entries is the number of Joda-Time days between the start (rounded down to a
 * day boundary) and the end plus one day (also rounded down). However, we need to load the data from the buckets
 * from the month the start day occurs in to the month the end day occurs in. These are then concatenated, using
 * the start day as the start index into the first array, and writing the total number of entries in sequence from
 * that point into the combined result counts array.
 */
@Override
public AggregateCount getCounts(String name, Interval interval, AggregateCountResolution resolution) {
    DateTime end = interval.getEnd();
    Chronology c = interval.getChronology();
    long[] counts;
    if (resolution == AggregateCountResolution.minute) {
        // Iterate through each hour in the interval and load the minutes for it
        MutableDateTime dt = new MutableDateTime(interval.getStart());
        dt.setRounding(c.hourOfDay());
        Duration step = Duration.standardHours(1);
        List<long[]> hours = new ArrayList<long[]>();
        while (dt.isBefore(end) || dt.isEqual(end)) {
            hours.add(getMinCountsForHour(name, dt));
            dt.add(step);
        }
        counts = MetricUtils.concatArrays(hours, interval.getStart().getMinuteOfHour(),
                interval.toPeriod().toStandardMinutes().getMinutes() + 1);
    }
    else if (resolution == AggregateCountResolution.hour) {
        DateTime cursor = new DateTime(c.dayOfMonth().roundFloor(interval.getStart().getMillis()));
        List<long[]> days = new ArrayList<long[]>();
        Duration step = Duration.standardHours(24);
        while (cursor.isBefore(end)) {
            days.add(getHourCountsForDay(name, cursor));
            cursor = cursor.plus(step);
        }
        counts = MetricUtils.concatArrays(days, interval.getStart().getHourOfDay(),
                interval.toPeriod().toStandardHours().getHours() + 1);
    }
    else if (resolution == AggregateCountResolution.day) {
        DateTime startDay = new DateTime(c.dayOfYear().roundFloor(interval.getStart().getMillis()));
        DateTime endDay = new DateTime(c.dayOfYear().roundFloor(end.plusDays(1).getMillis()));
        int nDays = Days.daysBetween(startDay, endDay).getDays();
        DateTime cursor = new DateTime(c.monthOfYear().roundFloor(interval.getStart().getMillis()));
        List<long[]> months = new ArrayList<long[]>();
        DateTime endMonth = new DateTime(
                c.monthOfYear().roundCeiling(interval.getEnd().plusMonths(1).getMillis()));
        while (cursor.isBefore(endMonth)) {
            months.add(getDayCountsForMonth(name, cursor));
            cursor = cursor.plusMonths(1);
        }
        counts = MetricUtils.concatArrays(months, interval.getStart().getDayOfMonth() - 1, nDays);
    }
    else if (resolution == AggregateCountResolution.month) {
        DateTime startMonth = new DateTime(c.monthOfYear().roundFloor(interval.getStartMillis()));
        DateTime endMonth = new DateTime(c.monthOfYear().roundFloor(end.plusMonths(1).getMillis()));
        int nMonths = Months.monthsBetween(startMonth, endMonth).getMonths();
        DateTime cursor = new DateTime(c.year().roundFloor(interval.getStartMillis()));
        List<long[]> years = new ArrayList<long[]>();
        DateTime endYear = new DateTime(c.year().roundCeiling(interval.getEnd().plusYears(1).getMillis()));
        while (cursor.isBefore(endYear)) {
            years.add(getMonthCountsForYear(name, cursor));
            cursor = cursor.plusYears(1);
        }
        counts = MetricUtils.concatArrays(years, interval.getStart().getMonthOfYear() - 1, nMonths);
    }
    else if (resolution == AggregateCountResolution.year) {
        DateTime startYear = new DateTime(interval.getStart().getYear(), 1, 1, 0, 0);
        DateTime endYear = new DateTime(end.getYear() + 1, 1, 1, 0, 0);
        int nYears = Years.yearsBetween(startYear, endYear).getYears();
        Map<String, Long> yearCounts = getYearCounts(name);
        counts = new long[nYears];
        for (int i = 0; i < nYears; i++) {
            int year = startYear.plusYears(i).getYear();
            Long count = yearCounts.get(Integer.toString(year));
            if (count == null) {
                count = 0L;
            }
            counts[i] = count;
        }
    }
    else {
        throw new IllegalStateException("Shouldn't happen. Unhandled resolution: " + resolution);
    }
    return new AggregateCount(name, interval, counts, resolution);
}