Usage examples for org.joda.time.DateTimeComparator.getInstance()
public static DateTimeComparator getInstance()
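getInstance() returns a comparator that compares the entire datetime value, and the call is cheap because it hands back a shared instance, so it can be stored in a field or used inline. A minimal standalone sketch (assuming only Joda-Time on the classpath; class and variable names are illustrative):

    import java.util.Arrays;
    import java.util.List;
    import org.joda.time.DateTime;
    import org.joda.time.DateTimeComparator;

    public class SortByInstant {
        public static void main(String[] args) {
            List<DateTime> times = Arrays.asList(
                    new DateTime(2024, 3, 1, 12, 30, 0),
                    new DateTime(2023, 12, 31, 23, 59, 0),
                    new DateTime(2024, 3, 1, 8, 0, 0));
            // Sort chronologically using the all-fields comparator
            times.sort(DateTimeComparator.getInstance());
            System.out.println(times);
        }
    }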
From source file:io.druid.indexing.overlord.TaskRunnerWorkItem.java
License:Apache License
@Override
public int compareTo(TaskRunnerWorkItem taskRunnerWorkItem) {
    return ComparisonChain.start()
            .compare(createdTime, taskRunnerWorkItem.getCreatedTime(), DateTimeComparator.getInstance())
            .compare(taskId, taskRunnerWorkItem.getTaskId())
            .result();
}
From source file:org.apache.druid.indexer.DetermineHashedPartitionsJob.java
License:Apache License
@Override
public boolean run() {
    try {
        /*
         * Group by (timestamp, dimensions) so we can correctly count dimension values as they would appear
         * in the final segment.
         */
        startTime = System.currentTimeMillis();
        groupByJob = Job.getInstance(
                new Configuration(),
                StringUtils.format("%s-determine_partitions_hashed-%s", config.getDataSource(), config.getIntervals()));

        JobHelper.injectSystemProperties(groupByJob);
        config.addJobProperties(groupByJob);
        groupByJob.setMapperClass(DetermineCardinalityMapper.class);
        groupByJob.setMapOutputKeyClass(LongWritable.class);
        groupByJob.setMapOutputValueClass(BytesWritable.class);
        groupByJob.setReducerClass(DetermineCardinalityReducer.class);
        groupByJob.setOutputKeyClass(NullWritable.class);
        groupByJob.setOutputValueClass(NullWritable.class);
        groupByJob.setOutputFormatClass(SequenceFileOutputFormat.class);
        groupByJob.setPartitionerClass(DetermineHashedPartitionsPartitioner.class);
        if (!config.getSegmentGranularIntervals().isPresent()) {
            groupByJob.setNumReduceTasks(1);
        } else {
            groupByJob.setNumReduceTasks(config.getSegmentGranularIntervals().get().size());
        }
        JobHelper.setupClasspath(
                JobHelper.distributedClassPath(config.getWorkingPath()),
                JobHelper.distributedClassPath(config.makeIntermediatePath()),
                groupByJob);

        config.addInputPaths(groupByJob);
        config.intoConfiguration(groupByJob);
        FileOutputFormat.setOutputPath(groupByJob, config.makeGroupedDataDir());

        groupByJob.submit();
        log.info("Job %s submitted, status available at: %s", groupByJob.getJobName(), groupByJob.getTrackingURL());

        // Store the jobId in the file
        if (groupByJob.getJobID() != null) {
            JobHelper.writeJobIdToFile(config.getHadoopJobIdFileName(), groupByJob.getJobID().toString());
        }

        try {
            if (!groupByJob.waitForCompletion(true)) {
                log.error("Job failed: %s", groupByJob.getJobID());
                failureCause = Utils.getFailureMessage(groupByJob, config.JSON_MAPPER);
                return false;
            }
        } catch (IOException ioe) {
            if (!Utils.checkAppSuccessForJobIOException(ioe, groupByJob, config.isUseYarnRMJobStatusFallback())) {
                throw ioe;
            }
        }

        /*
         * Load partitions and intervals determined by the previous job.
         */
        log.info("Job completed, loading up partitions for intervals[%s].", config.getSegmentGranularIntervals());
        FileSystem fileSystem = null;
        if (!config.getSegmentGranularIntervals().isPresent()) {
            final Path intervalInfoPath = config.makeIntervalInfoPath();
            fileSystem = intervalInfoPath.getFileSystem(groupByJob.getConfiguration());
            if (!Utils.exists(groupByJob, fileSystem, intervalInfoPath)) {
                throw new ISE("Path[%s] didn't exist!?", intervalInfoPath);
            }
            List<Interval> intervals = config.JSON_MAPPER.readValue(
                    Utils.openInputStream(groupByJob, intervalInfoPath),
                    new TypeReference<List<Interval>>() {});
            config.setGranularitySpec(
                    new UniformGranularitySpec(
                            config.getGranularitySpec().getSegmentGranularity(),
                            config.getGranularitySpec().getQueryGranularity(),
                            config.getGranularitySpec().isRollup(),
                            intervals));
            log.info("Determined Intervals for Job [%s].", config.getSegmentGranularIntervals());
        }
        Map<Long, List<HadoopyShardSpec>> shardSpecs = new TreeMap<>(DateTimeComparator.getInstance());
        int shardCount = 0;
        for (Interval segmentGranularity : config.getSegmentGranularIntervals().get()) {
            DateTime bucket = segmentGranularity.getStart();

            final Path partitionInfoPath = config.makeSegmentPartitionInfoPath(segmentGranularity);
            if (fileSystem == null) {
                fileSystem = partitionInfoPath.getFileSystem(groupByJob.getConfiguration());
            }
            if (Utils.exists(groupByJob, fileSystem, partitionInfoPath)) {
                final Long numRows = config.JSON_MAPPER.readValue(
                        Utils.openInputStream(groupByJob, partitionInfoPath), Long.class);

                log.info("Found approximately [%,d] rows in data.", numRows);

                final int numberOfShards = (int) Math.ceil((double) numRows / config.getTargetPartitionSize());

                log.info("Creating [%,d] shards", numberOfShards);

                List<HadoopyShardSpec> actualSpecs = Lists.newArrayListWithExpectedSize(numberOfShards);
                for (int i = 0; i < numberOfShards; ++i) {
                    actualSpecs.add(new HadoopyShardSpec(
                            new HashBasedNumberedShardSpec(i, numberOfShards, null, HadoopDruidIndexerConfig.JSON_MAPPER),
                            shardCount++));
                    log.info("DateTime[%s], partition[%d], spec[%s]", bucket, i, actualSpecs.get(i));
                }
                shardSpecs.put(bucket.getMillis(), actualSpecs);
            } else {
                log.info("Path[%s] didn't exist!?", partitionInfoPath);
            }
        }
        config.setShardSpecs(shardSpecs);
        log.info("DetermineHashedPartitionsJob took %d millis", (System.currentTimeMillis() - startTime));

        return true;
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
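DateTimeComparator compares any object Joda-Time can convert to an instant (ReadableInstant, Date, Long millis, and null, which it treats as "now"), which is why the snippet above can order a TreeMap keyed by bucket millis with it. A minimal sketch of that pattern (names and values are illustrative, not taken from the Druid source):

    import java.util.TreeMap;
    import org.joda.time.DateTime;
    import org.joda.time.DateTimeComparator;

    public class BucketedSpecs {
        public static void main(String[] args) {
            // Keys are epoch millis; the comparator converts them to instants for ordering
            TreeMap<Long, String> byBucket = new TreeMap<>(DateTimeComparator.getInstance());
            byBucket.put(new DateTime(2024, 2, 1, 0, 0).getMillis(), "spec-feb");
            byBucket.put(new DateTime(2024, 1, 1, 0, 0).getMillis(), "spec-jan");
            System.out.println(byBucket.firstEntry().getValue()); // prints spec-jan
        }
    }

For plain Long keys this ordering matches their natural numeric order; the comparator mainly buys flexibility in what can be used as a key.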
From source file:org.kuali.student.enrollment.class2.appointment.service.impl.AppointmentViewHelperServiceImpl.java
License:Educational Community License
private boolean validateTime(String startTime, String startAmPm, String endTime, String endAmPm) {
    // Build DateTime objects from the time strings
    KSDateTimeFormatter timeFormatter = new KSDateTimeFormatter("hh:mm aa");
    DateTime startingTime = timeFormatter.getFormatter().parseDateTime(startTime + " " + startAmPm);
    DateTime endingTime = timeFormatter.getFormatter().parseDateTime(endTime + " " + endAmPm);

    // Valid only when the start time is strictly before the end time
    if (DateTimeComparator.getInstance().compare(startingTime, endingTime) < 0) {
        return true;
    } else {
        return false;
    }
}
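The check above relies on the standard Comparator contract: compare(a, b) is negative when a is earlier than b, zero when the instants are equal, and positive when a is later. A minimal sketch of the sign convention (the times are arbitrary examples):

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeComparator;

    public class CompareSign {
        public static void main(String[] args) {
            DateTime nineAm = new DateTime(2024, 5, 1, 9, 0);
            DateTime fivePm = new DateTime(2024, 5, 1, 17, 0);
            // Negative: first argument is earlier, so a 9:00-17:00 range is valid
            System.out.println(DateTimeComparator.getInstance().compare(nineAm, fivePm));
            // Positive: first argument is later
            System.out.println(DateTimeComparator.getInstance().compare(fivePm, nineAm));
        }
    }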
From source file:org.kuali.student.enrollment.class2.courseoffering.helper.impl.ExamOfferingScheduleHelperImpl.java
License:Educational Community License
/**
 * Validates that a requested start time is prior to the requested end time.
 *
 * @param requestedSchedule the schedule wrapper holding the requested start and end times
 * @param examOfferingPath  the error path used when registering validation messages
 * @return true if both times parse and the start time is not after the end time
 */
private boolean validateTime(ScheduleWrapper requestedSchedule, String examOfferingPath) {
    DateTime startTime = null;
    DateTime endTime = null;
    boolean success = true;
    KSDateTimeFormatter timeFormatter = new KSDateTimeFormatter("hh:mm aa");

    if (StringUtils.isNotBlank(requestedSchedule.getStartTime())) {
        try {
            startTime = timeFormatter.getFormatter().parseDateTime(requestedSchedule.getStartTime());
        } catch (Exception e) {
            GlobalVariables.getMessageMap().putError(examOfferingPath + ".requestedSchedule.startTime",
                    ExamOfferingConstants.EXAM_OFFERING_MSG_ERROR_SCHEDULING_INVALID_START_TIME);
            success = false;
        }
    } else {
        GlobalVariables.getMessageMap().putError(examOfferingPath + ".requestedSchedule.startTime",
                ExamOfferingConstants.EXAM_OFFERING_MSG_ERROR_SCHEDULING_REQUIRED_START_TIME_FIELD_EMPTY);
        success = false;
    }

    if (StringUtils.isNotBlank(requestedSchedule.getEndTime())) {
        try {
            endTime = timeFormatter.getFormatter().parseDateTime(requestedSchedule.getEndTime());
        } catch (Exception e) {
            GlobalVariables.getMessageMap().putError(examOfferingPath + ".requestedSchedule.endTime",
                    ExamOfferingConstants.EXAM_OFFERING_MSG_ERROR_SCHEDULING_INVALID_END_TIME);
            success = false;
        }
    } else {
        GlobalVariables.getMessageMap().putError(examOfferingPath + ".requestedSchedule.endTime",
                ExamOfferingConstants.EXAM_OFFERING_MSG_ERROR_SCHEDULING_REQUIRED_END_TIME_FIELD_EMPTY);
        success = false;
    }

    if (DateTimeComparator.getInstance().compare(startTime, endTime) > 0) {
        GlobalVariables.getMessageMap().putError(examOfferingPath + ".requestedSchedule.startTime",
                ExamOfferingConstants.EXAM_OFFERING_MSG_ERROR_SCHEDULING_INVALID_START_END_TIME);
        success = false;
    }
    return success;
}
From source file:org.restcomm.connect.dao.mybatis.MybatisExtensionsConfigurationDao.java
License:Open Source License
@Override
public boolean isLatestVersionByName(String extensionName, DateTime dateTime) {
    final SqlSession session = sessions.openSession();
    boolean result = false;
    int comp;
    try {
        final DateTime dateUpdated = new DateTime(
                session.selectOne(namespace + "getDateUpdatedByName", extensionName));
        if (dateUpdated != null) {
            comp = DateTimeComparator.getInstance().compare(dateTime, dateUpdated);
            if (comp < 0) {
                // A negative value means the given dateTime is earlier than dateUpdated,
                // i.e. the database holds a newer configuration
                result = true;
            }
        }
    } finally {
        session.close();
    }
    return result;
}
From source file:org.restcomm.connect.dao.mybatis.MybatisExtensionsConfigurationDao.java
License:Open Source License
@Override
public boolean isLatestVersionBySid(Sid extensionSid, DateTime dateTime) {
    final SqlSession session = sessions.openSession();
    boolean result = false;
    int comp;
    try {
        final DateTime dateUpdated = new DateTime(
                session.selectOne(namespace + "getDateUpdatedBySid", extensionSid.toString()));
        if (dateUpdated != null) {
            comp = DateTimeComparator.getInstance().compare(dateTime, dateUpdated);
            if (comp < 0) {
                // A negative value means the given dateTime is earlier than dateUpdated,
                // i.e. the database holds a newer configuration
                result = true;
            }
        }
    } finally {
        session.close();
    }
    return result;
}
From source file:org.rhq.server.metrics.DateTimeService.java
License:Open Source License
private boolean hasTimeSliceEnded(DateTime startTime, Duration duration) {
    DateTime endTime = startTime.plus(duration);
    return DateTimeComparator.getInstance().compare(currentHour(), endTime) >= 0;
}
From source file:uk.ac.cam.db538.cryptosms.storage.Conversation.java
License:Apache License
@Override
public int compareTo(Conversation another) {
    try {
        return DateTimeComparator.getInstance().compare(this.getTimeStamp(), another.getTimeStamp());
    } catch (Exception e) {
        return 0;
    }
}
From source file:uk.co.bubblebearapps.motionaiclient.internal.di.modules.DomainModule.java
License:Apache License
@Provides
@Singleton
DateTimeComparator provideDateTimeComparator() {
    return DateTimeComparator.getInstance();
}
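Because getInstance() already hands back a shared instance, the @Singleton scope here mostly documents intent; the benefit of binding the comparator in a module is that consumers take it via injection instead of a static call, which keeps them easy to test. A hedged sketch of such a consumer (the class is illustrative, not from that project):

    import javax.inject.Inject;
    import org.joda.time.DateTime;
    import org.joda.time.DateTimeComparator;

    public class EventSorter {
        private final DateTimeComparator comparator;

        @Inject
        public EventSorter(DateTimeComparator comparator) {
            this.comparator = comparator;
        }

        /** Returns true if first occurs strictly before second. */
        public boolean isBefore(DateTime first, DateTime second) {
            return comparator.compare(first, second) < 0;
        }
    }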