Usage examples for org.joda.time.Interval#contains
public boolean contains(long millisInstant)
From source file:com.marand.thinkmed.medications.administration.impl.AdministrationTaskCreatorImpl.java
License:Open Source License
private Opt<Pair<DateTime, TherapyDoseDto>> getTimedDoseForVariableDaysTherapy( final VariableSimpleTherapyDto therapy, final Interval taskCreationInterval, final AdministrationTaskCreateActionEnum action, final TimedSimpleDoseElementDto timedDoseElement, final DateTime date) { final HourMinuteDto doseTime = timedDoseElement.getDoseTime(); final DateTime administrationDateTime = date.withTimeAtStartOfDay().plusHours(doseTime.getHour()) .plusMinutes(doseTime.getMinute()); final TherapyDoseDto dose = getTherapyDoseForSimpleTherapy(timedDoseElement.getDoseElement(), therapy); final DateTime taskCreationIntervalEnd = taskCreationInterval.getEnd(); final boolean inTaskCreationInterval; //noinspection IfMayBeConditional if (action.isTaskCreationIntervalStartIncluded()) { inTaskCreationInterval = taskCreationInterval.contains(administrationDateTime) || taskCreationIntervalEnd.equals(administrationDateTime); } else {// ww w . j a v a 2s. c o m inTaskCreationInterval = taskCreationInterval.getStart().isBefore(administrationDateTime) && (taskCreationIntervalEnd.isAfter(administrationDateTime) || taskCreationIntervalEnd.equals(administrationDateTime)); } if (dose != null && dose.getNumerator() != null && inTaskCreationInterval) { return Opt.of(Pair.of(administrationDateTime, dose)); } return Opt.none(); }
From source file:com.marand.thinkmed.medications.business.impl.DefaultMedicationsBo.java
License:Open Source License
@Override public MedicationActionAction getInstructionAction(final MedicationOrderComposition composition, final MedicationInstructionInstruction instruction, final MedicationActionEnum searchActionEnum, @Nullable final Interval searchInterval) { if (composition.getMedicationDetail().getMedicationInstruction().size() == 1) { final List<MedicationActionAction> actions = composition.getMedicationDetail().getMedicationAction(); for (final MedicationActionAction action : actions) { final MedicationActionEnum actionEnum = MedicationActionEnum.getActionEnum(action); if (actionEnum == searchActionEnum) { final DateTime actionDateTime = DataValueUtils.getDateTime(action.getTime()); if (searchInterval == null || searchInterval.contains(actionDateTime)) { return action; }//from ww w.ja v a 2 s . c o m } } return null; } else { final String instructionPath = TdoPathable.pathOfItem(composition, instruction).getCanonicalString(); for (final MedicationActionAction action : composition.getMedicationDetail().getMedicationAction()) { if (action.getInstructionDetails().getInstructionId().getPath().equals(instructionPath)) { final MedicationActionEnum actionEnum = MedicationActionEnum.getActionEnum(action); if (searchInterval != null) { final DateTime actionDateTime = DataValueUtils.getDateTime(action.getTime()); if (searchInterval.contains(actionDateTime)) { if (actionEnum == searchActionEnum) { return action; } } } else { if (actionEnum == searchActionEnum) { return action; } } } } return null; } }
From source file:com.marand.thinkmed.medications.pharmacist.impl.PharmacistTaskProviderImpl.java
License:Open Source License
@Override public boolean therapyHasTasksClosedInInterval(@Nonnull final String patientId, @Nonnull final String originalTherapyId, @Nonnull final Set<TaskTypeEnum> taskTypesSet, @Nonnull final Interval interval) { Preconditions.checkNotNull(patientId, "patientId is null"); Preconditions.checkNotNull(originalTherapyId, "originalTherapyId is null"); Preconditions.checkNotNull(taskTypesSet, "taskTypesSet is null"); Preconditions.checkNotNull(interval, "interval is null"); final List<String> patientIdKeysForTaskTypes = TherapyTaskUtils .getPatientIdKeysForTaskTypes(Collections.singleton(patientId), taskTypesSet); final PartialList<TaskDto> tasks = processService.findTasks(null, null, null, true, null, null, patientIdKeysForTaskTypes, EnumSet.noneOf(TaskDetailsEnum.class), Pair.of(TherapyTaskDef.ORIGINAL_THERAPY_ID, originalTherapyId)); for (final TaskDto task : tasks) { if (task.isCompleted() && interval.contains(task.getEndTime())) { return true; }// w w w. j av a2 s. com } return false; }
From source file:com.marand.thinkmed.medications.service.impl.MedicationsServiceImpl.java
License:Open Source License
/**
 * Assembles the therapy-view payload for a patient: demographic/central-case data,
 * last reference weight, care-provider custom groups, administration timing and
 * rounds preferences, and the patient's last therapy link name.
 */
@Override
@Transactional(readOnly = true)
@ServiceMethod(auditing = @Auditing(level = Level.FULL))
@EhrSessioned
public TherapyViewPatientDto getTherapyViewPatientData(@Nonnull final String patientId)
{
  StringUtils.checkNotBlank(patientId, "patientId required");

  final DateTime now = RequestContextHolder.getContext().getRequestTimestamp();
  final TherapyViewPatientDto result = new TherapyViewPatientDto();

  final PatientDataForMedicationsDto patientData = patientDataProvider.getPatientData(patientId, now);
  result.setPatientData(patientData);

  final MedicationsCentralCaseDto centralCase = patientData.getCentralCaseDto();
  final String careProviderId = centralCase != null && centralCase.getCareProvider() != null
      ? centralCase.getCareProvider().getId()
      : null;

  final boolean inpatient = centralCase != null && !centralCase.isOutpatient();
  final Interval weightSearchInterval;
  if (inpatient)
  {
    final DateTime hospitalizationStart = centralCase.getCentralCaseEffective().getStart();
    weightSearchInterval = new Interval(hospitalizationStart, now);
    // "recent" hospitalization = admitted within the last 12 hours
    result.setRecentHospitalization(new Interval(now.minusHours(12), now).contains(hospitalizationStart));
  }
  else
  {
    // outpatients: only consider weights measured in the last 24 hours
    weightSearchInterval = new Interval(now.minusHours(24), now);
  }

  result.setReferenceWeight(
      medicationsOpenEhrDao.getPatientLastReferenceWeight(patientId, weightSearchInterval));

  if (careProviderId != null)
  {
    result.setCustomGroups(medicationsDao.getCustomGroupNames(careProviderId));
  }

  result.setAdministrationTiming(MedicationPreferencesUtil.getAdministrationTiming(careProviderId));
  result.setRoundsInterval(MedicationPreferencesUtil.getRoundsInterval(careProviderId));

  final Long patientIdLong = MedicationsConnectorUtils.getId(patientId);
  if (patientIdLong != null)
  {
    result.setLastLinkName(medicationsDao.getPatientLastLinkName(patientIdLong));
  }

  return result;
}
From source file:com.metamx.druid.http.ClientInfoResource.java
License:Open Source License
/**
 * Returns the union of dimension names for the datasource's segments.
 * With no interval parameter, uses the newest segment plus all older segments whose
 * intervals still lie within SEGMENT_HISTORY_MILLIS of the newest segment's end;
 * otherwise, uses all segments fully contained in the parsed interval.
 */
@GET
@Path("/{dataSourceName}/dimensions")
@Produces("application/json")
public Iterable<String> getDatasourceDimensions(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("interval") String interval)
{
    final DruidDataSource dataSource = updateDataSources().get(dataSourceName);
    final Set<String> retVal = Sets.newHashSet();

    if (interval == null || interval.isEmpty()) {
        // Segments arrive sorted; reverse so the newest segment comes first.
        final Iterator<DataSegment> iter =
            Lists.reverse(Lists.newArrayList(dataSource.getSegments())).iterator();
        DataSegment segment = iter.next();
        retVal.addAll(segment.getDimensions());
        final Interval dimInterval = new Interval(
            segment.getInterval().getEnd().minus(SEGMENT_HISTORY_MILLIS),
            segment.getInterval().getEnd());
        // BUG FIX: the previous loop tested dimInterval against the segment added in the
        // *previous* iteration, re-added the first segment, and could skip the final
        // segment fetched from the iterator. Fetch, check, then add each segment instead.
        while (iter.hasNext()) {
            segment = iter.next();
            if (!dimInterval.contains(segment.getInterval())) {
                break;
            }
            retVal.addAll(segment.getDimensions());
        }
    } else {
        final Interval dimInterval;
        try {
            dimInterval = new Interval(interval);
        } catch (Exception e) {
            throw new IAE("Interval is not in a parseable format!");
        }
        for (final DataSegment segment : dataSource.getSegments()) {
            if (dimInterval.contains(segment.getInterval())) {
                retVal.addAll(segment.getDimensions());
            }
        }
    }
    return retVal;
}
From source file:com.metamx.druid.http.ClientInfoResource.java
License:Open Source License
@GET @Path("/{dataSourceName}/metrics") @Produces("application/json") public Iterable<String> getDatasourceMetrics(@PathParam("dataSourceName") String dataSourceName, @QueryParam("interval") String interval) { DruidDataSource dataSource = updateDataSources().get(dataSourceName); Set<String> retVal = Sets.newHashSet(); Interval dimInterval; if (interval == null || interval.isEmpty()) { Iterator<DataSegment> iter = Lists.reverse(Lists.newArrayList(dataSource.getSegments())).iterator(); DataSegment segment = iter.next(); retVal.addAll(segment.getMetrics()); dimInterval = new Interval(segment.getInterval().getEnd().minus(SEGMENT_HISTORY_MILLIS), segment.getInterval().getEnd()); while (iter.hasNext() && dimInterval.contains(segment.getInterval())) { retVal.addAll(segment.getMetrics()); segment = iter.next();//from ww w . j a va2 s .c om } } else { try { dimInterval = new Interval(interval); } catch (Exception e) { throw new IAE("Interval is not in a parseable format!"); } Iterator<DataSegment> iter = dataSource.getSegments().iterator(); while (iter.hasNext()) { DataSegment segment = iter.next(); if (dimInterval.contains(segment.getInterval())) { retVal.addAll(segment.getMetrics()); } } } return retVal; }
From source file:com.metamx.druid.index.v1.IndexMerger.java
License:Open Source License
public static File persist(final IncrementalIndex index, final Interval dataInterval, File outDir, ProgressIndicator progress) throws IOException { final long firstTimestamp = index.getMinTime().getMillis(); final long lastTimestamp = index.getMaxTime().getMillis(); if (!(dataInterval.contains(firstTimestamp) && dataInterval.contains(lastTimestamp))) { throw new IAE("interval[%s] does not encapsulate the full range of timestamps[%s, %s]", dataInterval, new DateTime(firstTimestamp), new DateTime(lastTimestamp)); }//from ww w . j ava2 s. c om if (!outDir.exists()) { outDir.mkdirs(); } if (!outDir.isDirectory()) { throw new ISE("Can only persist to directories, [%s] wasn't a directory", outDir); } log.info("Starting persist for interval[%s], rows[%,d]", dataInterval, index.size()); return merge(Arrays.<IndexableAdapter>asList(new IncrementalIndexAdapter(dataInterval, index)), index.getMetricAggs(), outDir, progress); }
From source file:com.metamx.druid.indexer.granularity.ArbitraryGranularitySpec.java
License:Open Source License
@Override public Optional<Interval> bucketInterval(DateTime dt) { // First interval with start time dt final Interval interval = intervals.floor(new Interval(dt, new DateTime(Long.MAX_VALUE))); if (interval != null && interval.contains(dt)) { return Optional.of(interval); } else {//from w w w . jav a 2 s .c o m return Optional.absent(); } }
From source file:com.metamx.druid.indexing.common.task.IndexDeterminePartitionsTask.java
License:Open Source License
/**
 * Determines shard partitions for an indexing job: scans the firehose once, picks a
 * partition dimension (heuristic: highest cardinality among single-valued dimensions),
 * slices its sorted values into shards of roughly targetPartitionSize rows, and spawns
 * one IndexGeneratorTask per shard.
 */
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception {
    log.info("Running with targetPartitionSize[%d]", targetPartitionSize);

    // TODO: Replace/merge/whatever with hadoop determine-partitions code

    // We know this exists
    final Interval interval = getImplicitLockInterval().get();

    // Blacklist dimensions that have multiple values per row
    final Set<String> unusableDimensions = Sets.newHashSet();

    // Track values of all non-blacklisted dimensions
    final Map<String, TreeMultiset<String>> dimensionValueMultisets = Maps.newHashMap();

    // Load data: count every value of every single-valued dimension for rows in the lock interval
    final Firehose firehose = firehoseFactory.connect();
    try {
        while (firehose.hasMore()) {
            final InputRow inputRow = firehose.nextRow();
            if (interval.contains(inputRow.getTimestampFromEpoch())) {
                // Extract dimensions from event
                for (final String dim : inputRow.getDimensions()) {
                    final List<String> dimValues = inputRow.getDimension(dim);
                    if (!unusableDimensions.contains(dim)) {
                        if (dimValues.size() == 1) {
                            // Track this value
                            TreeMultiset<String> dimensionValueMultiset = dimensionValueMultisets.get(dim);
                            if (dimensionValueMultiset == null) {
                                dimensionValueMultiset = TreeMultiset.create();
                                dimensionValueMultisets.put(dim, dimensionValueMultiset);
                            }
                            dimensionValueMultiset.add(dimValues.get(0));
                        } else {
                            // Only single-valued dimensions can be used for partitions;
                            // blacklist this one and drop any counts gathered so far
                            unusableDimensions.add(dim);
                            dimensionValueMultisets.remove(dim);
                        }
                    }
                }
            }
        }
    } finally {
        firehose.close();
    }

    // ShardSpecs for index generator tasks
    final List<ShardSpec> shardSpecs = Lists.newArrayList();

    // Select highest-cardinality dimension (cardinality = number of distinct values)
    Ordering<Map.Entry<String, TreeMultiset<String>>> byCardinalityOrdering = new Ordering<Map.Entry<String, TreeMultiset<String>>>() {
        @Override
        public int compare(Map.Entry<String, TreeMultiset<String>> left,
                Map.Entry<String, TreeMultiset<String>> right) {
            return Ints.compare(left.getValue().elementSet().size(), right.getValue().elementSet().size());
        }
    };

    if (dimensionValueMultisets.isEmpty()) {
        // No suitable partition dimension. We'll make one big segment and hope for the best.
        log.info("No suitable partition dimension found");
        shardSpecs.add(new NoneShardSpec());
    } else {
        // Find best partition dimension (heuristic: highest cardinality).
        final Map.Entry<String, TreeMultiset<String>> partitionEntry = byCardinalityOrdering
                .max(dimensionValueMultisets.entrySet());

        final String partitionDim = partitionEntry.getKey();
        final TreeMultiset<String> partitionDimValues = partitionEntry.getValue();

        log.info("Partitioning on dimension[%s] with cardinality[%d] over rows[%d]", partitionDim,
                partitionDimValues.elementSet().size(), partitionDimValues.size());

        // Iterate over unique partition dimension values in sorted order
        String currentPartitionStart = null;
        int currentPartitionSize = 0;
        for (final String partitionDimValue : partitionDimValues.elementSet()) {
            currentPartitionSize += partitionDimValues.count(partitionDimValue);
            if (currentPartitionSize >= targetPartitionSize) {
                // NOTE(review): the boundary value's count both closes this shard (added
                // above) and seeds the next shard's running size below, while also being
                // this shard's end value — confirm the intended boundary semantics.
                final ShardSpec shardSpec = new SingleDimensionShardSpec(partitionDim, currentPartitionStart,
                        partitionDimValue, shardSpecs.size());

                log.info("Adding shard: %s", shardSpec);
                shardSpecs.add(shardSpec);

                currentPartitionSize = partitionDimValues.count(partitionDimValue);
                currentPartitionStart = partitionDimValue;
            }
        }

        if (currentPartitionSize > 0) {
            // One last shard to go
            final ShardSpec shardSpec;

            if (shardSpecs.isEmpty()) {
                shardSpec = new NoneShardSpec();
            } else {
                shardSpec = new SingleDimensionShardSpec(partitionDim, currentPartitionStart, null,
                        shardSpecs.size());
            }

            log.info("Adding shard: %s", shardSpec);
            shardSpecs.add(shardSpec);
        }
    }

    // One generator task per shard; tasks share the schema except for the shard spec.
    List<Task> nextTasks = Lists.transform(shardSpecs, new Function<ShardSpec, Task>() {
        @Override
        public Task apply(ShardSpec shardSpec) {
            return new IndexGeneratorTask(null, getGroupId(), getImplicitLockInterval().get(), firehoseFactory,
                    new Schema(schema.getDataSource(), schema.getSpatialDimensions(), schema.getAggregators(),
                            schema.getIndexGranularity(), shardSpec),
                    rowFlushBoundary);
        }
    });

    toolbox.getTaskActionClient().submit(new SpawnTasksAction(nextTasks));

    return TaskStatus.success(getId());
}
From source file:com.metamx.druid.master.rules.PeriodDropRule.java
License:Open Source License
/**
 * A segment matches this drop rule when its whole interval lies inside the trailing
 * window of length {@code period} ending at {@code referenceTimestamp}.
 */
@Override
public boolean appliesTo(DataSegment segment, DateTime referenceTimestamp)
{
    final Interval dropWindow = new Interval(period, referenceTimestamp);
    return dropWindow.contains(segment.getInterval());
}