List of usage examples for org.joda.time Interval getEnd
public DateTime getEnd()
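Interval.getEnd() returns the (exclusive) end of the interval as a DateTime. Before the project-specific examples below, here is a minimal standalone sketch of the basic call; the class name, dates, and variables are illustrative only and do not come from any of the listed source files:

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalGetEndExample {
    public static void main(String[] args) {
        // Build an interval covering the first hour of 2020-01-01 in the JVM default time zone.
        DateTime start = new DateTime(2020, 1, 1, 0, 0);
        DateTime end = new DateTime(2020, 1, 1, 1, 0);
        Interval interval = new Interval(start, end);

        // getEnd() returns the exclusive end instant as a DateTime.
        DateTime intervalEnd = interval.getEnd();
        System.out.println(intervalEnd); // e.g. 2020-01-01T01:00:00.000 (offset depends on the default zone)

        // A pattern that recurs in the examples below: extend an interval up to another interval's end.
        Interval later = new Interval(end, end.plusHours(1));
        if (interval.abuts(later)) {
            interval = interval.withEnd(later.getEnd());
        }
        System.out.println(interval); // the merged two-hour interval
    }
}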
From source file:org.apache.druid.server.ClientInfoResource.java
License:Apache License
@GET
@Path("/{dataSourceName}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Map<String, Object> getDatasource(@PathParam("dataSourceName") String dataSourceName,
        @QueryParam("interval") String interval, @QueryParam("full") String full) {
    if (full == null) {
        return ImmutableMap.of(KEY_DIMENSIONS, getDataSourceDimensions(dataSourceName, interval), KEY_METRICS,
                getDataSourceMetrics(dataSourceName, interval));
    }

    Interval theInterval;
    if (interval == null || interval.isEmpty()) {
        DateTime now = getCurrentTime();
        theInterval = new Interval(segmentMetadataQueryConfig.getDefaultHistory(), now);
    } else {
        theInterval = Intervals.of(interval);
    }

    TimelineLookup<String, ServerSelector> timeline = timelineServerView
            .getTimeline(new TableDataSource(dataSourceName));
    Iterable<TimelineObjectHolder<String, ServerSelector>> serversLookup = timeline != null
            ? timeline.lookup(theInterval)
            : null;
    if (serversLookup == null || Iterables.isEmpty(serversLookup)) {
        return Collections.EMPTY_MAP;
    }

    Map<Interval, Object> servedIntervals = new TreeMap<>(new Comparator<Interval>() {
        @Override
        public int compare(Interval o1, Interval o2) {
            if (o1.equals(o2) || o1.overlaps(o2)) {
                return 0;
            } else {
                return o1.isBefore(o2) ? -1 : 1;
            }
        }
    });

    for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) {
        final Set<Object> dimensions = new HashSet<>();
        final Set<Object> metrics = new HashSet<>();
        final PartitionHolder<ServerSelector> partitionHolder = holder.getObject();
        if (partitionHolder.isComplete()) {
            for (ServerSelector server : partitionHolder.payloads()) {
                final DataSegment segment = server.getSegment();
                dimensions.addAll(segment.getDimensions());
                metrics.addAll(segment.getMetrics());
            }
        }

        servedIntervals.put(holder.getInterval(),
                ImmutableMap.of(KEY_DIMENSIONS, dimensions, KEY_METRICS, metrics));
    }

    // collapse intervals if they abut and have same set of columns
    Map<String, Object> result = Maps.newLinkedHashMap();
    Interval curr = null;
    Map<String, Set<String>> cols = null;
    for (Map.Entry<Interval, Object> e : servedIntervals.entrySet()) {
        Interval ival = e.getKey();
        if (curr != null && curr.abuts(ival) && cols.equals(e.getValue())) {
            curr = curr.withEnd(ival.getEnd());
        } else {
            if (curr != null) {
                result.put(curr.toString(), cols);
            }
            curr = ival;
            cols = (Map<String, Set<String>>) e.getValue();
        }
    }

    // add the last one in
    if (curr != null) {
        result.put(curr.toString(), cols);
    }

    return result;
}
From source file:org.apache.druid.server.coordinator.helper.NewestSegmentFirstIterator.java
License:Apache License
/**
 * Returns the initial searchInterval which is {@code (timeline.first().start, timeline.last().end - skipOffset)}.
 *
 * @param timeline      timeline of a dataSource
 * @param skipIntervals intervals to skip
 *
 * @return found interval to search or null if it's not found
 */
private static List<Interval> findInitialSearchInterval(VersionedIntervalTimeline<String, DataSegment> timeline,
        Period skipOffset, @Nullable List<Interval> skipIntervals) {
    Preconditions.checkArgument(timeline != null && !timeline.isEmpty(), "timeline should not be null or empty");
    Preconditions.checkNotNull(skipOffset, "skipOffset");

    final TimelineObjectHolder<String, DataSegment> first = Preconditions.checkNotNull(timeline.first(), "first");
    final TimelineObjectHolder<String, DataSegment> last = Preconditions.checkNotNull(timeline.last(), "last");
    final List<Interval> fullSkipIntervals = sortAndAddSkipIntervalFromLatest(last.getInterval().getEnd(),
            skipOffset, skipIntervals);
    final Interval totalInterval = new Interval(first.getInterval().getStart(), last.getInterval().getEnd());
    final List<Interval> filteredInterval = filterSkipIntervals(totalInterval, fullSkipIntervals);
    final List<Interval> searchIntervals = new ArrayList<>();

    for (Interval lookupInterval : filteredInterval) {
        final List<TimelineObjectHolder<String, DataSegment>> holders = timeline
                .lookup(new Interval(lookupInterval.getStart(), lookupInterval.getEnd()));
        final List<DataSegment> segments = holders.stream()
                .flatMap(holder -> StreamSupport.stream(holder.getObject().spliterator(), false))
                .map(PartitionChunk::getObject)
                .filter(segment -> lookupInterval.contains(segment.getInterval()))
                .sorted((s1, s2) -> Comparators.intervalsByStartThenEnd().compare(s1.getInterval(), s2.getInterval()))
                .collect(Collectors.toList());

        if (!segments.isEmpty()) {
            searchIntervals.add(new Interval(segments.get(0).getInterval().getStart(),
                    segments.get(segments.size() - 1).getInterval().getEnd()));
        }
    }

    return searchIntervals;
}
From source file:org.apache.druid.server.coordinator.helper.NewestSegmentFirstIterator.java
License:Apache License
/**
 * Returns a list of intervals which are contained by totalInterval but don't overlap with skipIntervals.
 *
 * @param totalInterval total interval
 * @param skipIntervals intervals to skip. This should be sorted by {@link Comparators#intervalsByStartThenEnd()}.
 */
@VisibleForTesting
static List<Interval> filterSkipIntervals(Interval totalInterval, List<Interval> skipIntervals) {
    final List<Interval> filteredIntervals = new ArrayList<>(skipIntervals.size() + 1);

    DateTime remainingStart = totalInterval.getStart();
    DateTime remainingEnd = totalInterval.getEnd();
    for (Interval skipInterval : skipIntervals) {
        if (skipInterval.getStart().isBefore(remainingStart) && skipInterval.getEnd().isAfter(remainingStart)) {
            remainingStart = skipInterval.getEnd();
        } else if (skipInterval.getStart().isBefore(remainingEnd) && skipInterval.getEnd().isAfter(remainingEnd)) {
            remainingEnd = skipInterval.getStart();
        } else if (!remainingStart.isAfter(skipInterval.getStart())
                && !remainingEnd.isBefore(skipInterval.getEnd())) {
            filteredIntervals.add(new Interval(remainingStart, skipInterval.getStart()));
            remainingStart = skipInterval.getEnd();
        } else {
            // Ignore this skipInterval
            log.warn("skipInterval[%s] is not contained in remainingInterval[%s]", skipInterval,
                    new Interval(remainingStart, remainingEnd));
        }
    }

    if (!remainingStart.equals(remainingEnd)) {
        filteredIntervals.add(new Interval(remainingStart, remainingEnd));
    }

    return filteredIntervals;
}
From source file:org.apache.druid.timeline.VersionedIntervalTimeline.java
License:Apache License
/**
 * @param timeline
 * @param key
 * @param entry
 *
 * @return boolean flag indicating whether or not we inserted or discarded something
 */
private boolean addAtKey(NavigableMap<Interval, TimelineEntry> timeline, Interval key, TimelineEntry entry) {
    boolean retVal = false;
    Interval currKey = key;
    Interval entryInterval = entry.getTrueInterval();

    if (!currKey.overlaps(entryInterval)) {
        return false;
    }

    while (entryInterval != null && currKey != null && currKey.overlaps(entryInterval)) {
        final Interval nextKey = timeline.higherKey(currKey);

        final int versionCompare = versionComparator.compare(entry.getVersion(),
                timeline.get(currKey).getVersion());

        if (versionCompare < 0) {
            // since the entry version is lower than the existing one, the existing one overwrites the given entry
            // if overlapped.
            if (currKey.contains(entryInterval)) {
                // the version of the entry of currKey is larger than that of the given entry. Discard it
                return true;
            } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
                //     | entry |
                // | cur |
                // =>    |new |
                entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
            } else {
                // | entry |
                //     | cur |
                // => |new|
                addIntervalToTimeline(new Interval(entryInterval.getStart(), currKey.getStart()), entry, timeline);

                // | entry     |
                //   | cur |
                // =>        |new|
                if (entryInterval.getEnd().isAfter(currKey.getEnd())) {
                    entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
                } else {
                    // Discard this entry since there is no portion of the entry interval that goes past the end
                    // of the curr key interval.
                    entryInterval = null;
                }
            }
        } else if (versionCompare > 0) {
            // since the entry version is greater than the existing one, the given entry overwrites the existing one
            // if overlapped.
            final TimelineEntry oldEntry = timeline.remove(currKey);

            if (currKey.contains(entryInterval)) {
                // |       cur       |
                //     | entry |
                // => |old| new |old|
                addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
                addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
                addIntervalToTimeline(entryInterval, entry, timeline);

                return true;
            } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
                // | cur |
                //     | entry |
                // => |old|
                addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry, timeline);
            } else if (entryInterval.getEnd().isBefore(currKey.getEnd())) {
                //     | cur |
                // | entry |
                // =>        |old|
                addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry, timeline);
            }
        } else {
            if (timeline.get(currKey).equals(entry)) {
                // This occurs when restoring segments
                timeline.remove(currKey);
            } else {
                throw new UOE("Cannot add overlapping segments [%s and %s] with the same version [%s]", currKey,
                        entryInterval, entry.getVersion());
            }
        }

        currKey = nextKey;
        retVal = true;
    }

    addIntervalToTimeline(entryInterval, entry, timeline);

    return retVal;
}
From source file:org.apache.druid.timeline.VersionedIntervalTimeline.java
License:Apache License
private List<TimelineObjectHolder<VersionType, ObjectType>> lookup(Interval interval, boolean incompleteOk) {
    List<TimelineObjectHolder<VersionType, ObjectType>> retVal = new ArrayList<>();
    NavigableMap<Interval, TimelineEntry> timeline = (incompleteOk) ? incompletePartitionsTimeline
            : completePartitionsTimeline;

    for (Entry<Interval, TimelineEntry> entry : timeline.entrySet()) {
        Interval timelineInterval = entry.getKey();
        TimelineEntry val = entry.getValue();

        if (timelineInterval.overlaps(interval)) {
            retVal.add(new TimelineObjectHolder<>(timelineInterval, val.getTrueInterval(), val.getVersion(),
                    new PartitionHolder<>(val.getPartitionHolder())));
        }
    }

    if (retVal.isEmpty()) {
        return retVal;
    }

    TimelineObjectHolder<VersionType, ObjectType> firstEntry = retVal.get(0);
    if (interval.overlaps(firstEntry.getInterval())
            && interval.getStart().isAfter(firstEntry.getInterval().getStart())) {
        retVal.set(0,
                new TimelineObjectHolder<>(new Interval(interval.getStart(), firstEntry.getInterval().getEnd()),
                        firstEntry.getTrueInterval(), firstEntry.getVersion(), firstEntry.getObject()));
    }

    TimelineObjectHolder<VersionType, ObjectType> lastEntry = retVal.get(retVal.size() - 1);
    if (interval.overlaps(lastEntry.getInterval()) && interval.getEnd().isBefore(lastEntry.getInterval().getEnd())) {
        retVal.set(retVal.size() - 1,
                new TimelineObjectHolder<>(new Interval(lastEntry.getInterval().getStart(), interval.getEnd()),
                        lastEntry.getTrueInterval(), lastEntry.getVersion(), lastEntry.getObject()));
    }

    return retVal;
}
From source file:org.apache.hadoop.hive.druid.DruidStorageHandlerUtils.java
License:Apache License
private static VersionedIntervalTimeline<String, DataSegment> getTimelineForIntervalWithHandle(
        final Handle handle, final String dataSource, final Interval interval,
        final MetadataStorageTablesConfig dbTables) throws IOException {
    Query<Map<String, Object>> sql = handle.createQuery(String.format(
            "SELECT payload FROM %s WHERE used = true AND dataSource = ? AND start <= ? AND \"end\" >= ?",
            dbTables.getSegmentsTable())).bind(0, dataSource).bind(1, interval.getEnd().toString())
            .bind(2, interval.getStart().toString());

    final VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<>(
            Ordering.natural());
    final ResultIterator<byte[]> dbSegments = sql.map(ByteArrayMapper.FIRST).iterator();
    try {
        while (dbSegments.hasNext()) {
            final byte[] payload = dbSegments.next();
            DataSegment segment = JSON_MAPPER.readValue(payload, DataSegment.class);
            timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
        }
    } finally {
        dbSegments.close();
    }
    return timeline;
}
From source file:org.archfirst.common.datetime.DateTimeUtil.java
License:Apache License
public static final String toString(Interval interval) {
    StringBuilder builder = new StringBuilder();
    DateTimeFormatter fmt = DateTimeFormat.forPattern(DateTimeUtil.DATE_TIME_SEC_PATTERN);
    builder.append("[");
    builder.append(interval.getStart().toString(fmt));
    builder.append(", ");
    builder.append(interval.getEnd().toString(fmt));
    builder.append("]");
    return builder.toString();
}
From source file:org.archfirst.common.datetime.DateTimeUtil.java
License:Apache License
public static Interval intervalStartingOn(Interval interval, DateTime start) {
    return new Interval(start, interval.getEnd());
}
From source file:org.archfirst.common.datetime.IntervalComparator.java
License:Apache License
@Override
public int compare(Interval interval1, Interval interval2) {
    int result = 0; // assume equal
    if (interval1 == null) {
        return -1;
    } else if (interval2 == null) {
        return 1;
    } else {
        result = interval1.getStart().compareTo(interval2.getStart());
        if (result == 0) {
            result = interval1.getEnd().compareTo(interval2.getEnd());
        }
    }
    return result;
}
From source file:org.atlasapi.remotesite.bbc.BbcProgrammesPolicyClient.java
License:Apache License
private Maybe<Policy> policyFromAtom(String data) {
    Set<Country> availableCountries = null;
    Matcher restrictionMatcher = RESTRICTION_PATTERN.matcher(data);
    if (restrictionMatcher.find()) {
        availableCountries = Countries.fromDelimtedList(restrictionMatcher.group(1));
    }

    Interval validRange = null;
    Matcher matcher = START_END_FORMAT.matcher(data);
    if (matcher.find()) {
        validRange = new Interval(new DateTime(matcher.group(1)), new DateTime(matcher.group(2)));
    }

    if (availableCountries != null && validRange != null) {
        Policy policy = new Policy();
        policy.setAvailableCountries(availableCountries);
        policy.setAvailabilityStart(validRange.getStart());
        policy.setAvailabilityEnd(validRange.getEnd());
        policy.setPlayer(locationPolicyIds.getIPlayerPlayerId());
        policy.setService(locationPolicyIds.getWebServiceId());
        return Maybe.just(policy);
    }
    return Maybe.nothing();
}