Usage examples for the org.joda.time Interval(Object) constructor
public Interval(Object interval)
From source file:IntervalTypeAdapter.java
License:Apache License
/**
 * Reads an {@link Interval} from the JSON stream.
 *
 * @param in the reader positioned at the value to consume
 * @return the parsed interval, or {@code null} for a JSON null
 * @throws IOException if the underlying stream fails
 */
@Override
public Interval read(JsonReader in) throws IOException {
    if (in.peek() != JsonToken.NULL) {
        // Interval(Object) accepts the string form produced by Interval.toString().
        return new Interval(in.nextString());
    }
    // Consume the null token so the reader stays positioned correctly.
    in.nextNull();
    return null;
}
From source file:com.almende.eve.entity.Weight.java
License:Apache License
/**
 * Copy constructor: instantiates a new weight as an independent copy of another.
 *
 * @param other the weight to copy (must be non-null; a null {@code other.weight}
 *              still triggers an NPE via unboxing, matching the original behavior)
 */
public Weight(final Weight other) {
    // Interval(Object) builds a fresh Interval from the other instance.
    interval = new Interval(other.interval);
    // Double.valueOf replaces the deprecated boxing constructor new Double(...);
    // unboxing other.weight preserves the original NPE-on-null behavior.
    weight = Double.valueOf(other.weight);
}
From source file:com.fatboyindustrial.gsonjodatime.IntervalConverter.java
License:Open Source License
/**
 * Gson invokes this call-back method during deserialization when it encounters a field of the
 * specified type.
 * <p>
 * Implementations may invoke {@link JsonDeserializationContext#deserialize(JsonElement, Type)}
 * for non-trivial fields of the returned object, but must never invoke it on {@code json}
 * itself for the same type, since that would cause an infinite loop.
 *
 * @param json the JSON data being deserialized
 * @param typeOfT the type of the object to deserialize to
 * @return a deserialized {@link Interval}
 * @throws JsonParseException if {@code json} is not in the expected format
 */
@Override
public Interval deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
        throws JsonParseException {
    final String text = json.getAsString();
    return new Interval(text);
}
From source file:com.linkedin.pinot.core.query.pruner.TimeSegmentPruner.java
License:Apache License
/**
 * Decides whether a segment can be skipped for this query.
 *
 * @param segment the candidate index segment
 * @param brokerRequest the incoming query, possibly carrying a time interval
 * @return {@code true} when both intervals are known and the request interval
 *         does not fully contain the segment's interval
 */
@Override
public boolean prune(IndexSegment segment, BrokerRequest brokerRequest) {
    final Interval interval = segment.getSegmentMetadata().getTimeInterval();
    // Return the condition directly instead of the if-true/return-false pattern.
    // When either interval is missing we cannot safely prune.
    return interval != null
            && brokerRequest.getTimeInterval() != null
            && !new Interval(brokerRequest.getTimeInterval()).contains(interval);
}
From source file:com.metamx.druid.http.InfoResource.java
License:Open Source License
@DELETE @Path("/datasources/{dataSourceName}") public Response deleteDataSource(@PathParam("dataSourceName") final String dataSourceName, @QueryParam("kill") final String kill, @QueryParam("interval") final String interval) { // This is weird enough to have warranted some sort of T0D0 comment at one point, but it will likely be all // rewritten once Guice introduced, and that's the brunt of the information that was in the original T0D0 too. if (indexingServiceClient == null) { return Response.status(Response.Status.OK).entity(ImmutableMap.of("error", "no indexing service found")) .build();// w w w. ja v a2 s . c o m } if (kill != null && Boolean.valueOf(kill)) { indexingServiceClient.killSegments(dataSourceName, new Interval(interval)); } else { if (!databaseSegmentManager.removeDatasource(dataSourceName)) { return Response.status(Response.Status.NOT_FOUND).build(); } } return Response.status(Response.Status.OK).build(); }
From source file:com.metamx.druid.indexer.StringIntervalFunction.java
License:Open Source License
/**
 * Converts an interval string into a Joda-Time {@link Interval}.
 *
 * @param input the interval string to parse
 * @return the parsed interval
 */
@Override
public Interval apply(String input) {
    final Interval parsed = new Interval(input);
    return parsed;
}
From source file:com.metamx.druid.indexing.common.task.AppendTask.java
License:Open Source License
/**
 * Merges the given segment files into a single appended index in {@code outDir}.
 *
 * @param segments map from segment descriptor to its local index file
 * @param outDir   destination directory for the appended index
 * @return the directory containing the merged index
 * @throws Exception on index load or merge failure
 */
@Override
public File merge(final Map<DataSegment, File> segments, final File outDir) throws Exception {
    // Build a versioned timeline so overlapping segments resolve to the
    // visible (highest-version) chunk per interval.
    VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<String, DataSegment>(
            Ordering.<String>natural().nullsFirst());

    for (DataSegment segment : segments.keySet()) {
        timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
    }

    // "1000-01-01/3000-01-01" acts as an effectively unbounded interval so the
    // lookup returns every entry currently in the timeline.
    final List<SegmentToMergeHolder> segmentsToMerge = Lists.transform(
            timeline.lookup(new Interval("1000-01-01/3000-01-01")),
            new Function<TimelineObjectHolder<String, DataSegment>, SegmentToMergeHolder>() {
                @Override
                public SegmentToMergeHolder apply(TimelineObjectHolder<String, DataSegment> input) {
                    final DataSegment segment = input.getObject().getChunk(0).getObject();
                    // Every timeline entry must have a backing file; fail fast otherwise.
                    final File file = Preconditions.checkNotNull(segments.get(segment), "File for segment %s",
                            segment.getIdentifier());
                    return new SegmentToMergeHolder(segment, input.getInterval(), file);
                }
            });

    List<IndexableAdapter> adapters = Lists.newArrayList();
    for (final SegmentToMergeHolder holder : segmentsToMerge) {
        // Restrict each adapter to rows whose timestamp lies inside the
        // interval the segment is visible for in the timeline — presumably
        // so rows from shadowed portions are excluded (NOTE(review): confirm).
        adapters.add(new RowboatFilteringIndexAdapter(
                new QueryableIndexIndexableAdapter(IndexIO.loadIndex(holder.getFile())),
                new Predicate<Rowboat>() {
                    @Override
                    public boolean apply(@Nullable Rowboat input) {
                        return holder.getInterval().contains(input.getTimestamp());
                    }
                }));
    }

    return IndexMerger.append(adapters, outDir);
}
From source file:com.metamx.druid.indexing.coordinator.RealtimeishTask.java
License:Open Source License
/**
 * Exercises a realtime-like lock lifecycle: acquire locks for two consecutive
 * hour intervals, push a segment under each, and release the locks in order,
 * asserting the lock list after every step.
 *
 * @param toolbox provides the task-action client used for all lock/segment actions
 * @return a success status carrying this task's id
 * @throws Exception if any task action fails
 */
@Override
public TaskStatus run(TaskToolbox toolbox) throws Exception {
    // Two consecutive one-hour intervals ("start/period" form).
    final Interval interval1 = new Interval("2010-01-01T00/PT1H");
    final Interval interval2 = new Interval("2010-01-01T01/PT1H");

    // Sort of similar to what realtime tasks do:

    // Acquire lock for first interval
    final TaskLock lock1 = toolbox.getTaskActionClient().submit(new LockAcquireAction(interval1));
    final List<TaskLock> locks1 = toolbox.getTaskActionClient().submit(new LockListAction());

    // (Confirm lock sanity)
    Assert.assertEquals("lock1 interval", interval1, lock1.getInterval());
    Assert.assertEquals("locks1", ImmutableList.of(lock1), locks1);

    // Acquire lock for second interval
    final TaskLock lock2 = toolbox.getTaskActionClient().submit(new LockAcquireAction(interval2));
    final List<TaskLock> locks2 = toolbox.getTaskActionClient().submit(new LockListAction());

    // (Confirm lock sanity)
    Assert.assertEquals("lock2 interval", interval2, lock2.getInterval());
    Assert.assertEquals("locks2", ImmutableList.of(lock1, lock2), locks2);

    // Push first segment, versioned by the lock that covers its interval.
    toolbox.getTaskActionClient().submit(new SegmentInsertAction(ImmutableSet.of(
            DataSegment.builder().dataSource("foo").interval(interval1).version(lock1.getVersion()).build())));

    // Release first lock
    toolbox.getTaskActionClient().submit(new LockReleaseAction(interval1));
    final List<TaskLock> locks3 = toolbox.getTaskActionClient().submit(new LockListAction());

    // (Confirm lock sanity)
    Assert.assertEquals("locks3", ImmutableList.of(lock2), locks3);

    // Push second segment
    toolbox.getTaskActionClient().submit(new SegmentInsertAction(ImmutableSet.of(
            DataSegment.builder().dataSource("foo").interval(interval2).version(lock2.getVersion()).build())));

    // Release second lock
    toolbox.getTaskActionClient().submit(new LockReleaseAction(interval2));
    final List<TaskLock> locks4 = toolbox.getTaskActionClient().submit(new LockListAction());

    // (Confirm lock sanity) — all locks released now.
    Assert.assertEquals("locks4", ImmutableList.<TaskLock>of(), locks4);

    // Exit
    return TaskStatus.success(getId());
}
From source file:com.metamx.druid.merger.common.task.AppendTask.java
License:Open Source License
/**
 * Merges the given segment files into a single appended index in {@code outDir}.
 *
 * @param segments map from segment descriptor to its local index file
 * @param outDir   destination directory for the appended index
 * @return the directory containing the merged index
 * @throws Exception on index load or merge failure
 */
@Override
public File merge(final Map<DataSegment, File> segments, final File outDir) throws Exception {
    // Build a versioned timeline so overlapping segments resolve to the
    // visible (highest-version) chunk per interval.
    VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<String, DataSegment>(
            Ordering.natural().nullsFirst());

    for (DataSegment segment : segments.keySet()) {
        timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
    }

    // "1000-01-01/3000-01-01" acts as an effectively unbounded interval so the
    // lookup returns every entry currently in the timeline.
    final List<SegmentToMergeHolder> segmentsToMerge = Lists.transform(
            timeline.lookup(new Interval("1000-01-01/3000-01-01")),
            new Function<TimelineObjectHolder<String, DataSegment>, SegmentToMergeHolder>() {
                @Override
                public SegmentToMergeHolder apply(TimelineObjectHolder<String, DataSegment> input) {
                    final DataSegment segment = input.getObject().getChunk(0).getObject();
                    // Every timeline entry must have a backing file; fail fast otherwise.
                    final File file = Preconditions.checkNotNull(segments.get(segment), "File for segment %s",
                            segment.getIdentifier());
                    return new SegmentToMergeHolder(segment, input.getInterval(), file);
                }
            });

    List<IndexableAdapter> adapters = Lists.newArrayList();
    for (final SegmentToMergeHolder holder : segmentsToMerge) {
        // Restrict each adapter to rows whose timestamp lies inside the
        // interval the segment is visible for in the timeline — presumably
        // so rows from shadowed portions are excluded (NOTE(review): confirm).
        adapters.add(new RowboatFilteringIndexAdapter(
                new QueryableIndexIndexableAdapter(IndexIO.loadIndex(holder.getFile())),
                new Predicate<Rowboat>() {
                    @Override
                    public boolean apply(@Nullable Rowboat input) {
                        return holder.getInterval().contains(input.getTimestamp());
                    }
                }));
    }

    return IndexMerger.append(adapters, outDir);
}
From source file:com.metamx.druid.query.segment.LegacySegmentSpec.java
License:Open Source License
private static List<Interval> convertValue(Object intervals) { final List<?> intervalStringList; if (intervals instanceof String) { intervalStringList = Arrays.asList((((String) intervals).split(","))); } else if (intervals instanceof Map) { intervalStringList = (List) ((Map) intervals).get("intervals"); } else if (intervals instanceof List) { intervalStringList = (List) intervals; } else {//w w w.j a v a 2s . c o m throw new IAE("Unknown type[%s] for intervals[%s]", intervals.getClass(), intervals); } return Lists.transform(intervalStringList, new Function<Object, Interval>() { @Override public Interval apply(Object input) { return new Interval(input); } }); }