Example usage for org.joda.time Interval Interval

Introduction

This page lists usage examples for the org.joda.time Interval constructor. The prototype below shows one overload; the examples also exercise others, such as Interval(long, long) and Interval(ReadableInstant, ReadableInstant).

Prototype

public Interval(Object interval, Chronology chronology) 

Document

Constructs a time interval by converting or copying from another object, overriding the chronology.
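
As a minimal sketch of this overload (the dates are illustrative), the snippet below copies an existing Interval while overriding its chronology with ISO in UTC:

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

public class IntervalCopyExample {
    public static void main(String[] args) {
        // Any object the converter framework understands is accepted; an existing Interval is the simplest case.
        Interval original = new Interval(new DateTime("2012-01-01"), new DateTime("2012-01-02"));
        // Copy the interval, overriding its chronology with ISO in UTC.
        Interval utc = new Interval(original, ISOChronology.getInstanceUTC());
        System.out.println(utc.getChronology()); // ISOChronology[UTC]
    }
}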

Usage

From source file: io.druid.server.master.DruidMasterBalancerProfiler.java

License: Open Source License

public void bigProfiler() {
    Stopwatch watch = new Stopwatch();
    int numSegments = 55000;
    int numServers = 50;
    EasyMock.expect(manager.getAllRules()).andReturn(ImmutableMap.<String, List<Rule>>of("test", rules))
            .anyTimes();
    EasyMock.expect(manager.getRules(EasyMock.<String>anyObject())).andReturn(rules).anyTimes();
    EasyMock.expect(manager.getRulesWithDefault(EasyMock.<String>anyObject())).andReturn(rules).anyTimes();
    EasyMock.replay(manager);

    master.moveSegment(EasyMock.<String>anyObject(), EasyMock.<String>anyObject(), EasyMock.<String>anyObject(),
            EasyMock.<LoadPeonCallback>anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(master);

    List<DruidServer> serverList = Lists.newArrayList();
    Map<String, LoadQueuePeon> peonMap = Maps.newHashMap();
    List<ServerHolder> serverHolderList = Lists.newArrayList();
    Map<String, DataSegment> segmentMap = Maps.newHashMap();
    for (int i = 0; i < numSegments; i++) {
        segmentMap.put("segment" + i, new DataSegment("datasource" + i,
                new Interval(new DateTime("2012-01-01"), (new DateTime("2012-01-01")).plusHours(1)),
                (new DateTime("2012-03-01")).toString(), Maps.<String, Object>newHashMap(),
                Lists.<String>newArrayList(), Lists.<String>newArrayList(), new NoneShardSpec(), 0, 4L));
    }

    for (int i = 0; i < numServers; i++) {
        DruidServer server = EasyMock.createMock(DruidServer.class);
        EasyMock.expect(server.getMetadata()).andReturn(null).anyTimes();
        EasyMock.expect(server.getCurrSize()).andReturn(30L).atLeastOnce();
        EasyMock.expect(server.getMaxSize()).andReturn(100L).atLeastOnce();
        EasyMock.expect(server.getTier()).andReturn("normal").anyTimes();
        EasyMock.expect(server.getName()).andReturn(Integer.toString(i)).atLeastOnce();
        EasyMock.expect(server.getHost()).andReturn(Integer.toString(i)).anyTimes();
        if (i == 0) {
            EasyMock.expect(server.getSegments()).andReturn(segmentMap).anyTimes();
        } else {
            EasyMock.expect(server.getSegments()).andReturn(new HashMap<String, DataSegment>()).anyTimes();
        }
        EasyMock.expect(server.getSegment(EasyMock.<String>anyObject())).andReturn(null).anyTimes();
        EasyMock.replay(server);

        LoadQueuePeon peon = new LoadQueuePeonTester();
        peonMap.put(Integer.toString(i), peon);
        serverHolderList.add(new ServerHolder(server, peon));
    }

    DruidMasterRuntimeParams params = DruidMasterRuntimeParams.newBuilder()
            .withDruidCluster(
                    new DruidCluster(ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of("normal",
                            MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
                                    .create(serverHolderList))))
            .withLoadManagementPeons(peonMap).withAvailableSegments(segmentMap.values())
            .withDynamicConfigs(new MasterDynamicConfig.Builder().withMaxSegmentsToMove(MAX_SEGMENTS_TO_MOVE)
                    .withReplicantLifetime(500).withReplicationThrottleLimit(5).build())
            .withBalancerReferenceTimestamp(new DateTime("2013-01-01")).withEmitter(emitter)
            .withDatabaseRuleManager(manager).withReplicationManager(new ReplicationThrottler(2, 500))
            .withSegmentReplicantLookup(SegmentReplicantLookup
                    .make(new DruidCluster(ImmutableMap.<String, MinMaxPriorityQueue<ServerHolder>>of("normal",
                            MinMaxPriorityQueue.orderedBy(DruidMasterBalancerTester.percentUsedComparator)
                                    .create(serverHolderList)))))
            .build();

    DruidMasterBalancerTester tester = new DruidMasterBalancerTester(master);
    DruidMasterRuleRunner runner = new DruidMasterRuleRunner(master);
    watch.start();
    DruidMasterRuntimeParams balanceParams = tester.run(params);
    DruidMasterRuntimeParams assignParams = runner.run(params);
    System.out.println(watch.stop());
}

From source file: io.druid.server.master.DruidMasterSegmentMerger.java

License: Open Source License

@Override
public DruidMasterRuntimeParams run(DruidMasterRuntimeParams params) {
    MergerWhitelist whitelist = whiteListRef.get();

    MasterStats stats = new MasterStats();
    Map<String, VersionedIntervalTimeline<String, DataSegment>> dataSources = Maps.newHashMap();

    // Find serviced segments by using a timeline
    for (DataSegment dataSegment : params.getAvailableSegments()) {
        if (whitelist == null || whitelist.contains(dataSegment.getDataSource())) {
            VersionedIntervalTimeline<String, DataSegment> timeline = dataSources
                    .get(dataSegment.getDataSource());
            if (timeline == null) {
                timeline = new VersionedIntervalTimeline<String, DataSegment>(Ordering.<String>natural());
                dataSources.put(dataSegment.getDataSource(), timeline);
            }
            timeline.add(dataSegment.getInterval(), dataSegment.getVersion(),
                    dataSegment.getShardSpec().createChunk(dataSegment));
        }
    }

    // Find segments to merge
    for (final Map.Entry<String, VersionedIntervalTimeline<String, DataSegment>> entry : dataSources
            .entrySet()) {
        // Get serviced segments from the timeline
        VersionedIntervalTimeline<String, DataSegment> timeline = entry.getValue();
        List<TimelineObjectHolder<String, DataSegment>> timelineObjects = timeline
                .lookup(new Interval(new DateTime(0), new DateTime("3000-01-01")));

        // Accumulate timelineObjects greedily until we reach our limits, then backtrack to the maximum complete set
        SegmentsToMerge segmentsToMerge = new SegmentsToMerge();

        for (int i = 0; i < timelineObjects.size(); i++) {
            if (!segmentsToMerge.add(timelineObjects.get(i))
                    || segmentsToMerge.getByteCount() > params.getMasterDynamicConfig().getMergeBytesLimit()
                    || segmentsToMerge.getSegmentCount() >= params.getMasterDynamicConfig()
                            .getMergeSegmentsLimit()) {
                i -= segmentsToMerge.backtrack(params.getMasterDynamicConfig().getMergeBytesLimit());

                if (segmentsToMerge.getSegmentCount() > 1) {
                    stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
                }

                if (segmentsToMerge.getSegmentCount() == 0) {
                    // Backtracked all the way to zero. Increment by one so we continue to make progress.
                    i++;
                }

                segmentsToMerge = new SegmentsToMerge();
            }
        }

        // Finish any remaining timelineObjects to merge that may not have hit the thresholds
        segmentsToMerge.backtrack(params.getMasterDynamicConfig().getMergeBytesLimit());
        if (segmentsToMerge.getSegmentCount() > 1) {
            stats.addToGlobalStat("mergedCount", mergeSegments(segmentsToMerge, entry.getKey()));
        }
    }

    return params.buildFromExisting().withMasterStats(stats).build();
}
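
The lookup above bounds its timeline scan with an effectively unbounded interval, from the epoch to the year 3000. A standalone sketch of just that sentinel pattern (only the Joda-Time part; the timeline itself is Druid's):

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class AllTimeIntervalExample {
    public static void main(String[] args) {
        // Epoch through the year 3000: a practical stand-in for "all time".
        Interval allTime = new Interval(new DateTime(0), new DateTime("3000-01-01"));
        System.out.println(allTime.contains(new DateTime())); // true for any present-day instant
    }
}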

From source file: io.druid.sql.avatica.DruidMeta.java

License: Apache License

/**
 * Get a connection, or throw an exception if it doesn't exist. Also refreshes the timeout timer.
 *
 * @param connectionId connection id
 *
 * @return the connection
 *
 * @throws NoSuchConnectionException if the connection id doesn't exist
 */
@Nonnull
private DruidConnection getDruidConnection(final String connectionId) {
    final DruidConnection connection = connections.get(connectionId);

    if (connection == null) {
        throw new NoSuchConnectionException(connectionId);
    }

    return connection.sync(exec.schedule(() -> {
        log.debug("Connection[%s] timed out.", connectionId);
        closeConnection(new ConnectionHandle(connectionId));
    }, new Interval(new DateTime(), config.getConnectionIdleTimeout()).toDurationMillis(),
            TimeUnit.MILLISECONDS));
}
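
The schedule delay above uses Joda-Time's Interval(ReadableInstant, ReadablePeriod) overload to turn a period anchored at "now" into concrete milliseconds. A minimal sketch of that idiom (the five-minute timeout is an assumed value, not Druid's default):

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Period;

public class TimeoutMillisExample {
    public static void main(String[] args) {
        // Anchor a period at the current instant and measure its length in millis.
        long delayMillis = new Interval(new DateTime(), Period.minutes(5)).toDurationMillis();
        System.out.println(delayMillis); // 300000, barring a DST transition inside the window
    }
}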

From source file: io.druid.sql.calcite.filtration.RangeSets.java

License: Apache License

public static List<Interval> toIntervals(final RangeSet<Long> rangeSet) {
    final List<Interval> retVal = Lists.newArrayList();

    for (Range<Long> range : rangeSet.asRanges()) {
        final long start;
        final long end;

        if (range.hasLowerBound()) {
            final long millis = range.lowerEndpoint();
            start = millis + (range.lowerBoundType() == BoundType.OPEN ? 1 : 0);
        } else {
            start = Filtration.eternity().getStartMillis();
        }

        if (range.hasUpperBound()) {
            final long millis = range.upperEndpoint();
            end = millis + (range.upperBoundType() == BoundType.OPEN ? 0 : 1);
        } else {
            end = Filtration.eternity().getEndMillis();
        }

        retVal.add(new Interval(start, end));
    }

    return retVal;
}
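
A hypothetical call site for toIntervals, using Guava's TreeRangeSet (the closed-open range maps onto a half-open Joda interval; Filtration.eternity() only comes into play for unbounded ranges):

import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import io.druid.sql.calcite.filtration.RangeSets;
import org.joda.time.Interval;

import java.util.List;

public class RangeSetsExample {
    public static void main(String[] args) {
        RangeSet<Long> rangeSet = TreeRangeSet.create();
        rangeSet.add(Range.closedOpen(0L, 86400000L)); // the first day of the epoch, in millis
        // Each Guava range becomes one half-open Joda interval.
        List<Interval> intervals = RangeSets.toIntervals(rangeSet);
        System.out.println(intervals); // one day-long interval, rendered in the default zone
    }
}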

From source file: io.druid.sql.calcite.planner.PlannerContext.java

License: Apache License

public DataContext createDataContext(final JavaTypeFactory typeFactory) {
    class DruidDataContext implements DataContext {
        private final Map<String, Object> context = ImmutableMap.<String, Object>of(
                DataContext.Variable.UTC_TIMESTAMP.camelName, localNow.getMillis(),
                DataContext.Variable.CURRENT_TIMESTAMP.camelName, localNow.getMillis(),
                DataContext.Variable.LOCAL_TIMESTAMP.camelName,
                new Interval(new DateTime("1970-01-01T00:00:00.000", localNow.getZone()), localNow)
                        .toDurationMillis(),
                DataContext.Variable.TIME_ZONE.camelName, localNow.getZone().toTimeZone());

        @Override
        public SchemaPlus getRootSchema() {
            throw new UnsupportedOperationException();
        }

        @Override
        public JavaTypeFactory getTypeFactory() {
            return typeFactory;
        }

        @Override
        public QueryProvider getQueryProvider() {
            throw new UnsupportedOperationException();
        }

        @Override
        public Object get(final String name) {
            return context.get(name);
        }
    }

    return new DruidDataContext();
}
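
LOCAL_TIMESTAMP above is derived by measuring the interval from the local epoch to localNow, i.e. epoch millis shifted into the session time zone. A minimal sketch of just that computation (the zone is an assumed example):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

public class LocalTimestampExample {
    public static void main(String[] args) {
        DateTimeZone zone = DateTimeZone.forID("America/Los_Angeles");
        DateTime localNow = new DateTime(zone);
        // Millis elapsed since 1970-01-01T00:00 in the local zone, not since the UTC epoch.
        long localMillis = new Interval(new DateTime("1970-01-01T00:00:00.000", zone), localNow)
                .toDurationMillis();
        System.out.println(localMillis);
    }
}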

From source file: io.druid.timeline.DataSegmentUtils.java

License: Apache License

/**
 * Parses a segment identifier into its components: dataSource, interval, version, and any trailing tags. Ignores
 * the shard spec.
 *
 * This method may incorrectly parse an identifier, for example if the dataSource name in the identifier contains a
 * DateTime-parseable string such as 'datasource_2000-01-01T00:00:00.000Z' and dataSource was provided as
 * 'datasource'. The desired behavior in that case would be to return null, since the identifier does not actually
 * belong to the provided dataSource, but a non-null result is returned instead. This edge case currently only
 * affects paged select queries with a union dataSource of two similarly-named dataSources, as in the given example.
 *
 * @param dataSource the dataSource corresponding to this identifier
 * @param identifier segment identifier
 * @return a {@link DataSegmentUtils.SegmentIdentifierParts} object if the identifier could be parsed, null otherwise
 */
public static SegmentIdentifierParts valueOf(String dataSource, String identifier) {
    if (!identifier.startsWith(StringUtils.format("%s_", dataSource))) {
        return null;
    }

    String remaining = identifier.substring(dataSource.length() + 1);
    String[] splits = remaining.split(DataSegment.delimiter);
    if (splits.length < 3) {
        return null;
    }

    DateTimeFormatter formatter = ISODateTimeFormat.dateTime();

    try {
        DateTime start = formatter.parseDateTime(splits[0]);
        DateTime end = formatter.parseDateTime(splits[1]);
        String version = splits[2];
        String trail = splits.length > 3 ? join(splits, DataSegment.delimiter, 3, splits.length) : null;

        return new SegmentIdentifierParts(dataSource, new Interval(start.getMillis(), end.getMillis()), version,
                trail);
    } catch (IllegalArgumentException e) {
        return null;
    }
}
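
A hypothetical identifier that this method would parse; the underscore-delimited layout (dataSource, start, end, version) is implied by the splitting logic above:

import io.druid.timeline.DataSegmentUtils;

public class SegmentIdExample {
    public static void main(String[] args) {
        // dataSource_start_end_version, with each part separated by DataSegment.delimiter.
        String id = "wikipedia_2017-01-01T00:00:00.000Z_2017-01-02T00:00:00.000Z_v1";
        DataSegmentUtils.SegmentIdentifierParts parts = DataSegmentUtils.valueOf("wikipedia", id);
        System.out.println(parts != null); // true: the identifier matches the expected shape
    }
}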

From source file: io.druid.timeline.VersionedIntervalTimeline.java

License: Apache License

/**
 * Adds an entry to the timeline at the given key, splitting intervals as needed to resolve overlaps.
 *
 * @param timeline the navigable map of interval keys to timeline entries
 * @param key      the interval at which to add the entry
 * @param entry    the entry to add
 * @return true if we inserted or discarded something, false otherwise
 */
private boolean addAtKey(NavigableMap<Interval, TimelineEntry> timeline, Interval key, TimelineEntry entry) {
    boolean retVal = false;
    Interval currKey = key;
    Interval entryInterval = entry.getTrueInterval();

    if (!currKey.overlaps(entryInterval)) {
        return false;
    }

    while (entryInterval != null && currKey != null && currKey.overlaps(entryInterval)) {
        Interval nextKey = timeline.higherKey(currKey);

        int versionCompare = versionComparator.compare(entry.getVersion(), timeline.get(currKey).getVersion());

        if (versionCompare < 0) {
            if (currKey.contains(entryInterval)) {
                return true;
            } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
                entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
            } else {
                addIntervalToTimeline(new Interval(entryInterval.getStart(), currKey.getStart()), entry,
                        timeline);

                if (entryInterval.getEnd().isAfter(currKey.getEnd())) {
                    entryInterval = new Interval(currKey.getEnd(), entryInterval.getEnd());
                } else {
                    entryInterval = null; // discard this entry
                }
            }
        } else if (versionCompare > 0) {
            TimelineEntry oldEntry = timeline.remove(currKey);

            if (currKey.contains(entryInterval)) {
                addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry,
                        timeline);
                addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry,
                        timeline);
                addIntervalToTimeline(entryInterval, entry, timeline);

                return true;
            } else if (currKey.getStart().isBefore(entryInterval.getStart())) {
                addIntervalToTimeline(new Interval(currKey.getStart(), entryInterval.getStart()), oldEntry,
                        timeline);
            } else if (entryInterval.getEnd().isBefore(currKey.getEnd())) {
                addIntervalToTimeline(new Interval(entryInterval.getEnd(), currKey.getEnd()), oldEntry,
                        timeline);
            }
        } else {
            if (timeline.get(currKey).equals(entry)) {
                // This occurs when restoring segments
                timeline.remove(currKey);
            } else {
                throw new UnsupportedOperationException(
                        String.format("Cannot add overlapping segments [%s and %s] with the same version [%s]",
                                currKey, entryInterval, entry.getVersion()));
            }
        }

        currKey = nextKey;
        retVal = true;
    }

    addIntervalToTimeline(entryInterval, entry, timeline);

    return retVal;
}
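
The splitting above leans on a handful of Joda-Time interval predicates. A standalone sketch of the same operations on plain intervals (the dates are illustrative):

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class IntervalSplitExample {
    public static void main(String[] args) {
        Interval key = new Interval(new DateTime("2012-01-01"), new DateTime("2012-01-10"));
        Interval entry = new Interval(new DateTime("2012-01-05"), new DateTime("2012-01-15"));

        System.out.println(key.overlaps(entry)); // true
        System.out.println(key.contains(entry)); // false: entry extends past key
        // The non-overlapping remainder of entry, as computed in the versionCompare < 0 branch.
        Interval remainder = new Interval(key.getEnd(), entry.getEnd());
        System.out.println(remainder); // 2012-01-10/2012-01-15, rendered in the default zone
    }
}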

From source file: io.github.benas.jpopulator.randomizers.joda.DefaultJodaIntervalRandomizer.java

License: Open Source License

@Override
public Interval getRandomValue() {
    long startDate = Math.abs(ConstantsUtil.RANDOM.nextInt());
    long endDate = startDate + Math.abs(ConstantsUtil.RANDOM.nextInt());
    return new Interval(startDate, endDate);
}

From source file: io.github.benas.randombeans.randomizers.jodatime.JodaTimeIntervalRandomizer.java

License: Open Source License

@Override
public Interval getRandomValue() {
    long startDate = Math.abs(Constants.RANDOM.nextInt());
    long endDate = startDate + Math.abs(Constants.RANDOM.nextInt());
    return new Interval(startDate, endDate);
}
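
Both randomizers share a subtle edge case: Math.abs(nextInt()) is still negative when nextInt() returns Integer.MIN_VALUE, in which case new Interval(start, end) would throw. A defensive variant (a sketch, not the library's code) masks the sign bit instead:

import java.util.Random;

import org.joda.time.Interval;

public class SafeIntervalRandomizer {
    private static final Random RANDOM = new Random();

    public static Interval getRandomValue() {
        // Masking with Integer.MAX_VALUE clears the sign bit, so the result is always >= 0,
        // unlike Math.abs(Integer.MIN_VALUE), which overflows back to a negative value.
        long startDate = RANDOM.nextInt() & Integer.MAX_VALUE;
        long endDate = startDate + (RANDOM.nextInt() & Integer.MAX_VALUE);
        return new Interval(startDate, endDate);
    }

    public static void main(String[] args) {
        System.out.println(getRandomValue());
    }
}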

From source file: io.sidecar.query.DateRange.java

License: Apache License

public DateRange(final DateTime begin, final DateTime end) {
    this.begin = begin;
    this.end = end;
    Preconditions.checkArgument(end.isAfter(begin));

    this.duration = new Interval(this.begin, this.end);
}