Example usage for org.joda.time Interval getStartMillis

List of usage examples for org.joda.time Interval getStartMillis

Introduction

On this page you can find example usage for org.joda.time Interval getStartMillis.

Prototype

public long getStartMillis() 

Document

Gets the start of this time interval, which is inclusive.
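
A minimal, self-contained sketch (plain Joda-Time only; the class and variable names are chosen for illustration) of what the method returns:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

public class GetStartMillisDemo {
    public static void main(String[] args) {
        // An interval covering one UTC day.
        Interval day = new Interval(new DateTime(2018, 4, 1, 0, 0, DateTimeZone.UTC),
                new DateTime(2018, 4, 2, 0, 0, DateTimeZone.UTC));

        // getStartMillis() returns the inclusive start instant as epoch milliseconds.
        long startMillis = day.getStartMillis(); // 1522540800000L
        // The end instant, by contrast, is exclusive.
        long endMillis = day.getEndMillis();     // 1522627200000L

        System.out.println(startMillis == day.getStart().getMillis()); // true
    }
}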

Usage

From source file:org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator.java

License:Apache License

private CheckExistingSegmentIdResult checkAndGetExistingSegmentId(final Query<Map<String, Object>> query,
        final Interval interval, final String sequenceName, final @Nullable String previousSegmentId,
        final Pair<String, String>... queryVars) throws IOException {
    Query<Map<String, Object>> boundQuery = query;
    for (Pair<String, String> var : queryVars) {
        boundQuery = boundQuery.bind(var.lhs, var.rhs);
    }
    final List<byte[]> existingBytes = boundQuery.map(ByteArrayMapper.FIRST).list();

    if (!existingBytes.isEmpty()) {
        final SegmentIdWithShardSpec existingIdentifier = jsonMapper
                .readValue(Iterables.getOnlyElement(existingBytes), SegmentIdWithShardSpec.class);

        if (existingIdentifier.getInterval().getStartMillis() == interval.getStartMillis()
                && existingIdentifier.getInterval().getEndMillis() == interval.getEndMillis()) {
            if (previousSegmentId == null) {
                log.info("Found existing pending segment [%s] for sequence[%s] in DB", existingIdentifier,
                        sequenceName);
            } else {
                log.info("Found existing pending segment [%s] for sequence[%s] (previous = [%s]) in DB",
                        existingIdentifier, sequenceName, previousSegmentId);
            }

            return new CheckExistingSegmentIdResult(true, existingIdentifier);
        } else {
            if (previousSegmentId == null) {
                log.warn(
                        "Cannot use existing pending segment [%s] for sequence[%s] in DB, "
                                + "does not match requested interval[%s]",
                        existingIdentifier, sequenceName, interval);
            } else {
                log.warn(
                        "Cannot use existing pending segment [%s] for sequence[%s] (previous = [%s]) in DB, "
                                + "does not match requested interval[%s]",
                        existingIdentifier, sequenceName, previousSegmentId, interval);
            }

            return new CheckExistingSegmentIdResult(true, null);
        }
    }
    return new CheckExistingSegmentIdResult(false, null);
}
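
Note that comparing getStartMillis()/getEndMillis() pairs, as above, matches intervals on their instants alone, while Interval.equals() also compares the chronology. A short illustrative fragment (plain Joda-Time):

import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

Interval utcDay = new Interval(0L, 86400000L, ISOChronology.getInstanceUTC());
Interval parisDay = new Interval(0L, 86400000L,
        ISOChronology.getInstance(DateTimeZone.forID("Europe/Paris")));

// Same instants, so the millis comparison used above succeeds...
boolean sameBounds = utcDay.getStartMillis() == parisDay.getStartMillis()
        && utcDay.getEndMillis() == parisDay.getEndMillis(); // true
// ...while equals() fails because the chronologies differ.
boolean equalObjects = utcDay.equals(parisDay); // false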

From source file:org.apache.druid.query.filter.IntervalDimFilter.java

License:Apache License

private List<Pair<Long, Long>> makeIntervalLongs() {
    List<Pair<Long, Long>> intervalLongs = new ArrayList<>();
    for (Interval interval : intervals) {
        intervalLongs.add(new Pair<>(interval.getStartMillis(), interval.getEndMillis()));
    }
    return intervalLongs;
}

From source file:org.apache.druid.query.IntervalChunkingQueryRunner.java

License:Apache License

private static Iterable<Interval> splitInterval(Interval interval, Period period) {
    if (interval.getEndMillis() == interval.getStartMillis()) {
        return Collections.singletonList(interval);
    }

    List<Interval> intervals = new ArrayList<>();
    Iterator<Interval> timestamps = new PeriodGranularity(period, null, null).getIterable(interval).iterator();

    DateTime start = DateTimes.max(timestamps.next().getStart(), interval.getStart());
    while (timestamps.hasNext()) {
        DateTime end = timestamps.next().getStart();
        intervals.add(new Interval(start, end));
        start = end;
    }

    if (start.compareTo(interval.getEnd()) < 0) {
        intervals.add(new Interval(start, interval.getEnd()));
    }

    return intervals;
}
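
The same chunking idea can be sketched without Druid's PeriodGranularity. This hypothetical helper simply chops from the interval's start and, unlike the snippet above, does not align chunks to granularity bucket boundaries:

import java.util.ArrayList;
import java.util.List;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Period;

// Split an interval into consecutive sub-intervals, each at most one period long.
static List<Interval> chunk(Interval interval, Period period) {
    List<Interval> chunks = new ArrayList<>();
    DateTime start = interval.getStart();
    while (start.isBefore(interval.getEnd())) {
        DateTime end = start.plus(period);
        if (end.isAfter(interval.getEnd())) {
            end = interval.getEnd();
        }
        chunks.add(new Interval(start, end));
        start = end;
    }
    return chunks;
}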

From source file:org.apache.druid.query.materializedview.MaterializedViewUtils.java

License:Apache License

/**
 * Calculates the intervals which are covered by interval2 but not covered by interval1:
 * result intervals = interval2 - (interval1 ∩ interval2)
 * e.g.
 * a list of interval2: ["2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z",
 *                       "2018-04-03T00:00:00.000Z/2018-04-10T00:00:00.000Z"]
 * a list of interval1: ["2018-04-04T00:00:00.000Z/2018-04-06T00:00:00.000Z"]
 * the result list of intervals: ["2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z",
 *                                "2018-04-03T00:00:00.000Z/2018-04-04T00:00:00.000Z",
 *                                "2018-04-06T00:00:00.000Z/2018-04-10T00:00:00.000Z"]
 * If interval2 is empty, an empty list of intervals is returned.
 * @param interval2 list of intervals
 * @param interval1 list of intervals
 * @return list of intervals covered by interval2 but not covered by interval1
 */
public static List<Interval> minus(List<Interval> interval2, List<Interval> interval1) {
    if (interval1.isEmpty() || interval2.isEmpty()) {
        return interval1;
    }
    Iterator<Interval> it1 = JodaUtils.condenseIntervals(interval1).iterator();
    Iterator<Interval> it2 = JodaUtils.condenseIntervals(interval2).iterator();
    List<Interval> remaining = new ArrayList<>();
    Interval currInterval1 = it1.next();
    Interval currInterval2 = it2.next();
    long start1 = currInterval1.getStartMillis();
    long end1 = currInterval1.getEndMillis();
    long start2 = currInterval2.getStartMillis();
    long end2 = currInterval2.getEndMillis();
    while (true) {
        if (start2 < start1 && end2 <= start1) {
            remaining.add(Intervals.utc(start2, end2));
            if (it2.hasNext()) {
                currInterval2 = it2.next();
                start2 = currInterval2.getStartMillis();
                end2 = currInterval2.getEndMillis();
            } else {
                break;
            }
        }
        if (start2 < start1 && end2 > start1 && end2 < end1) {
            remaining.add(Intervals.utc(start2, start1));
            start1 = end2;
            if (it2.hasNext()) {
                currInterval2 = it2.next();
                start2 = currInterval2.getStartMillis();
                end2 = currInterval2.getEndMillis();
            } else {
                break;
            }
        }
        if (start2 < start1 && end2 == end1) {
            remaining.add(Intervals.utc(start2, start1));
            if (it2.hasNext() && it1.hasNext()) {
                currInterval2 = it2.next();
                start2 = currInterval2.getStartMillis();
                end2 = currInterval2.getEndMillis();
                currInterval1 = it1.next();
                start1 = currInterval1.getStartMillis();
                end1 = currInterval1.getEndMillis();
            } else {
                break;
            }
        }
        if (start2 < start1 && end2 > end1) {
            remaining.add(Intervals.utc(start2, start1));
            start2 = end1;
            if (it1.hasNext()) {
                currInterval1 = it1.next();
                start1 = currInterval1.getStartMillis();
                end1 = currInterval1.getEndMillis();
            } else {
                remaining.add(Intervals.utc(end1, end2));
                break;
            }
        }
        if (start2 == start1 && end2 >= start1 && end2 < end1) {
            start1 = end2;
            if (it2.hasNext()) {
                currInterval2 = it2.next();
                start2 = currInterval2.getStartMillis();
                end2 = currInterval2.getEndMillis();
            } else {
                break;
            }
        }
        if (start2 == start1 && end2 > end1) {
            start2 = end1;
            if (it1.hasNext()) {
                currInterval1 = it1.next();
                start1 = currInterval1.getStartMillis();
                end1 = currInterval1.getEndMillis();
            } else {
                remaining.add(Intervals.utc(end1, end2));
                break;
            }
        }
        if (start2 > start1 && start2 < end1 && end2 < end1) {
            start1 = end2;
            if (it2.hasNext()) {
                currInterval2 = it2.next();
                start2 = currInterval2.getStartMillis();
                end2 = currInterval2.getEndMillis();
            } else {
                break;
            }
        }
        if (start2 > start1 && start2 < end1 && end2 > end1) {
            start2 = end1;
            if (it1.hasNext()) {
                currInterval1 = it1.next();
                start1 = currInterval1.getStartMillis();
                end1 = currInterval1.getEndMillis();
            } else {
                remaining.add(Intervals.utc(end1, end2));
                break;
            }
        }
        if (start2 >= start1 && start2 <= end1 && end2 == end1) {
            if (it2.hasNext() && it1.hasNext()) {
                currInterval2 = it2.next();
                start2 = currInterval2.getStartMillis();
                end2 = currInterval2.getEndMillis();
                currInterval1 = it1.next();
                start1 = currInterval1.getStartMillis();
                end1 = currInterval1.getEndMillis();
            } else {
                break;
            }
        }
        if (start2 >= end1 && end2 > end1) {
            if (it1.hasNext()) {
                currInterval1 = it1.next();
                start1 = currInterval1.getStartMillis();
                end1 = currInterval1.getEndMillis();
            } else {
                remaining.add(Intervals.utc(start2, end2));
                break;
            }
        }
    }

    while (it2.hasNext()) {
        remaining.add(Intervals.of(it2.next().toString()));
    }
    return remaining;
}
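
Based on the Javadoc example above, a usage sketch (assuming the Druid classes quoted in the snippet, notably org.apache.druid.java.util.common.Intervals, are on the classpath):

List<Interval> interval2 = Arrays.asList(
        Intervals.of("2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z"),
        Intervals.of("2018-04-03T00:00:00.000Z/2018-04-10T00:00:00.000Z"));
List<Interval> interval1 = Collections.singletonList(
        Intervals.of("2018-04-04T00:00:00.000Z/2018-04-06T00:00:00.000Z"));

// Expected, per the Javadoc: [2018-04-01/2018-04-02,
//                             2018-04-03/2018-04-04,
//                             2018-04-06/2018-04-10]
List<Interval> result = MaterializedViewUtils.minus(interval2, interval1);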

From source file:org.apache.druid.query.search.UseIndexesStrategy.java

License:Apache License

static ImmutableBitmap makeTimeFilteredBitmap(final QueryableIndex index, final Segment segment,
        final Filter filter, final Interval interval) {
    final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();
    final ImmutableBitmap baseFilter;
    if (filter == null) {
        baseFilter = null;
    } else {
        final BitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(
                index.getBitmapFactoryForDimensions(), VirtualColumns.EMPTY, index);
        Preconditions.checkArgument(filter.supportsBitmapIndex(selector), "filter[%s] should support bitmap",
                filter);
        baseFilter = filter.getBitmapIndex(selector);
    }

    final ImmutableBitmap timeFilteredBitmap;
    if (!interval.contains(segment.getDataInterval())) {
        final MutableBitmap timeBitmap = bitmapFactory.makeEmptyMutableBitmap();
        final ColumnHolder timeColumnHolder = index.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME);
        try (final NumericColumn timeValues = (NumericColumn) timeColumnHolder.getColumn()) {

            int startIndex = Math.max(0, getStartIndexOfTime(timeValues, interval.getStartMillis(), true));
            int endIndex = Math.min(timeValues.length() - 1,
                    getStartIndexOfTime(timeValues, interval.getEndMillis(), false));

            for (int i = startIndex; i <= endIndex; i++) {
                timeBitmap.add(i);
            }

            final ImmutableBitmap finalTimeBitmap = bitmapFactory.makeImmutableBitmap(timeBitmap);
            timeFilteredBitmap = (baseFilter == null) ? finalTimeBitmap
                    : finalTimeBitmap.intersection(baseFilter);
        }
    } else {
        timeFilteredBitmap = baseFilter;
    }

    return timeFilteredBitmap;
}
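
The row selection above relies on the usual half-open convention: a timestamp t lies in the interval iff getStartMillis() <= t < getEndMillis(). A one-method sketch of that test:

// Equivalent to Joda's own interval.contains(t) for a long instant.
static boolean containsMillis(Interval interval, long t) {
    return t >= interval.getStartMillis() && t < interval.getEndMillis();
}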

From source file:org.apache.druid.query.select.SelectQueryQueryToolChest.java

License:Apache License

@Override
public <T extends LogicalSegment> List<T> filterSegments(SelectQuery query, List<T> segments) {
    // at the point where this code is called, only one datasource should exist.
    final String dataSource = Iterables.getOnlyElement(query.getDataSource().getNames());

    PagingSpec pagingSpec = query.getPagingSpec();
    Map<String, Integer> paging = pagingSpec.getPagingIdentifiers();
    if (paging == null || paging.isEmpty()) {
        return segments;
    }

    final Granularity granularity = query.getGranularity();

    TreeMap<Long, Long> granularThresholds = new TreeMap<>();

    // A paged select query using a UnionDataSource will return pagingIdentifiers from segments in more than one
    // dataSource which confuses subsequent queries and causes a failure. To avoid this, filter only the paging keys
    // that are applicable to this dataSource so that each dataSource in a union query gets the appropriate keys.
    paging.keySet().stream().filter(identifier -> SegmentId.tryParse(dataSource, identifier) != null)
            .map(SegmentId.makeIntervalExtractor(dataSource))
            .sorted(query.isDescending() ? Comparators.intervalsByEndThenStart()
                    : Comparators.intervalsByStartThenEnd())
            .forEach(interval -> {
                if (query.isDescending()) {
                    long granularEnd = granularity.bucketStart(interval.getEnd()).getMillis();
                    Long currentEnd = granularThresholds.get(granularEnd);
                    if (currentEnd == null || interval.getEndMillis() > currentEnd) {
                        granularThresholds.put(granularEnd, interval.getEndMillis());
                    }
                } else {
                    long granularStart = granularity.bucketStart(interval.getStart()).getMillis();
                    Long currentStart = granularThresholds.get(granularStart);
                    if (currentStart == null || interval.getStartMillis() < currentStart) {
                        granularThresholds.put(granularStart, interval.getStartMillis());
                    }
                }
            });

    List<T> queryIntervals = Lists.newArrayList(segments);

    Iterator<T> it = queryIntervals.iterator();
    if (query.isDescending()) {
        while (it.hasNext()) {
            Interval interval = it.next().getInterval();
            Map.Entry<Long, Long> ceiling = granularThresholds
                    .ceilingEntry(granularity.bucketStart(interval.getEnd()).getMillis());
            if (ceiling == null || interval.getStartMillis() >= ceiling.getValue()) {
                it.remove();
            }
        }
    } else {
        while (it.hasNext()) {
            Interval interval = it.next().getInterval();
            Map.Entry<Long, Long> floor = granularThresholds
                    .floorEntry(granularity.bucketStart(interval.getStart()).getMillis());
            if (floor == null || interval.getEndMillis() <= floor.getValue()) {
                it.remove();
            }
        }
    }
    return queryIntervals;
}

From source file:org.apache.druid.query.TimewarpOperator.java

License:Apache License

public QueryRunner<T> postProcess(final QueryRunner<T> baseRunner, final long now) {
    return new QueryRunner<T>() {
        @Override
        public Sequence<T> run(final QueryPlus<T> queryPlus, final ResponseContext responseContext) {
            final DateTimeZone tz = queryPlus.getQuery().getTimezone();
            final long offset = computeOffset(now, tz);

            final Interval interval = queryPlus.getQuery().getIntervals().get(0);
            final Interval modifiedInterval = new Interval(
                    Math.min(interval.getStartMillis() + offset, now + offset),
                    Math.min(interval.getEndMillis() + offset, now + offset), interval.getChronology());
            return Sequences.map(baseRunner.run(
                    queryPlus.withQuerySegmentSpec(
                            new MultipleIntervalSegmentSpec(Collections.singletonList(modifiedInterval))),
                    responseContext), new Function<T, T>() {
                        @Override
                        public T apply(T input) {
                            if (input instanceof Result) {
                                Result res = (Result) input;
                                Object value = res.getValue();
                                if (value instanceof TimeBoundaryResultValue) {
                                    TimeBoundaryResultValue boundary = (TimeBoundaryResultValue) value;

                                    DateTime minTime;
                                    try {
                                        minTime = boundary.getMinTime();
                                    } catch (IllegalArgumentException e) {
                                        minTime = null;
                                    }

                                    final DateTime maxTime = boundary.getMaxTime();

                                    return (T) ((TimeBoundaryQuery) queryPlus.getQuery()).buildResult(
                                            DateTimes.utc(
                                                    Math.min(res.getTimestamp().getMillis() - offset, now)),
                                            minTime != null ? minTime.minus(offset) : null,
                                            maxTime != null
                                                    ? DateTimes.utc(Math.min(maxTime.getMillis() - offset, now))
                                                    : null)
                                            .iterator().next();
                                }
                                return (T) new Result(res.getTimestamp().minus(offset), value);
                            } else if (input instanceof MapBasedRow) {
                                MapBasedRow row = (MapBasedRow) input;
                                return (T) new MapBasedRow(row.getTimestamp().minus(offset), row.getEvent());
                            }

                            // default to noop for unknown result types
                            return input;
                        }
                    });
        }
    };
}
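
The interval arithmetic at the heart of run() can be shown standalone. This hypothetical helper mirrors the snippet: shift both endpoints forward by a fixed offset, clamping so neither passes now + offset, and preserve the chronology:

import org.joda.time.Interval;

static Interval shiftAndClamp(Interval interval, long offset, long now) {
    return new Interval(
            Math.min(interval.getStartMillis() + offset, now + offset),
            Math.min(interval.getEndMillis() + offset, now + offset),
            interval.getChronology());
}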

From source file:org.apache.druid.query.vector.VectorCursorGranularizer.java

License:Apache License

public void setCurrentOffsets(final Interval bucketInterval) {
    final long timeStart = bucketInterval.getStartMillis();
    final long timeEnd = bucketInterval.getEndMillis();

    int vectorSize = cursor.getCurrentVectorSize();
    endOffset = 0;

    if (timeSelector != null) {
        if (timestamps == null) {
            timestamps = timeSelector.getLongVector();
        }

        // Skip "offset" to start of bucketInterval.
        while (startOffset < vectorSize && timestamps[startOffset] < timeStart) {
            startOffset++;
        }

        // Find end of bucketInterval.
        for (endOffset = vectorSize - 1; endOffset >= startOffset
                && timestamps[endOffset] >= timeEnd; endOffset--) {
            // nothing needed, "for" is doing the work.
        }

        // Adjust: endOffset is now pointing at the last row to aggregate, but we want it
        // to be one _past_ the last row.
        endOffset++;
    } else {
        endOffset = vectorSize;
    }
}
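
The two scans above can be checked in isolation. A hedged standalone version, given an ascending long[] of timestamps:

// Returns {start, end} such that timestamps[start..end-1] all fall inside
// the half-open range [timeStart, timeEnd).
static int[] offsetsWithin(long[] timestamps, long timeStart, long timeEnd) {
    int start = 0;
    while (start < timestamps.length && timestamps[start] < timeStart) {
        start++;
    }
    int end = timestamps.length;
    while (end > start && timestamps[end - 1] >= timeEnd) {
        end--;
    }
    return new int[] {start, end};
}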

From source file:org.apache.druid.segment.IndexMergerV9.java

License:Apache License

private void makeIndexBinary(final FileSmoosher v9Smoosher, final List<IndexableAdapter> adapters,
        final File outDir, final List<String> mergedDimensions, final List<String> mergedMetrics,
        final ProgressIndicator progress, final IndexSpec indexSpec, final List<DimensionMergerV9> mergers)
        throws IOException {
    final String section = "make index.drd";
    progress.startSection(section);

    long startTime = System.currentTimeMillis();
    final Set<String> finalDimensions = new LinkedHashSet<>();
    final Set<String> finalColumns = new LinkedHashSet<>();
    finalColumns.addAll(mergedMetrics);
    for (int i = 0; i < mergedDimensions.size(); ++i) {
        if (mergers.get(i).canSkip()) {
            continue;
        }
        finalColumns.add(mergedDimensions.get(i));
        finalDimensions.add(mergedDimensions.get(i));
    }

    GenericIndexed<String> cols = GenericIndexed.fromIterable(finalColumns, GenericIndexed.STRING_STRATEGY);
    GenericIndexed<String> dims = GenericIndexed.fromIterable(finalDimensions, GenericIndexed.STRING_STRATEGY);

    final String bitmapSerdeFactoryType = mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory());
    final long numBytes = cols.getSerializedSize() + dims.getSerializedSize() + 16
            + serializerUtils.getSerializedStringByteSize(bitmapSerdeFactoryType);

    final SmooshedWriter writer = v9Smoosher.addWithSmooshedWriter("index.drd", numBytes);
    cols.writeTo(writer, v9Smoosher);
    dims.writeTo(writer, v9Smoosher);

    DateTime minTime = DateTimes.MAX;
    DateTime maxTime = DateTimes.MIN;

    for (IndexableAdapter index : adapters) {
        minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
        maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
    }
    final Interval dataInterval = new Interval(minTime, maxTime);

    serializerUtils.writeLong(writer, dataInterval.getStartMillis());
    serializerUtils.writeLong(writer, dataInterval.getEndMillis());

    serializerUtils.writeString(writer, bitmapSerdeFactoryType);
    writer.close();

    IndexIO.checkFileSize(new File(outDir, "index.drd"));
    log.info("Completed index.drd in %,d millis.", System.currentTimeMillis() - startTime);

    progress.stopSection(section);
}
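
The snippet persists the data interval as two raw longs. A round-trip sketch using plain java.io (UTC chronology is an assumption when reading back):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;

static byte[] writeInterval(Interval interval) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    out.writeLong(interval.getStartMillis()); // 8 bytes: inclusive start
    out.writeLong(interval.getEndMillis());   // 8 bytes: exclusive end
    return bos.toByteArray();
}

static Interval readInterval(byte[] bytes) throws IOException {
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
    return new Interval(in.readLong(), in.readLong(), ISOChronology.getInstanceUTC());
}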

From source file:org.apache.druid.segment.QueryableIndexCursorSequenceBuilder.java

License:Apache License

public Sequence<Cursor> build(final Granularity gran) {
    final Offset baseOffset;

    if (filterBitmap == null) {
        baseOffset = descending ? new SimpleDescendingOffset(index.getNumRows())
                : new SimpleAscendingOffset(index.getNumRows());
    } else {
        baseOffset = BitmapOffset.of(filterBitmap, descending, index.getNumRows());
    }

    // Column caches shared amongst all cursors in this sequence.
    final Map<String, BaseColumn> columnCache = new HashMap<>();

    final NumericColumn timestamps = (NumericColumn) index.getColumnHolder(ColumnHolder.TIME_COLUMN_NAME)
            .getColumn();

    final Closer closer = Closer.create();
    closer.register(timestamps);

    Iterable<Interval> iterable = gran.getIterable(interval);
    if (descending) {
        iterable = Lists.reverse(ImmutableList.copyOf(iterable));
    }

    return Sequences.withBaggage(Sequences.map(Sequences.simple(iterable), new Function<Interval, Cursor>() {
        @Override
        public Cursor apply(final Interval inputInterval) {
            final long timeStart = Math.max(interval.getStartMillis(), inputInterval.getStartMillis());
            final long timeEnd = Math.min(interval.getEndMillis(),
                    gran.increment(inputInterval.getStart()).getMillis());

            if (descending) {
                for (; baseOffset.withinBounds(); baseOffset.increment()) {
                    if (timestamps.getLongSingleValueRow(baseOffset.getOffset()) < timeEnd) {
                        break;
                    }
                }
            } else {
                for (; baseOffset.withinBounds(); baseOffset.increment()) {
                    if (timestamps.getLongSingleValueRow(baseOffset.getOffset()) >= timeStart) {
                        break;
                    }
                }
            }

            final Offset offset = descending
                    ? new DescendingTimestampCheckingOffset(baseOffset, timestamps, timeStart,
                            minDataTimestamp >= timeStart)
                    : new AscendingTimestampCheckingOffset(baseOffset, timestamps, timeEnd,
                            maxDataTimestamp < timeEnd);

            final Offset baseCursorOffset = offset.clone();
            final ColumnSelectorFactory columnSelectorFactory = new QueryableIndexColumnSelectorFactory(index,
                    virtualColumns, descending, closer, baseCursorOffset.getBaseReadableOffset(), columnCache);
            final DateTime myBucket = gran.toDateTime(inputInterval.getStartMillis());

            if (postFilter == null) {
                return new QueryableIndexCursor(baseCursorOffset, columnSelectorFactory, myBucket);
            } else {
                FilteredOffset filteredOffset = new FilteredOffset(baseCursorOffset, columnSelectorFactory,
                        descending, postFilter, bitmapIndexSelector);
                return new QueryableIndexCursor(filteredOffset, columnSelectorFactory, myBucket);
            }

        }
    }), closer);
}
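
The Math.max/Math.min pair in apply() computes the overlap of two intervals in raw millis; Joda-Time can produce the same overlap directly:

import org.joda.time.Interval;

Interval a = new Interval(0L, 100L);
Interval b = new Interval(50L, 150L);

long timeStart = Math.max(a.getStartMillis(), b.getStartMillis()); // 50
long timeEnd = Math.min(a.getEndMillis(), b.getEndMillis());       // 100

// Same result via the library (overlap() returns null when the intervals
// do not overlap at all):
Interval overlap = a.overlap(b); // 50..100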