List of usage examples for org.joda.time.Interval.getEndMillis()
public long getEndMillis()
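Before the project-specific examples below, a minimal standalone sketch of calling getEndMillis() directly (assumes only Joda-Time on the classpath; the class name and interval values are illustrative):

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

public class GetEndMillisExample {
  public static void main(String[] args) {
    // An Interval is a half-open range [start, end) of millisecond instants.
    Interval interval = new Interval(
        new DateTime(2018, 4, 1, 0, 0, DateTimeZone.UTC),
        new DateTime(2018, 4, 2, 0, 0, DateTimeZone.UTC));

    // getEndMillis() returns the exclusive end instant as epoch milliseconds.
    long startMillis = interval.getStartMillis();
    long endMillis = interval.getEndMillis();

    System.out.println("duration in millis = " + (endMillis - startMillis)); // 86400000
  }
}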
From source file:io.druid.query.IntervalChunkingQueryRunner.java
License:Apache License
private Iterable<Interval> splitInterval(Interval interval, Period period)
{
  if (interval.getEndMillis() == interval.getStartMillis()) {
    return Lists.newArrayList(interval);
  }

  List<Interval> intervals = Lists.newArrayList();
  Iterator<Long> timestamps = new PeriodGranularity(period, null, null)
      .iterable(interval.getStartMillis(), interval.getEndMillis()).iterator();

  long start = Math.max(timestamps.next(), interval.getStartMillis());
  while (timestamps.hasNext()) {
    long end = timestamps.next();
    intervals.add(new Interval(start, end));
    start = end;
  }

  if (start < interval.getEndMillis()) {
    intervals.add(new Interval(start, interval.getEndMillis()));
  }

  return intervals;
}
From source file:io.druid.query.materializedview.MaterializedViewUtils.java
License:Apache License
/**
 * Calculate the intervals which are covered by interval2, but not covered by interval1.
 * result intervals = interval2 - (interval1 ∩ interval2)
 * e.g.
 *   a list of interval2: ["2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z",
 *                         "2018-04-03T00:00:00.000Z/2018-04-10T00:00:00.000Z"]
 *   a list of interval1: ["2018-04-04T00:00:00.000Z/2018-04-06T00:00:00.000Z"]
 *   the result list of intervals: ["2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z",
 *                                  "2018-04-03T00:00:00.000Z/2018-04-04T00:00:00.000Z",
 *                                  "2018-04-06T00:00:00.000Z/2018-04-10T00:00:00.000Z"]
 * If interval2 is empty, then return an empty list of intervals.
 * @param interval2 list of intervals
 * @param interval1 list of intervals
 * @return list of intervals covered by interval2, but not covered by interval1.
 */
public static List<Interval> minus(List<Interval> interval2, List<Interval> interval1)
{
  if (interval1.isEmpty() || interval2.isEmpty()) {
    return interval1;
  }
  Iterator<Interval> it1 = JodaUtils.condenseIntervals(interval1).iterator();
  Iterator<Interval> it2 = JodaUtils.condenseIntervals(interval2).iterator();
  List<Interval> remaining = Lists.newArrayList();
  Interval currInterval1 = it1.next();
  Interval currInterval2 = it2.next();
  long start1 = currInterval1.getStartMillis();
  long end1 = currInterval1.getEndMillis();
  long start2 = currInterval2.getStartMillis();
  long end2 = currInterval2.getEndMillis();

  while (true) {
    if (start2 < start1 && end2 <= start1) {
      remaining.add(Intervals.utc(start2, end2));
      if (it2.hasNext()) {
        currInterval2 = it2.next();
        start2 = currInterval2.getStartMillis();
        end2 = currInterval2.getEndMillis();
      } else {
        break;
      }
    }
    if (start2 < start1 && end2 > start1 && end2 < end1) {
      remaining.add(Intervals.utc(start2, start1));
      start1 = end2;
      if (it2.hasNext()) {
        currInterval2 = it2.next();
        start2 = currInterval2.getStartMillis();
        end2 = currInterval2.getEndMillis();
      } else {
        break;
      }
    }
    if (start2 < start1 && end2 == end1) {
      remaining.add(Intervals.utc(start2, start1));
      if (it2.hasNext() && it1.hasNext()) {
        currInterval2 = it2.next();
        start2 = currInterval2.getStartMillis();
        end2 = currInterval2.getEndMillis();
        currInterval1 = it1.next();
        start1 = currInterval1.getStartMillis();
        end1 = currInterval1.getEndMillis();
      } else {
        break;
      }
    }
    if (start2 < start1 && end2 > end1) {
      remaining.add(Intervals.utc(start2, start1));
      start2 = end1;
      if (it1.hasNext()) {
        currInterval1 = it1.next();
        start1 = currInterval1.getStartMillis();
        end1 = currInterval1.getEndMillis();
      } else {
        remaining.add(Intervals.utc(end1, end2));
        break;
      }
    }
    if (start2 == start1 && end2 >= start1 && end2 < end1) {
      start1 = end2;
      if (it2.hasNext()) {
        currInterval2 = it2.next();
        start2 = currInterval2.getStartMillis();
        end2 = currInterval2.getEndMillis();
      } else {
        break;
      }
    }
    if (start2 == start1 && end2 > end1) {
      start2 = end1;
      if (it1.hasNext()) {
        currInterval1 = it1.next();
        start1 = currInterval1.getStartMillis();
        end1 = currInterval1.getEndMillis();
      } else {
        remaining.add(Intervals.utc(end1, end2));
        break;
      }
    }
    if (start2 > start1 && start2 < end1 && end2 < end1) {
      start1 = end2;
      if (it2.hasNext()) {
        currInterval2 = it2.next();
        start2 = currInterval2.getStartMillis();
        end2 = currInterval2.getEndMillis();
      } else {
        break;
      }
    }
    if (start2 > start1 && start2 < end1 && end2 > end1) {
      start2 = end1;
      if (it1.hasNext()) {
        currInterval1 = it1.next();
        start1 = currInterval1.getStartMillis();
        end1 = currInterval1.getEndMillis();
      } else {
        remaining.add(Intervals.utc(end1, end2));
        break;
      }
    }
    if (start2 >= start1 && start2 <= end1 && end2 == end1) {
      if (it2.hasNext() && it1.hasNext()) {
        currInterval2 = it2.next();
        start2 = currInterval2.getStartMillis();
        end2 = currInterval2.getEndMillis();
        currInterval1 = it1.next();
        start1 = currInterval1.getStartMillis();
        end1 = currInterval1.getEndMillis();
      } else {
        break;
      }
    }
    if (start2 >= end1 && end2 > end1) {
      if (it1.hasNext()) {
        currInterval1 = it1.next();
        start1 = currInterval1.getStartMillis();
        end1 = currInterval1.getEndMillis();
      } else {
        remaining.add(Intervals.utc(start2, end2));
        break;
      }
    }
  }
  while (it2.hasNext()) {
    remaining.add(Intervals.of(it2.next().toString()));
  }
  return remaining;
}
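A hedged usage sketch of the subtraction behavior described in the Javadoc above, driving the method with the Javadoc's own interval values. MaterializedViewUtils.minus and Intervals.of come from the Druid source shown here; the import package paths and class name are assumptions:

import java.util.Arrays;
import java.util.List;
import org.joda.time.Interval;
// Assumed package locations for this Druid version:
import io.druid.java.util.common.Intervals;
import io.druid.query.materializedview.MaterializedViewUtils;

public class MinusExample {
  public static void main(String[] args) {
    List<Interval> interval2 = Arrays.asList(
        Intervals.of("2018-04-01T00:00:00.000Z/2018-04-02T00:00:00.000Z"),
        Intervals.of("2018-04-03T00:00:00.000Z/2018-04-10T00:00:00.000Z"));
    List<Interval> interval1 = Arrays.asList(
        Intervals.of("2018-04-04T00:00:00.000Z/2018-04-06T00:00:00.000Z"));

    // Expected, per the Javadoc: 04-01/04-02, 04-03/04-04, 04-06/04-10
    for (Interval i : MaterializedViewUtils.minus(interval2, interval1)) {
      System.out.println(i.getStartMillis() + " -> " + i.getEndMillis());
    }
  }
}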
From source file:io.druid.query.search.search.UseIndexesStrategy.java
License:Apache License
static ImmutableBitmap makeTimeFilteredBitmap(final QueryableIndex index, final Segment segment,
    final Filter filter, final Interval interval)
{
  final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();
  final ImmutableBitmap baseFilter;
  if (filter == null) {
    baseFilter = null;
  } else {
    final BitmapIndexSelector selector = new ColumnSelectorBitmapIndexSelector(
        index.getBitmapFactoryForDimensions(), VirtualColumns.EMPTY, index);
    Preconditions.checkArgument(filter.supportsBitmapIndex(selector), "filter[%s] should support bitmap",
        filter);
    baseFilter = filter.getBitmapIndex(selector);
  }

  final ImmutableBitmap timeFilteredBitmap;
  if (!interval.contains(segment.getDataInterval())) {
    final MutableBitmap timeBitmap = bitmapFactory.makeEmptyMutableBitmap();
    final Column timeColumn = index.getColumn(Column.TIME_COLUMN_NAME);
    try (final GenericColumn timeValues = timeColumn.getGenericColumn()) {
      int startIndex = Math.max(0, getStartIndexOfTime(timeValues, interval.getStartMillis(), true));
      int endIndex = Math.min(timeValues.length() - 1,
          getStartIndexOfTime(timeValues, interval.getEndMillis(), false));

      for (int i = startIndex; i <= endIndex; i++) {
        timeBitmap.add(i);
      }

      final ImmutableBitmap finalTimeBitmap = bitmapFactory.makeImmutableBitmap(timeBitmap);
      timeFilteredBitmap = (baseFilter == null) ? finalTimeBitmap : finalTimeBitmap.intersection(baseFilter);
    }
  } else {
    timeFilteredBitmap = baseFilter;
  }
  return timeFilteredBitmap;
}
From source file:io.druid.query.search.SearchQueryRunner.java
License:Apache License
@Override
public Sequence<Result<SearchResultValue>> run(final Query<Result<SearchResultValue>> input,
    Map<String, Object> responseContext)
{
  if (!(input instanceof SearchQuery)) {
    throw new ISE("Got a [%s] which isn't a %s", input.getClass(), SearchQuery.class);
  }

  final SearchQuery query = (SearchQuery) input;
  final Filter filter = Filters.convertToCNFFromQueryContext(query,
      Filters.toFilter(query.getDimensionsFilter()));
  final List<DimensionSpec> dimensions = query.getDimensions();
  final SearchQuerySpec searchQuerySpec = query.getQuery();
  final int limit = query.getLimit();
  final boolean descending = query.isDescending();

  final List<Interval> intervals = query.getQuerySegmentSpec().getIntervals();
  if (intervals.size() != 1) {
    throw new IAE("Should only have one interval, got[%s]", intervals);
  }
  final Interval interval = intervals.get(0);

  // Closing this will cause segfaults in unit tests.
  final QueryableIndex index = segment.asQueryableIndex();

  if (index != null) {
    final TreeMap<SearchHit, MutableInt> retVal = Maps.newTreeMap(query.getSort().getComparator());

    Iterable<DimensionSpec> dimsToSearch;
    if (dimensions == null || dimensions.isEmpty()) {
      dimsToSearch = Iterables.transform(index.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
    } else {
      dimsToSearch = dimensions;
    }

    final BitmapFactory bitmapFactory = index.getBitmapFactoryForDimensions();

    final ImmutableBitmap baseFilter = filter == null ? null
        : filter.getBitmapIndex(new ColumnSelectorBitmapIndexSelector(bitmapFactory, index));

    ImmutableBitmap timeFilteredBitmap;
    if (!interval.contains(segment.getDataInterval())) {
      MutableBitmap timeBitmap = bitmapFactory.makeEmptyMutableBitmap();
      final Column timeColumn = index.getColumn(Column.TIME_COLUMN_NAME);
      final GenericColumn timeValues = timeColumn.getGenericColumn();

      int startIndex = Math.max(0, getStartIndexOfTime(timeValues, interval.getStartMillis(), true));
      int endIndex = Math.min(timeValues.length() - 1,
          getStartIndexOfTime(timeValues, interval.getEndMillis(), false));

      for (int i = startIndex; i <= endIndex; i++) {
        timeBitmap.add(i);
      }

      final ImmutableBitmap finalTimeBitmap = bitmapFactory.makeImmutableBitmap(timeBitmap);
      timeFilteredBitmap = (baseFilter == null) ? finalTimeBitmap : finalTimeBitmap.intersection(baseFilter);
    } else {
      timeFilteredBitmap = baseFilter;
    }

    for (DimensionSpec dimension : dimsToSearch) {
      final Column column = index.getColumn(dimension.getDimension());
      if (column == null) {
        continue;
      }

      final BitmapIndex bitmapIndex = column.getBitmapIndex();
      ExtractionFn extractionFn = dimension.getExtractionFn();
      if (extractionFn == null) {
        extractionFn = IdentityExtractionFn.getInstance();
      }
      if (bitmapIndex != null) {
        for (int i = 0; i < bitmapIndex.getCardinality(); ++i) {
          String dimVal = Strings.nullToEmpty(extractionFn.apply(bitmapIndex.getValue(i)));
          if (!searchQuerySpec.accept(dimVal)) {
            continue;
          }
          ImmutableBitmap bitmap = bitmapIndex.getBitmap(i);
          if (timeFilteredBitmap != null) {
            bitmap = bitmapFactory.intersection(Arrays.asList(timeFilteredBitmap, bitmap));
          }
          if (bitmap.size() > 0) {
            MutableInt counter = new MutableInt(bitmap.size());
            MutableInt prev = retVal.put(new SearchHit(dimension.getOutputName(), dimVal), counter);
            if (prev != null) {
              counter.add(prev.intValue());
            }
            if (retVal.size() >= limit) {
              return makeReturnResult(limit, retVal);
            }
          }
        }
      }
    }

    return makeReturnResult(limit, retVal);
  }

  final StorageAdapter adapter = segment.asStorageAdapter();

  if (adapter == null) {
    log.makeAlert("WTF!? Unable to process search query on segment.")
        .addData("segment", segment.getIdentifier()).addData("query", query).emit();
    throw new ISE(
        "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped.");
  }

  final Iterable<DimensionSpec> dimsToSearch;
  if (dimensions == null || dimensions.isEmpty()) {
    dimsToSearch = Iterables.transform(adapter.getAvailableDimensions(), Druids.DIMENSION_IDENTITY);
  } else {
    dimsToSearch = dimensions;
  }

  final Sequence<Cursor> cursors = adapter.makeCursors(filter, interval, query.getGranularity(), descending);

  final TreeMap<SearchHit, MutableInt> retVal = cursors.accumulate(
      Maps.<SearchHit, SearchHit, MutableInt>newTreeMap(query.getSort().getComparator()),
      new Accumulator<TreeMap<SearchHit, MutableInt>, Cursor>() {
        @Override
        public TreeMap<SearchHit, MutableInt> accumulate(TreeMap<SearchHit, MutableInt> set, Cursor cursor)
        {
          if (set.size() >= limit) {
            return set;
          }

          Map<String, DimensionSelector> dimSelectors = Maps.newHashMap();
          for (DimensionSpec dim : dimsToSearch) {
            dimSelectors.put(dim.getOutputName(), cursor.makeDimensionSelector(dim));
          }

          while (!cursor.isDone()) {
            for (Map.Entry<String, DimensionSelector> entry : dimSelectors.entrySet()) {
              final DimensionSelector selector = entry.getValue();

              if (selector != null) {
                final IndexedInts vals = selector.getRow();
                for (int i = 0; i < vals.size(); ++i) {
                  final String dimVal = selector.lookupName(vals.get(i));
                  if (searchQuerySpec.accept(dimVal)) {
                    MutableInt counter = new MutableInt(1);
                    MutableInt prev = set.put(new SearchHit(entry.getKey(), dimVal), counter);
                    if (prev != null) {
                      counter.add(prev.intValue());
                    }
                    if (set.size() >= limit) {
                      return set;
                    }
                  }
                }
              }
            }

            cursor.advance();
          }

          return set;
        }
      });

  return makeReturnResult(limit, retVal);
}
From source file:io.druid.query.TimewarpOperator.java
License:Apache License
public QueryRunner<T> postProcess(final QueryRunner<T> baseRunner, final long now)
{
  return new QueryRunner<T>() {
    @Override
    public Sequence<T> run(final Query<T> query, final Map<String, Object> responseContext)
    {
      final long offset = computeOffset(now);

      final Interval interval = query.getIntervals().get(0);
      final Interval modifiedInterval = new Interval(
          Math.min(interval.getStartMillis() + offset, now + offset),
          Math.min(interval.getEndMillis() + offset, now + offset));
      return Sequences.map(
          baseRunner.run(
              query.withQuerySegmentSpec(new MultipleIntervalSegmentSpec(Arrays.asList(modifiedInterval))),
              responseContext),
          new Function<T, T>() {
            @Override
            public T apply(T input)
            {
              if (input instanceof Result) {
                Result res = (Result) input;
                Object value = res.getValue();
                if (value instanceof TimeBoundaryResultValue) {
                  TimeBoundaryResultValue boundary = (TimeBoundaryResultValue) value;

                  DateTime minTime = null;
                  try {
                    minTime = boundary.getMinTime();
                  } catch (IllegalArgumentException e) {
                  }

                  final DateTime maxTime = boundary.getMaxTime();

                  return (T) ((TimeBoundaryQuery) query).buildResult(
                      new DateTime(Math.min(res.getTimestamp().getMillis() - offset, now)),
                      minTime != null ? minTime.minus(offset) : null,
                      maxTime != null ? new DateTime(Math.min(maxTime.getMillis() - offset, now)) : null)
                      .iterator().next();
                }
                return (T) new Result(res.getTimestamp().minus(offset), value);
              } else if (input instanceof MapBasedRow) {
                MapBasedRow row = (MapBasedRow) input;
                return (T) new MapBasedRow(row.getTimestamp().minus(offset), row.getEvent());
              }

              // default to noop for unknown result types
              return input;
            }
          });
    }
  };
}
From source file:io.druid.segment.incremental.IncrementalIndexStorageAdapter.java
License:Apache License
@Override
public Sequence<Cursor> makeCursors(final Filter filter, final Interval interval, final QueryGranularity gran)
{
  if (index.isEmpty()) {
    return Sequences.empty();
  }

  Interval actualIntervalTmp = interval;

  final Interval dataInterval = new Interval(getMinTime().getMillis(),
      gran.next(gran.truncate(getMaxTime().getMillis())));

  if (!actualIntervalTmp.overlaps(dataInterval)) {
    return Sequences.empty();
  }

  if (actualIntervalTmp.getStart().isBefore(dataInterval.getStart())) {
    actualIntervalTmp = actualIntervalTmp.withStart(dataInterval.getStart());
  }
  if (actualIntervalTmp.getEnd().isAfter(dataInterval.getEnd())) {
    actualIntervalTmp = actualIntervalTmp.withEnd(dataInterval.getEnd());
  }

  final Interval actualInterval = actualIntervalTmp;

  return Sequences.map(
      Sequences.simple(gran.iterable(actualInterval.getStartMillis(), actualInterval.getEndMillis())),
      new Function<Long, Cursor>() {
        EntryHolder currEntry = new EntryHolder();
        private final ValueMatcher filterMatcher;

        {
          filterMatcher = makeFilterMatcher(filter, currEntry);
        }

        @Override
        public Cursor apply(@Nullable final Long input)
        {
          final long timeStart = Math.max(input, actualInterval.getStartMillis());

          return new Cursor() {
            private Iterator<Map.Entry<IncrementalIndex.TimeAndDims, Integer>> baseIter;
            private ConcurrentNavigableMap<IncrementalIndex.TimeAndDims, Integer> cursorMap;
            final DateTime time;
            int numAdvanced = -1;
            boolean done;

            {
              cursorMap = index.getSubMap(
                  new IncrementalIndex.TimeAndDims(timeStart, new String[][] {}),
                  new IncrementalIndex.TimeAndDims(
                      Math.min(actualInterval.getEndMillis(), gran.next(input)), new String[][] {}));
              time = gran.toDateTime(input);

              reset();
            }

            @Override
            public DateTime getTime()
            {
              return time;
            }

            @Override
            public void advance()
            {
              if (!baseIter.hasNext()) {
                done = true;
                return;
              }

              while (baseIter.hasNext()) {
                if (Thread.interrupted()) {
                  throw new QueryInterruptedException();
                }

                currEntry.set(baseIter.next());

                if (filterMatcher.matches()) {
                  return;
                }
              }

              if (!filterMatcher.matches()) {
                done = true;
              }
            }

            @Override
            public void advanceTo(int offset)
            {
              int count = 0;
              while (count < offset && !isDone()) {
                advance();
                count++;
              }
            }

            @Override
            public boolean isDone()
            {
              return done;
            }

            @Override
            public void reset()
            {
              baseIter = cursorMap.entrySet().iterator();

              if (numAdvanced == -1) {
                numAdvanced = 0;
              } else {
                Iterators.advance(baseIter, numAdvanced);
              }

              if (Thread.interrupted()) {
                throw new QueryInterruptedException();
              }

              boolean foundMatched = false;
              while (baseIter.hasNext()) {
                currEntry.set(baseIter.next());
                if (filterMatcher.matches()) {
                  foundMatched = true;
                  break;
                }

                numAdvanced++;
              }

              done = !foundMatched && (cursorMap.size() == 0 || !baseIter.hasNext());
            }

            @Override
            public DimensionSelector makeDimensionSelector(final String dimension,
                @Nullable final ExtractionFn extractionFn)
            {
              if (dimension.equals(Column.TIME_COLUMN_NAME)) {
                return new SingleScanTimeDimSelector(makeLongColumnSelector(dimension), extractionFn);
              }

              final IncrementalIndex.DimDim dimValLookup = index.getDimension(dimension);
              if (dimValLookup == null) {
                return NULL_DIMENSION_SELECTOR;
              }

              final int maxId = dimValLookup.size();
              final int dimIndex = index.getDimensionIndex(dimension);

              return new DimensionSelector() {
                @Override
                public IndexedInts getRow()
                {
                  final ArrayList<Integer> vals = Lists.newArrayList();
                  if (dimIndex < currEntry.getKey().getDims().length) {
                    final String[] dimVals = currEntry.getKey().getDims()[dimIndex];
                    if (dimVals != null) {
                      for (String dimVal : dimVals) {
                        int id = dimValLookup.getId(dimVal);
                        if (id < maxId) {
                          vals.add(id);
                        }
                      }
                    }
                  }
                  // check for null entry
                  if (vals.isEmpty() && dimValLookup.contains(null)) {
                    int id = dimValLookup.getId(null);
                    if (id < maxId) {
                      vals.add(id);
                    }
                  }

                  return new IndexedInts() {
                    @Override
                    public int size()
                    {
                      return vals.size();
                    }

                    @Override
                    public int get(int index)
                    {
                      return vals.get(index);
                    }

                    @Override
                    public Iterator<Integer> iterator()
                    {
                      return vals.iterator();
                    }

                    @Override
                    public void fill(int index, int[] toFill)
                    {
                      throw new UnsupportedOperationException("fill not supported");
                    }

                    @Override
                    public void close() throws IOException
                    {
                    }
                  };
                }

                @Override
                public int getValueCardinality()
                {
                  return maxId;
                }

                @Override
                public String lookupName(int id)
                {
                  final String value = dimValLookup.getValue(id);
                  return extractionFn == null ? value : extractionFn.apply(value);
                }

                @Override
                public int lookupId(String name)
                {
                  if (extractionFn != null) {
                    throw new UnsupportedOperationException(
                        "cannot perform lookup when applying an extraction function");
                  }
                  return dimValLookup.getId(name);
                }
              };
            }

            @Override
            public FloatColumnSelector makeFloatColumnSelector(String columnName)
            {
              final Integer metricIndexInt = index.getMetricIndex(columnName);
              if (metricIndexInt == null) {
                return new FloatColumnSelector() {
                  @Override
                  public float get()
                  {
                    return 0.0f;
                  }
                };
              }

              final int metricIndex = metricIndexInt;
              return new FloatColumnSelector() {
                @Override
                public float get()
                {
                  return index.getMetricFloatValue(currEntry.getValue(), metricIndex);
                }
              };
            }

            @Override
            public LongColumnSelector makeLongColumnSelector(String columnName)
            {
              if (columnName.equals(Column.TIME_COLUMN_NAME)) {
                return new LongColumnSelector() {
                  @Override
                  public long get()
                  {
                    return currEntry.getKey().getTimestamp();
                  }
                };
              }
              final Integer metricIndexInt = index.getMetricIndex(columnName);
              if (metricIndexInt == null) {
                return new LongColumnSelector() {
                  @Override
                  public long get()
                  {
                    return 0L;
                  }
                };
              }

              final int metricIndex = metricIndexInt;
              return new LongColumnSelector() {
                @Override
                public long get()
                {
                  return index.getMetricLongValue(currEntry.getValue(), metricIndex);
                }
              };
            }

            @Override
            public ObjectColumnSelector makeObjectColumnSelector(String column)
            {
              if (column.equals(Column.TIME_COLUMN_NAME)) {
                return new ObjectColumnSelector<Long>() {
                  @Override
                  public Class classOfObject()
                  {
                    return Long.TYPE;
                  }

                  @Override
                  public Long get()
                  {
                    return currEntry.getKey().getTimestamp();
                  }
                };
              }

              final Integer metricIndexInt = index.getMetricIndex(column);
              if (metricIndexInt != null) {
                final int metricIndex = metricIndexInt;

                final ComplexMetricSerde serde = ComplexMetrics.getSerdeForType(index.getMetricType(column));
                return new ObjectColumnSelector() {
                  @Override
                  public Class classOfObject()
                  {
                    return serde.getObjectStrategy().getClazz();
                  }

                  @Override
                  public Object get()
                  {
                    return index.getMetricObjectValue(currEntry.getValue(), metricIndex);
                  }
                };
              }

              final Integer dimensionIndexInt = index.getDimensionIndex(column);
              if (dimensionIndexInt != null) {
                final int dimensionIndex = dimensionIndexInt;
                return new ObjectColumnSelector<Object>() {
                  @Override
                  public Class classOfObject()
                  {
                    return Object.class;
                  }

                  @Override
                  public Object get()
                  {
                    IncrementalIndex.TimeAndDims key = currEntry.getKey();
                    if (key == null) {
                      return null;
                    }

                    String[][] dims = key.getDims();
                    if (dimensionIndex >= dims.length) {
                      return null;
                    }

                    final String[] dimVals = dims[dimensionIndex];
                    if (dimVals == null || dimVals.length == 0) {
                      return null;
                    }
                    if (dimVals.length == 1) {
                      return dimVals[0];
                    }
                    return dimVals;
                  }
                };
              }

              return null;
            }
          };
        }
      });
}
From source file:io.druid.segment.IndexMaker.java
License:Apache License
private static void makeIndexBinary(final FileSmoosher v9Smoosher, final List<IndexableAdapter> adapters,
    final File outDir, final List<String> mergedDimensions, final List<String> mergedMetrics,
    final Set<String> skippedDimensions, final ProgressIndicator progress, final IndexSpec indexSpec)
    throws IOException
{
  final String section = "building index.drd";
  progress.startSection(section);

  final Set<String> finalColumns = Sets.newTreeSet();
  finalColumns.addAll(mergedDimensions);
  finalColumns.addAll(mergedMetrics);
  finalColumns.removeAll(skippedDimensions);

  final Iterable<String> finalDimensions = Iterables.filter(mergedDimensions, new Predicate<String>() {
    @Override
    public boolean apply(String input)
    {
      return !skippedDimensions.contains(input);
    }
  });

  GenericIndexed<String> cols = GenericIndexed.fromIterable(finalColumns, GenericIndexed.STRING_STRATEGY);
  GenericIndexed<String> dims = GenericIndexed.fromIterable(finalDimensions, GenericIndexed.STRING_STRATEGY);

  final String bitmapSerdeFactoryType = mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory());
  final long numBytes = cols.getSerializedSize() + dims.getSerializedSize() + 16
      + serializerUtils.getSerializedStringByteSize(bitmapSerdeFactoryType);

  final SmooshedWriter writer = v9Smoosher.addWithSmooshedWriter("index.drd", numBytes);
  cols.writeToChannel(writer);
  dims.writeToChannel(writer);

  DateTime minTime = new DateTime(JodaUtils.MAX_INSTANT);
  DateTime maxTime = new DateTime(JodaUtils.MIN_INSTANT);

  for (IndexableAdapter index : adapters) {
    minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
    maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
  }
  final Interval dataInterval = new Interval(minTime, maxTime);

  serializerUtils.writeLong(writer, dataInterval.getStartMillis());
  serializerUtils.writeLong(writer, dataInterval.getEndMillis());
  serializerUtils.writeString(writer, bitmapSerdeFactoryType);
  writer.close();

  IndexIO.checkFileSize(new File(outDir, "index.drd"));
  progress.stopSection(section);
}
From source file:io.druid.segment.IndexMergerV9.java
License:Apache License
private void makeIndexBinary(final FileSmoosher v9Smoosher, final List<IndexableAdapter> adapters,
    final File outDir, final List<String> mergedDimensions, final List<String> mergedMetrics,
    final ProgressIndicator progress, final IndexSpec indexSpec, final List<DimensionMerger> mergers)
    throws IOException
{
  final String section = "make index.drd";
  progress.startSection(section);

  long startTime = System.currentTimeMillis();
  final Set<String> finalDimensions = Sets.newLinkedHashSet();
  final Set<String> finalColumns = Sets.newLinkedHashSet();
  finalColumns.addAll(mergedMetrics);
  for (int i = 0; i < mergedDimensions.size(); ++i) {
    if (mergers.get(i).canSkip()) {
      continue;
    }
    finalColumns.add(mergedDimensions.get(i));
    finalDimensions.add(mergedDimensions.get(i));
  }

  GenericIndexed<String> cols = GenericIndexed.fromIterable(finalColumns, GenericIndexed.STRING_STRATEGY);
  GenericIndexed<String> dims = GenericIndexed.fromIterable(finalDimensions, GenericIndexed.STRING_STRATEGY);

  final String bitmapSerdeFactoryType = mapper.writeValueAsString(indexSpec.getBitmapSerdeFactory());
  final long numBytes = cols.getSerializedSize() + dims.getSerializedSize() + 16
      + serializerUtils.getSerializedStringByteSize(bitmapSerdeFactoryType);

  final SmooshedWriter writer = v9Smoosher.addWithSmooshedWriter("index.drd", numBytes);
  cols.writeToChannel(writer);
  dims.writeToChannel(writer);

  DateTime minTime = new DateTime(JodaUtils.MAX_INSTANT);
  DateTime maxTime = new DateTime(JodaUtils.MIN_INSTANT);

  for (IndexableAdapter index : adapters) {
    minTime = JodaUtils.minDateTime(minTime, index.getDataInterval().getStart());
    maxTime = JodaUtils.maxDateTime(maxTime, index.getDataInterval().getEnd());
  }
  final Interval dataInterval = new Interval(minTime, maxTime);

  serializerUtils.writeLong(writer, dataInterval.getStartMillis());
  serializerUtils.writeLong(writer, dataInterval.getEndMillis());
  serializerUtils.writeString(writer, bitmapSerdeFactoryType);
  writer.close();

  IndexIO.checkFileSize(new File(outDir, "index.drd"));
  log.info("Completed index.drd in %,d millis.", System.currentTimeMillis() - startTime);

  progress.stopSection(section);
}
From source file:io.druid.sql.calcite.expression.Expressions.java
License:Apache License
/**
 * Translates to a simple leaf filter, meaning one that hits just a single column and is not an expression filter.
 */
private static DimFilter toSimpleLeafFilter(final PlannerContext plannerContext,
    final RowSignature rowSignature, final RexNode rexNode)
{
  final SqlKind kind = rexNode.getKind();

  if (kind == SqlKind.IS_TRUE || kind == SqlKind.IS_NOT_FALSE) {
    return toSimpleLeafFilter(plannerContext, rowSignature,
        Iterables.getOnlyElement(((RexCall) rexNode).getOperands()));
  } else if (kind == SqlKind.IS_FALSE || kind == SqlKind.IS_NOT_TRUE) {
    return new NotDimFilter(toSimpleLeafFilter(plannerContext, rowSignature,
        Iterables.getOnlyElement(((RexCall) rexNode).getOperands())));
  } else if (kind == SqlKind.IS_NULL || kind == SqlKind.IS_NOT_NULL) {
    final RexNode operand = Iterables.getOnlyElement(((RexCall) rexNode).getOperands());

    // operand must be translatable to a SimpleExtraction to be simple-filterable
    final DruidExpression druidExpression = toDruidExpression(plannerContext, rowSignature, operand);
    if (druidExpression == null || !druidExpression.isSimpleExtraction()) {
      return null;
    }

    final BoundDimFilter equalFilter = Bounds.equalTo(new BoundRefKey(
        druidExpression.getSimpleExtraction().getColumn(),
        druidExpression.getSimpleExtraction().getExtractionFn(), StringComparators.LEXICOGRAPHIC), "");

    return kind == SqlKind.IS_NOT_NULL ? new NotDimFilter(equalFilter) : equalFilter;
  } else if (kind == SqlKind.EQUALS || kind == SqlKind.NOT_EQUALS || kind == SqlKind.GREATER_THAN
      || kind == SqlKind.GREATER_THAN_OR_EQUAL || kind == SqlKind.LESS_THAN
      || kind == SqlKind.LESS_THAN_OR_EQUAL) {
    final List<RexNode> operands = ((RexCall) rexNode).getOperands();
    Preconditions.checkState(operands.size() == 2, "WTF?! Expected 2 operands, got[%,d]", operands.size());
    boolean flip = false;
    RexNode lhs = operands.get(0);
    RexNode rhs = operands.get(1);

    if (lhs.getKind() == SqlKind.LITERAL && rhs.getKind() != SqlKind.LITERAL) {
      // swap lhs, rhs
      RexNode x = lhs;
      lhs = rhs;
      rhs = x;
      flip = true;
    }

    // rhs must be a literal
    if (rhs.getKind() != SqlKind.LITERAL) {
      return null;
    }

    // lhs must be translatable to a SimpleExtraction to be simple-filterable
    final DruidExpression lhsExpression = toDruidExpression(plannerContext, rowSignature, lhs);
    if (lhsExpression == null || !lhsExpression.isSimpleExtraction()) {
      return null;
    }

    final String column = lhsExpression.getSimpleExtraction().getColumn();
    final ExtractionFn extractionFn = lhsExpression.getSimpleExtraction().getExtractionFn();

    if (column.equals(Column.TIME_COLUMN_NAME) && extractionFn instanceof TimeFormatExtractionFn) {
      // Check if we can strip the extractionFn and convert the filter to a direct filter on __time.
      // This allows potential conversion to query-level "intervals" later on, which is ideal for Druid queries.

      final Granularity granularity = ExtractionFns.toQueryGranularity(extractionFn);
      if (granularity != null) {
        // lhs is FLOOR(__time TO granularity); rhs must be a timestamp
        final long rhsMillis = Calcites.calciteDateTimeLiteralToJoda(rhs, plannerContext.getTimeZone())
            .getMillis();
        final Interval rhsInterval = granularity.bucket(new DateTime(rhsMillis));

        // Is rhs aligned on granularity boundaries?
        final boolean rhsAligned = rhsInterval.getStartMillis() == rhsMillis;

        // Create a BoundRefKey that strips the extractionFn and compares __time as a number.
        final BoundRefKey boundRefKey = new BoundRefKey(column, null, StringComparators.NUMERIC);

        if (kind == SqlKind.EQUALS) {
          return rhsAligned ? Bounds.interval(boundRefKey, rhsInterval) : Filtration.matchNothing();
        } else if (kind == SqlKind.NOT_EQUALS) {
          return rhsAligned ? new NotDimFilter(Bounds.interval(boundRefKey, rhsInterval))
              : Filtration.matchEverything();
        } else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) {
          return Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)
            || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) {
          return rhsAligned
              ? Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
              : Bounds.greaterThanOrEqualTo(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) {
          return rhsAligned ? Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getStartMillis()))
              : Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL)
            || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) {
          return Bounds.lessThan(boundRefKey, String.valueOf(rhsInterval.getEndMillis()));
        } else {
          throw new IllegalStateException("WTF?! Shouldn't have got here...");
        }
      }
    }

    final String val;
    final RexLiteral rhsLiteral = (RexLiteral) rhs;
    if (SqlTypeName.NUMERIC_TYPES.contains(rhsLiteral.getTypeName())) {
      val = String.valueOf(RexLiteral.value(rhsLiteral));
    } else if (SqlTypeName.CHAR_TYPES.contains(rhsLiteral.getTypeName())) {
      val = String.valueOf(RexLiteral.stringValue(rhsLiteral));
    } else if (SqlTypeName.TIMESTAMP == rhsLiteral.getTypeName()
        || SqlTypeName.DATE == rhsLiteral.getTypeName()) {
      val = String.valueOf(
          Calcites.calciteDateTimeLiteralToJoda(rhsLiteral, plannerContext.getTimeZone()).getMillis());
    } else {
      // Don't know how to filter on this kind of literal.
      return null;
    }

    // Numeric lhs needs a numeric comparison.
    final StringComparator comparator = Calcites
        .getStringComparatorForSqlTypeName(lhs.getType().getSqlTypeName());
    final BoundRefKey boundRefKey = new BoundRefKey(column, extractionFn, comparator);
    final DimFilter filter;

    // Always use BoundDimFilters, to simplify filter optimization later (it helps to remember the comparator).
    if (kind == SqlKind.EQUALS) {
      filter = Bounds.equalTo(boundRefKey, val);
    } else if (kind == SqlKind.NOT_EQUALS) {
      filter = new NotDimFilter(Bounds.equalTo(boundRefKey, val));
    } else if ((!flip && kind == SqlKind.GREATER_THAN) || (flip && kind == SqlKind.LESS_THAN)) {
      filter = Bounds.greaterThan(boundRefKey, val);
    } else if ((!flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)
        || (flip && kind == SqlKind.LESS_THAN_OR_EQUAL)) {
      filter = Bounds.greaterThanOrEqualTo(boundRefKey, val);
    } else if ((!flip && kind == SqlKind.LESS_THAN) || (flip && kind == SqlKind.GREATER_THAN)) {
      filter = Bounds.lessThan(boundRefKey, val);
    } else if ((!flip && kind == SqlKind.LESS_THAN_OR_EQUAL)
        || (flip && kind == SqlKind.GREATER_THAN_OR_EQUAL)) {
      filter = Bounds.lessThanOrEqualTo(boundRefKey, val);
    } else {
      throw new IllegalStateException("WTF?! Shouldn't have got here...");
    }

    return filter;
  } else if (kind == SqlKind.LIKE) {
    final List<RexNode> operands = ((RexCall) rexNode).getOperands();
    final DruidExpression druidExpression = toDruidExpression(plannerContext, rowSignature, operands.get(0));
    if (druidExpression == null || !druidExpression.isSimpleExtraction()) {
      return null;
    }
    return new LikeDimFilter(druidExpression.getSimpleExtraction().getColumn(),
        RexLiteral.stringValue(operands.get(1)),
        operands.size() > 2 ? RexLiteral.stringValue(operands.get(2)) : null,
        druidExpression.getSimpleExtraction().getExtractionFn());
  } else {
    return null;
  }
}
From source file:io.druid.sql.calcite.filtration.Bounds.java
License:Apache License
public static BoundDimFilter interval(final BoundRefKey boundRefKey, final Interval interval)
{
  if (!boundRefKey.getComparator().equals(StringComparators.NUMERIC)) {
    // Interval comparison only works with NUMERIC comparator.
    throw new ISE("Comparator must be NUMERIC but was[%s]", boundRefKey.getComparator());
  }

  return new BoundDimFilter(boundRefKey.getDimension(), String.valueOf(interval.getStartMillis()),
      String.valueOf(interval.getEndMillis()), false, true, null, boundRefKey.getExtractionFn(),
      boundRefKey.getComparator());
}
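The false, true flags passed above read as a lower bound that is inclusive and an upper bound that is exclusive, which lines up with Joda-Time's half-open interval convention: getEndMillis() is the first instant after the interval, not part of it. A minimal sketch of that convention in plain Joda-Time (class name and values are illustrative):

import org.joda.time.Interval;

public class HalfOpenIntervalExample {
  public static void main(String[] args) {
    // [1000, 2000): contains 1999 but not 2000.
    Interval interval = new Interval(1000L, 2000L);

    System.out.println(interval.contains(1999L));                    // true
    System.out.println(interval.contains(interval.getEndMillis()));  // false: the end instant is exclusive
  }
}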