List of usage examples for org.apache.lucene.search DocIdSet EMPTY
DocIdSet EMPTY
To view the source code for org.apache.lucene.search DocIdSet EMPTY, click the Source Link below each example.
From source file:org.elasticsearch.index.cache.fixedbitset.FixedBitSetFilterCache.java
License:Apache License
/**
 * Returns the cached {@link FixedBitSet} for {@code filter} on the given segment,
 * computing and caching it on a miss.
 *
 * Caching is two-level: an outer cache keyed by the segment's core cache key holds a
 * per-segment inner cache keyed by filter. A core listener is registered the first time
 * a segment is seen so the inner cache can be evicted when the segment closes.
 *
 * @param filter  the filter whose matching docs are materialized as a bit set
 * @param context the segment to evaluate the filter against
 * @throws IOException        on index access failure
 * @throws ExecutionException if a cache loader throws
 */
private FixedBitSet getAndLoadIfNotPresent(final Filter filter, final AtomicReaderContext context)
        throws IOException, ExecutionException {
    final Object coreCacheReader = context.reader().getCoreCacheKey();
    final ShardId shardId = ShardUtils.extractShardId(context.reader());
    // Per-segment cache of filter -> bit set; created lazily on first use of this segment.
    Cache<Filter, Value> filterToFbs = loadedFilters.get(coreCacheReader, new Callable<Cache<Filter, Value>>() {
        @Override
        public Cache<Filter, Value> call() throws Exception {
            // Hook segment close so this per-segment cache can be cleaned up.
            SegmentReaderUtils.registerCoreListener(context.reader(), FixedBitSetFilterCache.this);
            return CacheBuilder.newBuilder().build();
        }
    });
    return filterToFbs.get(filter, new Callable<Value>() {
        @Override
        public Value call() throws Exception {
            DocIdSet docIdSet = filter.getDocIdSet(context, null);
            final FixedBitSet fixedBitSet;
            if (docIdSet instanceof FixedBitSet) {
                // Already in the desired representation; cache it directly.
                fixedBitSet = (FixedBitSet) docIdSet;
            } else {
                // Materialize the doc id set into a fixed bit set sized to the segment.
                fixedBitSet = new FixedBitSet(context.reader().maxDoc());
                // A null or EMPTY set means no matches; leave the bit set all-clear.
                if (docIdSet != null && docIdSet != DocIdSet.EMPTY) {
                    DocIdSetIterator iterator = docIdSet.iterator();
                    // iterator() may legally return null for an empty set.
                    if (iterator != null) {
                        int doc = iterator.nextDoc();
                        if (doc != DocIdSetIterator.NO_MORE_DOCS) {
                            do {
                                fixedBitSet.set(doc);
                                doc = iterator.nextDoc();
                            } while (doc != DocIdSetIterator.NO_MORE_DOCS);
                        }
                    }
                }
            }
            Value value = new Value(fixedBitSet, shardId);
            // Report the cached memory footprint to the owning shard's stats, if resolvable.
            if (shardId != null) {
                IndexShard shard = indexService.shard(shardId.id());
                if (shard != null) {
                    shard.shardFixedBitSetFilterCache().onCached(value.fixedBitSet.ramBytesUsed());
                }
            }
            return value;
        }
    }).fixedBitSet;
}
From source file:org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesCollectorQueueTests.java
License:Apache License
/**
 * Randomized round-trip test for {@link CompositeValuesCollectorQueue}.
 *
 * Builds a random index over the given source types (Long, Double, BytesRef), computes
 * the expected set of composite keys from docs that have at least one value for every
 * field, then drains the queue page-by-page (via {@code setAfter}) and asserts the keys
 * come back in sorted order — both through a {@link SortedDocsProducer} (when one is
 * available) and through plain leaf collection.
 *
 * @param forceMerge whether to force-merge to one segment (enables the global-ordinals source)
 * @param types      the field types composing the composite key, in order
 */
private void testRandomCase(boolean forceMerge, ClassAndName... types) throws IOException {
    final BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
    int numDocs = randomIntBetween(50, 100);
    // One pool of candidate values per source type; docs draw their values from these pools.
    List<Comparable<?>[]> possibleValues = new ArrayList<>();
    for (ClassAndName type : types) {
        int numValues = randomIntBetween(1, numDocs * 2);
        Comparable<?>[] values = new Comparable[numValues];
        if (type.clazz == Long.class) {
            for (int i = 0; i < numValues; i++) {
                values[i] = randomLong();
            }
        } else if (type.clazz == Double.class) {
            for (int i = 0; i < numValues; i++) {
                values[i] = randomDouble();
            }
        } else if (type.clazz == BytesRef.class) {
            for (int i = 0; i < numValues; i++) {
                values[i] = new BytesRef(randomAlphaOfLengthBetween(5, 50));
            }
        } else {
            assert (false);
        }
        possibleValues.add(values);
    }
    // Expected composite keys, built only from docs that have a value for every field.
    Set<CompositeKey> keys = new HashSet<>();
    try (Directory directory = newDirectory()) {
        try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, new KeywordAnalyzer())) {
            for (int i = 0; i < numDocs; i++) {
                Document document = new Document();
                List<List<Comparable<?>>> docValues = new ArrayList<>();
                boolean hasAllField = true;
                for (int j = 0; j < types.length; j++) {
                    // 0 values for any field excludes the doc from the expected key set.
                    int numValues = randomIntBetween(0, 5);
                    if (numValues == 0) {
                        hasAllField = false;
                    }
                    List<Comparable<?>> values = new ArrayList<>();
                    for (int k = 0; k < numValues; k++) {
                        values.add(
                                possibleValues.get(j)[randomIntBetween(0, possibleValues.get(j).length - 1)]);
                        if (types[j].clazz == Long.class) {
                            long value = (Long) values.get(k);
                            // Points are added alongside doc values so a points-based
                            // SortedDocsProducer can be created for the leading long source.
                            document.add(new SortedNumericDocValuesField(types[j].fieldType.name(), value));
                            document.add(new LongPoint(types[j].fieldType.name(), value));
                        } else if (types[j].clazz == Double.class) {
                            document.add(new SortedNumericDocValuesField(types[j].fieldType.name(),
                                    NumericUtils.doubleToSortableLong((Double) values.get(k))));
                        } else if (types[j].clazz == BytesRef.class) {
                            BytesRef value = (BytesRef) values.get(k);
                            document.add(new SortedSetDocValuesField(types[j].fieldType.name(),
                                    (BytesRef) values.get(k)));
                            document.add(new TextField(types[j].fieldType.name(), value.utf8ToString(),
                                    Field.Store.NO));
                        } else {
                            assert (false);
                        }
                    }
                    docValues.add(values);
                }
                if (hasAllField) {
                    // All cross-products of this doc's per-field values are reachable keys.
                    List<CompositeKey> comb = createListCombinations(docValues);
                    keys.addAll(comb);
                }
                indexWriter.addDocument(document);
            }
            if (forceMerge) {
                indexWriter.forceMerge(1);
            }
        }
        IndexReader reader = DirectoryReader.open(directory);
        // Page size for the queue; smaller than the key set so paging is exercised.
        int size = randomIntBetween(1, keys.size());
        SingleDimensionValuesSource<?>[] sources = new SingleDimensionValuesSource[types.length];
        for (int i = 0; i < types.length; i++) {
            final MappedFieldType fieldType = types[i].fieldType;
            if (types[i].clazz == Long.class) {
                sources[i] = new LongValuesSource(bigArrays, fieldType,
                        context -> context.reader().getSortedNumericDocValues(fieldType.name()), value -> value,
                        DocValueFormat.RAW, size, 1);
            } else if (types[i].clazz == Double.class) {
                sources[i] = new DoubleValuesSource(bigArrays, fieldType,
                        context -> FieldData.sortableLongBitsToDoubles(
                                context.reader().getSortedNumericDocValues(fieldType.name())), size, 1);
            } else if (types[i].clazz == BytesRef.class) {
                if (forceMerge) {
                    // we don't create global ordinals but we test this mode when the reader has
                    // a single segment since ordinals are global in this case.
                    sources[i] = new GlobalOrdinalValuesSource(bigArrays, fieldType,
                            context -> context.reader().getSortedSetDocValues(fieldType.name()), size, 1);
                } else {
                    sources[i] = new BinaryValuesSource(fieldType, context -> FieldData
                            .toString(context.reader().getSortedSetDocValues(fieldType.name())), size, 1);
                }
            } else {
                assert (false);
            }
        }
        CompositeKey[] expected = keys.toArray(new CompositeKey[0]);
        Arrays.sort(expected, (a, b) -> compareKey(a, b));
        CompositeValuesCollectorQueue queue = new CompositeValuesCollectorQueue(sources, size);
        // Producer is only available for certain leading sources (e.g. long with points).
        final SortedDocsProducer docsProducer = sources[0].createSortedDocsProducerOrNull(reader,
                new MatchAllDocsQuery());
        for (boolean withProducer : new boolean[] { true, false }) {
            if (withProducer && docsProducer == null) {
                continue;
            }
            int pos = 0;
            CompositeKey last = null;
            // Drain all keys page by page, resuming after the last key of the previous page.
            while (pos < size) {
                queue.clear();
                if (last != null) {
                    queue.setAfter(last.values());
                }
                for (LeafReaderContext leafReaderContext : reader.leaves()) {
                    final LeafBucketCollector leafCollector = new LeafBucketCollector() {
                        @Override
                        public void collect(int doc, long bucket) throws IOException {
                            queue.addIfCompetitive();
                        }
                    };
                    if (withProducer) {
                        // fillDocIdSet=false, so the producer must report an EMPTY doc id set.
                        assertEquals(DocIdSet.EMPTY, docsProducer.processLeaf(new MatchAllDocsQuery(), queue,
                                leafReaderContext, false));
                    } else {
                        final LeafBucketCollector queueCollector = queue.getLeafCollector(leafReaderContext,
                                leafCollector);
                        final Bits liveDocs = leafReaderContext.reader().getLiveDocs();
                        for (int i = 0; i < leafReaderContext.reader().maxDoc(); i++) {
                            if (liveDocs == null || liveDocs.get(i)) {
                                queueCollector.collect(i);
                            }
                        }
                    }
                }
                assertEquals(size, Math.min(queue.size(), expected.length - pos));
                // The queue's sorted slots must match the globally sorted expected keys.
                int ptr = 0;
                for (int slot : queue.getSortedSlot()) {
                    CompositeKey key = queue.toCompositeKey(slot);
                    assertThat(key, equalTo(expected[ptr++]));
                    last = key;
                }
                pos += queue.size();
            }
        }
        reader.close();
    }
}
From source file:org.elasticsearch.search.aggregations.bucket.composite.PointsSortedDocsProducer.java
License:Apache License
/**
 * Visits the point values of {@code field} on this leaf, feeding competitive
 * documents to {@code queue}, bounded by the queue's current lower/upper lead
 * values. Returns the collected doc ids when {@code fillDocIdSet} is set,
 * otherwise {@link DocIdSet#EMPTY}.
 */
@Override
DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context,
        boolean fillDocIdSet) throws IOException {
    final PointValues pointValues = context.reader().getPointValues(field);
    if (pointValues == null) {
        // No point index for this field on this leaf: nothing to collect.
        return DocIdSet.EMPTY;
    }
    // The queue's lead-source bounds restrict the competitive range; this
    // producer only handles numeric (long-valued) point fields.
    long lower = Long.MIN_VALUE;
    final Comparable<?> lowerLead = queue.getLowerValueLeadSource();
    if (lowerLead != null) {
        if (lowerLead.getClass() != Long.class) {
            throw new IllegalStateException("expected Long, got " + lowerLead.getClass());
        }
        lower = (Long) lowerLead;
    }
    long upper = Long.MAX_VALUE;
    final Comparable<?> upperLead = queue.getUpperValueLeadSource();
    if (upperLead != null) {
        if (upperLead.getClass() != Long.class) {
            throw new IllegalStateException("expected Long, got " + upperLead.getClass());
        }
        upper = (Long) upperLead;
    }
    final DocIdSetBuilder collected = fillDocIdSet
            ? new DocIdSetBuilder(context.reader().maxDoc(), pointValues, field)
            : null;
    final Visitor visitor = new Visitor(context, queue, collected, pointValues.getBytesPerDimension(), lower,
            upper);
    try {
        pointValues.intersect(visitor);
        visitor.flush();
    } catch (CollectionTerminatedException ignored) {
        // Thrown by the visitor to end the traversal early once no remaining
        // bucket can be competitive; safe to swallow.
    }
    return fillDocIdSet ? collected.build() : DocIdSet.EMPTY;
}
From source file:org.elasticsearch.search.aggregations.bucket.composite.TermsSortedDocsProducer.java
License:Apache License
/**
 * Iterates the terms of {@code field} on this leaf in sorted order, processing one
 * bucket per term, bounded by the queue's lower/upper lead values. Returns the
 * collected doc ids when {@code fillDocIdSet} is set, otherwise {@link DocIdSet#EMPTY}.
 */
@Override
DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReaderContext context,
        boolean fillDocIdSet) throws IOException {
    final Terms terms = context.reader().terms(field);
    if (terms == null) {
        // no value for the field
        return DocIdSet.EMPTY;
    }
    BytesRef lowerValue = (BytesRef) queue.getLowerValueLeadSource();
    BytesRef upperValue = (BytesRef) queue.getUpperValueLeadSource();
    final TermsEnum te = terms.iterator();
    // Position the enum on the first candidate term; bail out if the range is empty.
    if (lowerValue != null) {
        if (te.seekCeil(lowerValue) == TermsEnum.SeekStatus.END) {
            return DocIdSet.EMPTY;
        }
    } else {
        if (te.next() == null) {
            return DocIdSet.EMPTY;
        }
    }
    DocIdSetBuilder builder = fillDocIdSet ? new DocIdSetBuilder(context.reader().maxDoc(), terms) : null;
    PostingsEnum reuse = null;
    boolean first = true;
    do {
        // Terms are enumerated in sorted order, so passing the upper bound ends the scan.
        if (upperValue != null && upperValue.compareTo(te.term()) < 0) {
            break;
        }
        reuse = te.postings(reuse, PostingsEnum.NONE);
        // The first bucket is always processed fully, even when non-competitive,
        // hence the !first guard on early termination.
        if (processBucket(queue, context, reuse, te.term(), builder) && !first) {
            // this bucket does not have any competitive composite buckets,
            // we can early terminate the collection because the remaining buckets are guaranteed
            // to be greater than this bucket.
            break;
        }
        first = false;
    } while (te.next() != null);
    return fillDocIdSet ? builder.build() : DocIdSet.EMPTY;
}
From source file:org.hibernate.search.filter.impl.CachingWrapperQuery.java
License:LGPL
/**
 * Creates a weight for the wrapped query, caching the per-segment {@link DocIdSet}
 * when scores are not needed.
 *
 * When {@code needsScores} is true the cache cannot be used (it stores only doc ids),
 * so the wrapped query's own weight is returned. Otherwise a constant-score weight is
 * returned whose scorer draws from the per-core-key cache, populating it on a miss.
 */
@Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException {
    final Weight weight = query.createWeight(searcher, needsScores);
    if (needsScores) {
        // our cache is not sufficient, we need scores too
        return weight;
    }
    return new ConstantScoreWeight(weight.getQuery()) {
        @Override
        public void extractTerms(Set<Term> terms) {
            weight.extractTerms(terms);
        }

        @Override
        public Scorer scorer(LeafReaderContext context) throws IOException {
            DocIdSet docIdSet = getDocIdSet(context);
            assert docIdSet != null;
            // EMPTY is a sentinel for "no matches on this segment": no scorer needed.
            if (docIdSet == DocIdSet.EMPTY) {
                return null;
            }
            final DocIdSetIterator disi = docIdSet.iterator();
            // iterator() may legally return null for an empty set.
            if (disi == null) {
                return null;
            }
            return new ConstantScoreScorer(this, 0f, disi);
        }

        /**
         * Returns the cached doc id set for this segment, computing and caching it
         * on a miss. Uses an unsynchronized fast-path read, then re-checks under
         * the cache lock before computing, so each segment is computed at most once.
         */
        private DocIdSet getDocIdSet(LeafReaderContext context) throws IOException {
            final LeafReader reader = context.reader();
            final Object key = reader.getCoreCacheKey();
            Object cached = cache.get(key);
            if (cached != null) {
                return (DocIdSet) cached;
            }
            synchronized (cache) {
                // Re-check: another thread may have populated the entry while we waited.
                cached = cache.get(key);
                if (cached != null) {
                    return (DocIdSet) cached;
                }
                final DocIdSet docIdSet;
                final Scorer scorer = weight.scorer(context);
                if (scorer == null) {
                    // Cache the EMPTY sentinel so misses are also remembered.
                    docIdSet = DocIdSet.EMPTY;
                } else {
                    docIdSet = cacheImpl(scorer.iterator(), reader);
                }
                cache.put(key, docIdSet);
                return docIdSet;
            }
        }
    };
}