Example usage for org.apache.lucene.util BitSet of

List of usage examples for org.apache.lucene.util BitSet of

Introduction

On this page you can find example usages of org.apache.lucene.util BitSet.of.

Prototype

public static BitSet of(DocIdSetIterator it, int maxDoc) throws IOException 

Source Link

Document

Builds a BitSet from the content of the provided DocIdSetIterator.

Usage

From source file:org.codelibs.elasticsearch.common.lucene.index.FilterableTermsEnum.java

License:Apache License

/**
 * Builds one {@link Holder} (terms enum plus optional accepted-docs bit set) per index
 * segment, optionally restricting matching docs to those accepted by {@code filter}.
 *
 * @param reader       the index reader whose leaves are enumerated
 * @param field        the field whose terms are iterated; segments lacking it are skipped
 * @param docsEnumFlag must be {@code PostingsEnum.FREQS} or {@code PostingsEnum.NONE}
 * @param filter       optional query; segments it fully excludes are skipped entirely
 * @throws IOException if reading the index fails
 */
public FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @Nullable Query filter)
        throws IOException {
    // Only FREQS and NONE are supported postings flags for this enum.
    if (docsEnumFlag != PostingsEnum.FREQS && docsEnumFlag != PostingsEnum.NONE) {
        throw new IllegalArgumentException("invalid docsEnumFlag of " + docsEnumFlag);
    }
    this.docsEnumFlag = docsEnumFlag;

    final List<LeafReaderContext> leaves = reader.leaves();
    final List<Holder> holders = new ArrayList<>(leaves.size());

    // Build the filter weight once up front; each leaf is then scored against it.
    Weight filterWeight = null;
    if (filter != null) {
        IndexSearcher searcher = new IndexSearcher(reader);
        searcher.setQueryCache(null);
        filterWeight = searcher.createNormalizedWeight(filter, false);
    }

    for (LeafReaderContext leaf : leaves) {
        Terms terms = leaf.reader().terms(field);
        if (terms == null) {
            continue;
        }
        TermsEnum termsEnum = terms.iterator();
        if (termsEnum == null) {
            continue;
        }

        BitSet acceptedDocs = null;
        if (filterWeight != null) {
            Scorer scorer = filterWeight.scorer(leaf);
            if (scorer == null) {
                // fully filtered, none matching, no need to iterate on this
                continue;
            }
            DocIdSetIterator matchingDocs = scorer.iterator();

            // we want to force apply deleted docs
            final Bits liveDocs = leaf.reader().getLiveDocs();
            if (liveDocs != null) {
                matchingDocs = new FilteredDocIdSetIterator(matchingDocs) {
                    @Override
                    protected boolean match(int doc) {
                        return liveDocs.get(doc);
                    }
                };
            }

            acceptedDocs = BitSet.of(matchingDocs, leaf.reader().maxDoc());
        }
        holders.add(new Holder(termsEnum, acceptedDocs));
    }
    this.enums = holders.toArray(new Holder[holders.size()]);
}

From source file:org.elasticsearch.index.cache.bitset.BitsetFilterCache.java

License:Apache License

/**
 * Returns the cached {@link BitSet} for {@code query} on {@code context}'s segment,
 * computing and caching it on first access.
 *
 * @param query   the filter query whose matching docs are materialized as a bit set
 * @param context the leaf (segment) the bit set applies to
 * @return the bit set of matching docs, or {@code null} when the query matches nothing
 * @throws IOException        if searching the segment fails
 * @throws ExecutionException if the cache loader fails
 */
private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context)
        throws IOException, ExecutionException {
    final Object coreCacheReader = context.reader().getCoreCacheKey();
    final ShardId shardId = ShardUtils.extractShardId(context.reader());
    if (shardId != null // can't require it because of the percolator
            && index.getName().equals(shardId.getIndex()) == false) {
        // insanity
        throw new IllegalStateException("Trying to load bit set for index [" + shardId.getIndex()
                + "] with cache of index [" + index.getName() + "]");
    }
    // Per-segment sub-cache, created lazily; register a core-closed listener so the
    // entry is dropped when the segment reader goes away.
    Cache<Query, Value> filterToFbs = loadedFilters.get(coreCacheReader, () -> {
        context.reader().addCoreClosedListener(BitsetFilterCache.this);
        return CacheBuilder.newBuilder().build();
    });
    return filterToFbs.get(query, () -> {
        IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
        IndexSearcher searcher = new IndexSearcher(topLevelContext);
        searcher.setQueryCache(null);
        Weight weight = searcher.createNormalizedWeight(query, false);
        Scorer s = weight.scorer(context);
        // A null scorer means nothing matches in this segment; cache null rather than
        // an empty set so callers can short-circuit.
        BitSet bitSet = (s == null) ? null : BitSet.of(s.iterator(), context.reader().maxDoc());
        Value value = new Value(bitSet, shardId);
        listener.onCache(shardId, value.bitset);
        return value;
    }).bitset;
}

From source file:org.elasticsearch.index.shard.ShardSplittingQuery.java

License:Apache License

/**
 * Returns a {@link BitSetProducer} that, for each leaf, yields the set of
 * non-nested (parent) documents, or {@code null} when the leaf has none.
 *
 * @param indexVersionCreated the index creation version, forwarded to the non-nested filter
 */
private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCreated) {
    return context -> {
        final Query parentFilter = Queries.newNonNestedFilter(indexVersionCreated);
        final IndexSearcher searcher = new IndexSearcher(ReaderUtil.getTopLevelContext(context));
        searcher.setQueryCache(null); // ad-hoc search: do not pollute the query cache
        final Weight weight = searcher.createNormalizedWeight(parentFilter, false);
        final Scorer scorer = weight.scorer(context);
        if (scorer == null) {
            return null; // no parent docs in this segment
        }
        return BitSet.of(scorer.iterator(), context.reader().maxDoc());
    };
}

From source file:org.elasticsearch.percolator.PercolatorMatchedSlotSubFetchPhase.java

License:Apache License

@Override
public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
    // For every percolate query in the search, record which slots of the percolated
    // document(s) matched each hit, stored as a per-hit document field.
    List<PercolateQuery> percolateQueries = locatePercolatorQuery(context.query());
    if (percolateQueries.isEmpty()) {
        return;
    }

    boolean onlyOneQuery = percolateQueries.size() == 1;
    for (PercolateQuery percolateQuery : percolateQueries) {
        // With several percolate queries, suffix the field name to keep them distinct.
        String fieldName = onlyOneQuery ? FIELD_NAME_PREFIX
                : FIELD_NAME_PREFIX + "_" + percolateQuery.getName();
        IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher();
        Weight weight = percolatorIndexSearcher.createNormalizedWeight(Queries.newNonNestedFilter(), false);
        Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0));
        int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc();
        BitSet rootDocs = BitSet.of(s.iterator(), memoryIndexMaxDoc);
        // Nested docs are present when not every doc in the memory index is a root doc;
        // only then do slots need to be resolved through their root documents.
        int[] rootDocsBySlot = rootDocs.cardinality() != percolatorIndexSearcher.getIndexReader().numDocs()
                ? buildRootDocsSlots(rootDocs)
                : null;

        PercolateQuery.QueryStore queryStore = percolateQuery.getQueryStore();
        List<LeafReaderContext> segments = context.searcher().getIndexReader().leaves();
        for (SearchHit hit : hits) {
            LeafReaderContext segment = segments.get(ReaderUtil.subIndex(hit.docId(), segments));
            int segmentDocId = hit.docId() - segment.docBase;
            Query query = queryStore.getQueries(segment).apply(segmentDocId);

            TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc,
                    new Sort(SortField.FIELD_DOC));
            if (topDocs.totalHits == 0) {
                // This hit didn't match with a percolate query,
                // likely to happen when percolating multiple documents
                continue;
            }

            Map<String, DocumentField> fields = hit.fieldsOrNull();
            if (fields == null) {
                fields = new HashMap<>();
                hit.fields(fields);
            }
            IntStream slots = convertTopDocsToSlots(topDocs, rootDocsBySlot);
            fields.put(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList())));
        }
    }
}