Example usage for org.apache.lucene.search IndexSearcher rewrite

List of usage examples for org.apache.lucene.search IndexSearcher rewrite

Introduction

On this page you can find example usage for org.apache.lucene.search IndexSearcher rewrite.

Prototype

public Query rewrite(Query original) throws IOException 

Source Link

Document

Expert: called to re-write queries into primitive queries.

Usage

From source file:net.conquiris.search.AbstractSearcher.java

License:Apache License

/** Rewrites the given query through a searcher managed by {@code perform}. */
public final Query rewrite(final Query query) {
    final Op<Query> rewriteOp = new Op<Query>() {
        @Override
        Query perform(IndexSearcher searcher) throws Exception {
            return searcher.rewrite(query);
        }
    };
    return perform(rewriteOp);
}

From source file:net.conquiris.search.AbstractSearcher.java

License:Apache License

/**
 * Returns the first hit of the query mapped to an item, or a not-found
 * result when nothing matches.
 *
 * @param mapper maps the matching hit to the result item.
 * @param query query to run.
 * @param filter optional filter to apply.
 * @param sort optional sort order.
 * @param highlight optional highlighting; {@code Highlight.no()} is used when null.
 * @return the first matching item together with hit count, score and elapsed time.
 */
public final <T> ItemResult<T> getFirst(final HitMapper<T> mapper, final Query query,
        final @Nullable Filter filter, final @Nullable Sort sort, final @Nullable Highlight highlight) {
    return perform(new Op<ItemResult<T>>() {
        public ItemResult<T> perform(IndexSearcher searcher) throws Exception {
            Stopwatch w = Stopwatch.createStarted();
            Query rewritten = searcher.rewrite(query);
            // BUG FIX: search with the rewritten query. The original code passed
            // the un-rewritten query here while highlighting the rewritten one,
            // inconsistent with getPage(), which searches with the rewrite.
            TopDocs docs = getTopDocs(searcher, rewritten, filter, sort, 1);
            if (docs.totalHits > 0) {
                ScoreDoc sd = docs.scoreDocs[0];
                // Highlighting works on the rewritten query (primitive terms).
                HighlightedQuery highlighted = MoreObjects.firstNonNull(highlight, Highlight.no())
                        .highlight(rewritten);
                float score = sd.score;
                T item = map(searcher, sd, highlighted, mapper);
                return ItemResult.found(docs.totalHits, score, w.elapsed(TimeUnit.MILLISECONDS), item);
            } else {
                return ItemResult.notFound(w.elapsed(TimeUnit.MILLISECONDS));
            }
        }
    });
}

From source file:net.conquiris.search.AbstractSearcher.java

License:Apache License

/**
 * Returns a page of results for the query: up to {@code maxRecords} items
 * starting at {@code firstRecord}.
 *
 * @param mapper maps each matching hit to a result item.
 * @param query query to run.
 * @param firstRecord zero-based offset of the first record to return.
 * @param maxRecords maximum number of records in the page.
 * @param filter optional filter to apply.
 * @param sort optional sort order.
 * @param highlight optional highlighting; {@code Highlight.no()} is used when null.
 * @return the requested page, or a not-found result when empty.
 */
public final <T> PageResult<T> getPage(final HitMapper<T> mapper, final Query query, final int firstRecord,
        final int maxRecords, final @Nullable Filter filter, final @Nullable Sort sort,
        final @Nullable Highlight highlight) {

    // Corner case: an empty page was requested; just report count and timing.
    if (maxRecords < 1) {
        CountResult count = getCount(query, filter, true);
        return PageResult.notFound(count.getTotalHits(), count.getMaxScore(), count.getTime(), firstRecord);
    }

    // Normal operation: fetch firstRecord + maxRecords hits and slice the page.
    return perform(new Op<PageResult<T>>() {
        public PageResult<T> perform(IndexSearcher searcher) throws Exception {
            final Stopwatch timer = Stopwatch.createStarted();
            final int requested = firstRecord + maxRecords;
            final Query rewritten = searcher.rewrite(query);
            final TopDocs docs = getTopDocs(searcher, rewritten, filter, sort, requested);
            if (docs.totalHits <= 0) {
                return PageResult.notFound(timer.elapsed(TimeUnit.MILLISECONDS), firstRecord);
            }
            final float maxScore = docs.getMaxScore();
            final int available = Math.min(requested, docs.scoreDocs.length);
            if (available <= firstRecord) {
                // Hits exist but none fall inside the requested page window.
                return PageResult.notFound(docs.totalHits, maxScore, timer.elapsed(TimeUnit.MILLISECONDS),
                        firstRecord);
            }
            // Highlighting works on the rewritten query (primitive terms).
            final HighlightedQuery highlighted = MoreObjects.firstNonNull(highlight, Highlight.no())
                    .highlight(rewritten);
            final List<T> items = Lists.newArrayListWithCapacity(available - firstRecord);
            for (int i = firstRecord; i < available; i++) {
                items.add(map(searcher, docs.scoreDocs[i], highlighted, mapper));
            }
            return PageResult.found(docs.totalHits, maxScore, timer.elapsed(TimeUnit.MILLISECONDS),
                    firstRecord, items);
        }
    });
}

From source file:net.sf.katta.lib.lucene.LuceneServer.java

License:Apache License

/**
 * Rewrites a query for the given shards
 * /*from   w w w .j a v  a2 s  .com*/
 * @param original
 * @param shardNames
 * @return
 * @throws IOException
 */
protected Query rewrite(final Query original, final String[] shardNames) throws IOException {
    final Query[] queries = new Query[shardNames.length];
    for (int i = 0; i < shardNames.length; i++) {
        final String shard = shardNames[i];
        final SearcherHandle handle = getSearcherHandleByShard(shard);
        IndexSearcher searcher = handle.getSearcher();
        try {
            if (searcher == null) {
                throw new IllegalStateException(
                        "no index-server for shard '" + shard + "' found - probably undeployed");
            } else {
                queries[i] = searcher.rewrite(original);
            }
        } finally {
            handle.finishSearcher();
        }
    }
    if (queries.length > 0 && queries[0] != null) {
        return queries[0].combine(queries);
    } else {
        LOG.error("No queries available for shards: " + Arrays.toString(shardNames));
    }
    return original;
}

From source file:org.elasticsearch.action.admin.indices.validate.query.TransportValidateQueryAction.java

License:Apache License

/**
 * Renders the rewritten form of the query as a string, falling back to the
 * original query's representation when the rewrite matches no documents.
 */
private String getRewrittenQuery(IndexSearcher searcher, Query query) throws IOException {
    final Query rewritten = searcher.rewrite(query);
    return rewritten instanceof MatchNoDocsQuery ? query.toString() : rewritten.toString();
}

From source file:org.elasticsearch.index.search.child.ChildrenConstantScoreQuery.java

License:Apache License

@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
    // Collects the ids of parents with at least one matching child, then builds
    // a weight that matches those parents with a constant score.
    SearchContext searchContext = SearchContext.current();
    BytesRefHash parentIds = new BytesRefHash(512, searchContext.pageCacheRecycler());
    ParentIdCollector collector = new ParentIdCollector(parentType, parentChildIndexFieldData, parentIds);
    final Query childQuery;
    if (rewrittenChildQuery == null) {
        // First use: rewrite the child query and cache the result.
        childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
    } else {
        // The cached rewrite is only valid for the reader it was produced against.
        assert rewriteIndexReader == searcher.getIndexReader();
        childQuery = rewrittenChildQuery;
    }
    // Run the child query on a fresh searcher over the same reader/similarity
    // to fill parentIds via the collector.
    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    indexSearcher.setSimilarity(searcher.getSimilarity());
    indexSearcher.search(childQuery, collector);

    long remaining = parentIds.size();
    if (remaining == 0) {
        // No parent has a matching child.
        // NOTE(review): parentIds is not released on this path, unlike the
        // analogous early return in ChildrenQuery.createWeight — verify whether
        // it should be.
        return Queries.newMatchNoDocsQuery().createWeight(searcher);
    }

    // With few matching parents, short-circuit matching with a direct filter
    // instead of checking parentIds per document.
    Filter shortCircuitFilter = null;
    if (remaining == 1) {
        // Exactly one parent: a single-term uid filter suffices.
        BytesRef id = parentIds.get(0, new BytesRef());
        shortCircuitFilter = new TermFilter(
                new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
    } else if (remaining <= shortCircuitParentDocSet) {
        shortCircuitFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
    }

    ParentWeight parentWeight = new ParentWeight(parentFilter, shortCircuitFilter, parentIds);
    // Register with the search context so parentIds is released after the search.
    searchContext.addReleasable(parentWeight);
    return parentWeight;
}

From source file:org.elasticsearch.index.search.child.ChildrenQuery.java

License:Apache License

@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
    // Scores parents by aggregating (MAX/SUM/AVG) the scores of their matching children.
    SearchContext searchContext = SearchContext.current();

    final Query childQuery;
    if (rewrittenChildQuery == null) {
        // First use: rewrite the child query and cache the result.
        childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
    } else {
        // The cached rewrite is only valid for the reader it was produced against.
        assert rewriteIndexReader == searcher.getIndexReader();
        childQuery = rewrittenChildQuery;
    }
    // Collect child hits via a fresh searcher over the same reader/similarity.
    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    indexSearcher.setSimilarity(searcher.getSimilarity());

    // Each collector accumulates parent ids and aggregated scores; only AVG
    // additionally tracks per-parent child counts (occurrences).
    final BytesRefHash parentIds;
    final FloatArray scores;
    final IntArray occurrences;
    switch (scoreType) {
    case MAX:
        MaxCollector maxCollector = new MaxCollector(parentChildIndexFieldData, parentType, searchContext);
        indexSearcher.search(childQuery, maxCollector);
        parentIds = maxCollector.parentIds;
        scores = maxCollector.scores;
        occurrences = null;
        // The index is only needed during collection; release it immediately.
        Releasables.release(maxCollector.parentIdsIndex);
        break;
    case SUM:
        SumCollector sumCollector = new SumCollector(parentChildIndexFieldData, parentType, searchContext);
        indexSearcher.search(childQuery, sumCollector);
        parentIds = sumCollector.parentIds;
        scores = sumCollector.scores;
        occurrences = null;
        Releasables.release(sumCollector.parentIdsIndex);
        break;
    case AVG:
        AvgCollector avgCollector = new AvgCollector(parentChildIndexFieldData, parentType, searchContext);
        indexSearcher.search(childQuery, avgCollector);
        parentIds = avgCollector.parentIds;
        scores = avgCollector.scores;
        occurrences = avgCollector.occurrences;
        Releasables.release(avgCollector.parentIdsIndex);
        break;
    default:
        throw new RuntimeException("Are we missing a score type here? -- " + scoreType);
    }

    int size = (int) parentIds.size();
    if (size == 0) {
        // No parent has a matching child; release everything collected so far.
        Releasables.release(parentIds, scores, occurrences);
        return Queries.newMatchNoDocsQuery().createWeight(searcher);
    }

    // With few matching parents, match them with a direct filter instead of a
    // per-document lookup against the parentIds hash.
    final Filter parentFilter;
    if (size == 1) {
        // Exactly one parent: a single-term uid filter, intersected with the
        // non-nested docs filter when nested docs are present.
        BytesRef id = parentIds.get(0, new BytesRef());
        if (nonNestedDocsFilter != null) {
            List<Filter> filters = Arrays.asList(
                    new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))),
                    nonNestedDocsFilter);
            parentFilter = new AndFilter(filters);
        } else {
            parentFilter = new TermFilter(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)));
        }
    } else if (size <= shortCircuitParentDocSet) {
        parentFilter = new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
    } else {
        parentFilter = new ApplyAcceptedDocsFilter(this.parentFilter);
    }
    ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentFilter, size,
            parentIds, scores, occurrences);
    // Register with the search context so the collected structures are released
    // after the search.
    searchContext.addReleasable(parentWeight);
    return parentWeight;
}

From source file:org.elasticsearch.index.search.child.ParentQuery.java

License:Apache License

@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
    // Scores children by the score of their parent: runs the parent query to
    // collect parent ids with scores, then builds a weight over the children.
    SearchContext searchContext = SearchContext.current();
    ParentIdAndScoreCollector collector = new ParentIdAndScoreCollector(searchContext,
            parentChildIndexFieldData, parentType);

    final Query parentQuery;
    if (rewrittenParentQuery == null) {
        // First use: rewrite the parent query and cache the result.
        parentQuery = rewrittenParentQuery = searcher.rewrite(originalParentQuery);
    } else {
        // The cached rewrite is only valid for the reader it was produced against.
        assert rewriteIndexReader == searcher.getIndexReader();
        parentQuery = rewrittenParentQuery;
    }
    // Collect parent hits via a fresh searcher over the same reader/similarity.
    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    indexSearcher.setSimilarity(searcher.getSimilarity());
    indexSearcher.search(parentQuery, collector);
    FloatArray scores = collector.scores;
    BytesRefHash parentIds = collector.parentIds;

    if (parentIds.size() == 0) {
        // No parent matched; release the collected structures before bailing out.
        Releasables.release(parentIds, scores);
        return Queries.newMatchNoDocsQuery().createWeight(searcher);
    }

    ChildWeight childWeight = new ChildWeight(searchContext, parentQuery.createWeight(searcher), childrenFilter,
            parentIds, scores);
    // Register with the search context so parentIds/scores are released after
    // the search.
    searchContext.addReleasable(childWeight);
    return childWeight;
}

From source file:org.elasticsearch.index.search.child.TopChildrenQuery.java

License:Apache License

@Override
public Weight createWeight(IndexSearcher searcher) throws IOException {
    // Resolves top child hits to their parents, retrying with progressively
    // larger child windows until enough parents are found.
    Recycler.V<ObjectObjectOpenHashMap<Object, ParentDoc[]>> parentDocs = cacheRecycler.hashMap(-1);
    SearchContext searchContext = SearchContext.current();

    int parentHitsResolved;
    int requestedDocs = (searchContext.from() + searchContext.size());
    if (requestedDocs <= 0) {
        requestedDocs = 1; // guard: from + size may be zero
    }
    // Over-fetch child docs by `factor` since several children can share a parent.
    int numChildDocs = requestedDocs * factor;

    Query childQuery;
    if (rewrittenChildQuery == null) {
        // First use: rewrite the child query and cache the result.
        childQuery = rewrittenChildQuery = searcher.rewrite(originalChildQuery);
    } else {
        // The cached rewrite is only valid for the reader it was produced against.
        assert rewriteIndexReader == searcher.getIndexReader();
        childQuery = rewrittenChildQuery;
    }

    // Search via a fresh searcher over the same reader/similarity.
    IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
    indexSearcher.setSimilarity(searcher.getSimilarity());
    while (true) {
        parentDocs.v().clear();
        TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
        try {
            parentHitsResolved = resolveParentDocuments(topChildDocs, searchContext, parentDocs);
        } catch (Exception e) {
            throw new IOException(e);
        }

        // check if we found enough docs, if so, break
        if (parentHitsResolved >= requestedDocs) {
            break;
        }
        // if we did not find enough docs, check if it make sense to search further
        if (topChildDocs.totalHits <= numChildDocs) {
            break;
        }
        // if not, update numDocs, and search again
        numChildDocs *= incrementalFactor;
        if (numChildDocs > topChildDocs.totalHits) {
            numChildDocs = topChildDocs.totalHits;
        }
    }

    ParentWeight parentWeight = new ParentWeight(rewrittenChildQuery.createWeight(searcher), parentDocs);
    // Register with the search context so parentDocs is released after the search.
    searchContext.addReleasable(parentWeight);
    return parentWeight;
}

From source file:org.elasticsearch.search.aggregations.AggregatorTestCase.java

License:Apache License

/**
 * Divides the provided {@link IndexSearcher} in sub-searcher, one for each segment,
 * builds an aggregator for each sub-searcher filtered by the provided {@link Query} and
 * returns the reduced {@link InternalAggregation}.
 *
 * @param searcher searcher whose segments are aggregated independently
 * @param query filter applied to each sub-searcher
 * @param builder aggregation to build per sub-searcher
 * @param fieldTypes field mappings the aggregator needs
 * @return the reduced aggregation, or {@code null} when no shard produced one
 * @throws IOException on search failure
 */
protected <A extends InternalAggregation, C extends Aggregator> A searchAndReduce(IndexSearcher searcher,
        Query query, AggregationBuilder builder, MappedFieldType... fieldTypes) throws IOException {
    final IndexReaderContext ctx = searcher.getTopReaderContext();

    // One ShardSearcher per leaf (segment); a single-leaf reader is a special case.
    final ShardSearcher[] subSearchers;
    if (ctx instanceof LeafReaderContext) {
        subSearchers = new ShardSearcher[1];
        subSearchers[0] = new ShardSearcher((LeafReaderContext) ctx, ctx);
    } else {
        final CompositeReaderContext compCTX = (CompositeReaderContext) ctx;
        final int size = compCTX.leaves().size();
        subSearchers = new ShardSearcher[size];
        for (int searcherIDX = 0; searcherIDX < subSearchers.length; searcherIDX++) {
            final LeafReaderContext leave = compCTX.leaves().get(searcherIDX);
            subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX);
        }
    }

    List<InternalAggregation> aggs = new ArrayList<>();
    // The weight is created once from the top-level searcher and shared by all
    // sub-searchers.
    Query rewritten = searcher.rewrite(query);
    Weight weight = searcher.createWeight(rewritten, true);
    // Root aggregator only provides the reduce context; per-shard aggregators
    // do the collection.
    C root = createAggregator(builder, searcher, fieldTypes);
    try {
        for (ShardSearcher subSearcher : subSearchers) {
            C a = createAggregator(builder, subSearcher, fieldTypes);
            a.preCollection();
            subSearcher.search(weight, a);
            a.postCollection();
            aggs.add(a.buildAggregation(0L));
        }
        if (aggs.isEmpty()) {
            return null;
        } else {
            if (randomBoolean()) {
                // sometimes do an incremental reduce (partial reduce of a random
                // subset, then a final reduce) to exercise both reduce paths
                List<InternalAggregation> internalAggregations = randomSubsetOf(
                        randomIntBetween(1, aggs.size()), aggs);
                A internalAgg = (A) aggs.get(0).doReduce(internalAggregations,
                        new InternalAggregation.ReduceContext(root.context().bigArrays(), null, false));
                aggs.removeAll(internalAggregations);
                aggs.add(internalAgg);
            }
            // now do the final reduce
            @SuppressWarnings("unchecked")
            A internalAgg = (A) aggs.get(0).doReduce(aggs,
                    new InternalAggregation.ReduceContext(root.context().bigArrays(), null, true));
            return internalAgg;
        }
    } finally {
        // NOTE(review): `releasables` is a field declared outside this snippet,
        // presumably populated by createAggregator — confirm against the class.
        Releasables.close(releasables);
        releasables.clear();
    }
}