Example usage for org.apache.lucene.search.grouping BlockGroupingCollector BlockGroupingCollector

List of usage examples for org.apache.lucene.search.grouping BlockGroupingCollector BlockGroupingCollector

Introduction

In this page you can find the example usage for org.apache.lucene.search.grouping BlockGroupingCollector BlockGroupingCollector.

Prototype

public BlockGroupingCollector(Sort groupSort, int topNGroups, boolean needsScores, Weight lastDocPerGroup) 

Source Link

Document

Create the single pass collector.

Usage

From source file: perf.SearchTask.java

License: Apache License

/**
 * Runs this search task once against the supplied {@code IndexState}: executes the query
 * with optional grouping, faceting, sorting and highlighting, and stores the results in
 * this task's fields ({@code hits}, {@code groupsResultBlock}, {@code groupsResultTerms},
 * {@code facetResults}, {@code totalHitCount}).
 *
 * <p>Branch selection, in order: grouping (single-pass block grouping or two-pass term
 * grouping), then faceting, then a plain relevance search (when no sort {@code s} is set),
 * then a sorted search.
 *
 * @param state shared index state providing the searcher manager, highlighter setup,
 *              taxonomy reader and facets config
 * @throws IOException only from releasing the searcher in {@code finally}; any failure
 *                     during the search itself is wrapped in a {@code RuntimeException}
 */
@Override
public void go(IndexState state) throws IOException {
    //System.out.println("go group=" + this.group + " single=" + singlePassGroup + " xxx=" + xxx + " this=" + this);
    final IndexSearcher searcher = state.mgr.acquire();

    //System.out.println("GO query=" + q);

    try {
        if (doHilite) {
            // Pick the highlighter implementation configured on the index state;
            // the postings highlighter variant needs no per-query setup.
            if (state.fastHighlighter != null) {
                fieldQuery = state.fastHighlighter.getFieldQuery(q, searcher.getIndexReader());
            } else if (state.useHighlighter) {
                highlighter = new Highlighter(new SimpleHTMLFormatter(), new QueryScorer(q));
            } else {
                // no setup for postingshighlighter
            }
        }

        if (group != null) {
            if (singlePassGroup) {
                // Single-pass block grouping: assumes each group's docs were indexed as a
                // contiguous block whose last doc matches state.groupEndQuery.
                final BlockGroupingCollector c = new BlockGroupingCollector(Sort.RELEVANCE, 10, true,
                        searcher.createNormalizedWeight(state.groupEndQuery, false));
                searcher.search(q, c);
                // groupSort=relevance, groupOffset=0, withinGroupOffset=0, maxDocsPerGroup=10
                groupsResultBlock = c.getTopGroups(Sort.RELEVANCE, 0, 0, 10, true);

                if (doHilite) {
                    hilite(groupsResultBlock, state, searcher);
                }

            } else {
                //System.out.println("GB: " + group);
                // Two-pass term grouping: first pass finds the top groups...
                final TermFirstPassGroupingCollector c1 = new TermFirstPassGroupingCollector(group,
                        Sort.RELEVANCE, 10);

                final Collector c;
                final TermAllGroupsCollector allGroupsCollector;
                // Turn off AllGroupsCollector for now -- it's very slow:
                if (false && doCountGroups) {
                    allGroupsCollector = new TermAllGroupsCollector(group);
                    //c = MultiCollector.wrap(allGroupsCollector, c1);
                    c = c1;
                } else {
                    allGroupsCollector = null;
                    c = c1;
                }

                searcher.search(q, c);

                final Collection<SearchGroup<BytesRef>> topGroups = c1.getTopGroups(0, true);
                if (topGroups != null) {
                    // ...second pass re-runs the query to collect the top docs per group.
                    final TermSecondPassGroupingCollector c2 = new TermSecondPassGroupingCollector(group,
                            topGroups, Sort.RELEVANCE, Sort.RELEVANCE, 10, true, true, true);
                    searcher.search(q, c2);
                    groupsResultTerms = c2.getTopGroups(0);
                    if (allGroupsCollector != null) {
                        // Attach the exact group count when it was gathered (disabled above).
                        groupsResultTerms = new TopGroups<BytesRef>(groupsResultTerms,
                                allGroupsCollector.getGroupCount());
                    }
                    if (doHilite) {
                        hilite(groupsResultTerms, state, searcher);
                    }
                }
            }
        } else if (!facetRequests.isEmpty()) {
            // TODO: support sort, filter too!!
            // TODO: support other facet methods
            if (doDrillSideways) {
                // nocommit todo
                hits = null;
                facetResults = null;
            } else {
                facetResults = new ArrayList<FacetResult>();
                FacetsCollector fc = new FacetsCollector();
                hits = FacetsCollector.search(searcher, q, 10, fc);
                long t0 = System.nanoTime();

                Facets mainFacets = null;
                for (String request : facetRequests) {
                    if (request.startsWith("range:")) {
                        // Parse "range:field:lo-hi,lo-hi,..." into LongRange facets.
                        int i = request.indexOf(':', 6);
                        if (i == -1) {
                            throw new IllegalArgumentException("range facets request \"" + request
                                    + "\" is missing field; should be range:field:0-10,10-20");
                        }
                        String field = request.substring(6, i);
                        String[] rangeStrings = request.substring(i + 1).split(",");
                        LongRange[] ranges = new LongRange[rangeStrings.length];
                        for (int rangeIDX = 0; rangeIDX < ranges.length; rangeIDX++) {
                            String rangeString = rangeStrings[rangeIDX];
                            int j = rangeString.indexOf('-');
                            if (j == -1) {
                                throw new IllegalArgumentException(
                                        "range facets request should be X-Y; got: " + rangeString);
                            }
                            long start = Long.parseLong(rangeString.substring(0, j));
                            long end = Long.parseLong(rangeString.substring(j + 1));
                            // Both endpoints inclusive; the raw "lo-hi" text labels the range.
                            ranges[rangeIDX] = new LongRange(rangeString, start, true, end, true);
                        }
                        LongRangeFacetCounts facets = new LongRangeFacetCounts(field, fc, ranges);
                        facetResults.add(facets.getTopChildren(ranges.length, field));
                    } else {
                        // Otherwise treat the request as a taxonomy dimension name.
                        Facets facets = new FastTaxonomyFacetCounts(state.taxoReader, state.facetsConfig, fc);
                        facetResults.add(facets.getTopChildren(10, request));
                    }
                }
                getFacetResultsMsec = (System.nanoTime() - t0) / 1000000.0;
            }
        } else if (s == null) {
            // Plain relevance search.
            hits = searcher.search(q, topN);
            if (doHilite) {
                hilite(hits, state, searcher, q);
            }
        } else {
            // Field-sorted search.
            hits = searcher.search(q, topN, s);
            if (doHilite) {
                hilite(hits, state, searcher, q);
            }
            /*
              final boolean fillFields = true;
              final boolean fieldSortDoTrackScores = true;
              final boolean fieldSortDoMaxScore = true;
              final TopFieldCollector c = TopFieldCollector.create(s, topN,
              fillFields,
              fieldSortDoTrackScores,
              fieldSortDoMaxScore,
              false);
              searcher.search(q, c);
              hits = c.topDocs();
            */
        }
        if (hits != null) {
            totalHitCount = hits.totalHits;

            if (doStoredLoads) {
                // Optionally load stored fields for every hit to exercise stored-field retrieval.
                for (int i = 0; i < hits.scoreDocs.length; i++) {
                    ScoreDoc scoreDoc = hits.scoreDocs[i];
                    searcher.doc(scoreDoc.doc);
                }
            }

        } else if (groupsResultBlock != null) {
            totalHitCount = groupsResultBlock.totalHitCount;
        }
    } catch (Throwable t) {
        // Benchmark boundary: report the offending query, then fail the run.
        System.out.println("EXC: " + q);
        throw new RuntimeException(t);
        //System.out.println("TE: " + TermsEnum.getStats());
    } finally {
        // Always return the searcher to the manager and clear per-run highlight state.
        state.mgr.release(searcher);
        fieldQuery = null;
        highlighter = null;
    }
}