Example usage for org.apache.lucene.search MultiCollector wrap

Introduction

This page collects example usages of org.apache.lucene.search.MultiCollector#wrap from open-source projects.

Prototype

public static Collector wrap(Iterable<? extends Collector> collectors) 

Document

Wraps a list of Collectors with a MultiCollector.

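Before the project examples, here is a minimal, self-contained sketch of the call. It is illustrative only: it assumes an already-open IndexSearcher named searcher, and the choice of a TotalHitCountCollector plus a FacetsCollector is arbitrary (the FacetsCollector import matches recent Lucene releases; its package has moved between versions). Besides the Iterable overload shown in the prototype, MultiCollector also provides a varargs wrap(Collector... collectors) overload, which several examples below use.

import java.util.Arrays;

import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.TotalHitCountCollector;

// Run two collectors in a single pass over the index:
// one counts all hits, the other gathers facet data.
TotalHitCountCollector hitCount = new TotalHitCountCollector();
FacetsCollector facets = new FacetsCollector();

// Iterable overload, matching the prototype above; the varargs
// form MultiCollector.wrap(hitCount, facets) is equivalent.
Collector multi = MultiCollector.wrap(Arrays.asList(hitCount, facets));

searcher.search(new MatchAllDocsQuery(), multi);
int totalHits = hitCount.getTotalHits();

In recent Lucene versions wrap returns a single non-null collector as-is and throws IllegalArgumentException when no non-null collector is supplied, which is why several examples below guard the empty and single-collector cases themselves.
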
Usage

From source file:com.qwazr.search.index.QueryCollectors.java

License:Apache License

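A helper that avoids needless wrapping: zero collectors yields null, a single collector is returned directly, and only two or more are combined with MultiCollector.wrap.
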
private final Collector getFinalCollector() {
    switch (collectors.size()) {
    case 0:
        return null;
    case 1:
        return collectors.get(0);
    default:
        return MultiCollector.wrap(collectors);
    }
}

From source file:com.tripod.lucene.service.AbstractLuceneService.java

License:Apache License

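A search method that combines a TopFieldCollector for sorted, paged results with a FacetsCollector via MultiCollector.wrap whenever facet fields are requested.
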
/**
 * Common logic for sub-classes to perform searches.
 *
 * @param query the query
 * @return the QueryResults
 * @throws QueryException if an error occurred performing the search
 */
protected LuceneQueryResults<QR> performSearch(Q query) throws QueryException {
    IndexSearcher searcher = null;
    try {
        // Acquire an IndexSearcher
        searcher = searcherManager.acquire();

        // Start the results builder with the offset and rows from the query
        final LuceneQueryResults.Builder<QR> resultsBuilder = new LuceneQueryResults.Builder<QR>()
                .pageSize(query.getRows());

        // Transform the incoming query into a Lucene Query
        final Query luceneQuery = queryTransformer.transform(query);

        // Get the return fields
        final Set<String> fieldsToLoad = new HashSet<>();
        if (query.getReturnFields() != null) {
            query.getReturnFields().stream().forEach(f -> fieldsToLoad.add(f.getName()));
        }

        // Get the facet fields
        final Set<String> facetFields = new HashSet<>();
        if (query.getFacetFields() != null) {
            query.getFacetFields().stream().forEach(f -> facetFields.add(f.getName()));
        }

        final Sort sort = getSort(query.getSorts());
        final Highlighter highlighter = getHighlighter(query, luceneQuery);

        // Collector to use when faceting
        final FacetsCollector facetsCollector = new FacetsCollector();

        // Collector for sorted/paged results
        final TopFieldCollector topFieldCollector = TopFieldCollector.create(sort, query.getRows(),
                (FieldDoc) query.getAfterDoc(), true, false, false);

        // Wrapped collector depending on whether faceting or not
        final Collector collector = facetFields.isEmpty() ? MultiCollector.wrap(topFieldCollector)
                : MultiCollector.wrap(topFieldCollector, facetsCollector);

        // Perform the Lucene query
        final long startTime = System.currentTimeMillis();
        FacetsCollector.searchAfter(searcher, query.getAfterDoc(), luceneQuery, query.getRows(), sort,
                collector);
        LOGGER.debug("Query executed in " + (System.currentTimeMillis() - startTime) + " ms");

        // Transform each Lucene Document to a QueryResult
        ScoreDoc afterDoc = null;
        for (ScoreDoc scoreDoc : topFieldCollector.topDocs().scoreDocs) {
            final Document doc = getDoc(searcher, scoreDoc.doc, fieldsToLoad);
            final QR result = documentTransformer.transform(doc);
            performHighlighting(searcher, query, scoreDoc, doc, highlighter, result);

            resultsBuilder.addResult(result);
            afterDoc = scoreDoc;
        }

        // Get faceting results
        processFacetResults(searcher, facetsCollector, facetFields, resultsBuilder);

        // Store the last ScoreDoc so it can be passed back for the next page
        resultsBuilder.afterDoc(afterDoc);
        resultsBuilder.totalResults(topFieldCollector.getTotalHits());
        return resultsBuilder.build();

    } catch (TransformException e) {
        throw new QueryException("A transform error occurred");
    } catch (IOException | InvalidTokenOffsetsException e) {
        throw new QueryException("Unexpected error occurred performing query", e);
    } finally {
        if (searcher != null) {
            try {
                searcherManager.release(searcher);
            } catch (IOException e) {
                LOGGER.warn("Error releasing IndexSearcher: " + e.getMessage(), e);
            }
            searcher = null;
        }
    }
}

From source file:org.apache.solr.search.grouping.CommandHandler.java

License:Apache License

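Solr's grouping CommandHandler gathers the collectors produced by each command and wraps them into a single Collector for the time-limited search.
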
@SuppressWarnings("unchecked")
public void execute() throws IOException {
    final int nrOfCommands = commands.size();
    List<Collector> collectors = new ArrayList<Collector>(nrOfCommands);
    for (Command command : commands) {
        collectors.addAll(command.create());
    }

    ProcessedFilter filter = searcher.getProcessedFilter(queryCommand.getFilter(),
            queryCommand.getFilterList());
    Query query = QueryUtils.makeQueryable(queryCommand.getQuery());

    if (truncateGroups) {
        docSet = computeGroupedDocSet(query, filter, collectors);
    } else if (needDocset) {
        docSet = computeDocSet(query, filter, collectors);
    } else if (!collectors.isEmpty()) {
        searchWithTimeLimiter(query, filter,
                MultiCollector.wrap(collectors.toArray(new Collector[nrOfCommands])));
    } else {
        searchWithTimeLimiter(query, filter, null);
    }
}

From source file:org.apache.solr.search.grouping.CommandHandler.java

License:Apache License

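Appends an all-group-heads collector to any existing collectors before wrapping, so group heads are gathered in the same pass as the other results.
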
private DocSet computeGroupedDocSet(Query query, ProcessedFilter filter, List<Collector> collectors)
        throws IOException {
    Command firstCommand = commands.get(0);
    AbstractAllGroupHeadsCollector termAllGroupHeadsCollector = TermAllGroupHeadsCollector
            .create(firstCommand.getKey(), firstCommand.getSortWithinGroup());
    if (collectors.isEmpty()) {
        searchWithTimeLimiter(query, filter, termAllGroupHeadsCollector);
    } else {
        collectors.add(termAllGroupHeadsCollector);
        searchWithTimeLimiter(query, filter,
                MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()])));
    }

    int maxDoc = searcher.maxDoc();
    long[] bits = termAllGroupHeadsCollector.retrieveGroupHeads(maxDoc).getBits();
    return new BitDocSet(new OpenBitSet(bits, bits.length));
}

From source file:org.apache.solr.search.grouping.CommandHandler.java

License:Apache License

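Wraps the extra collectors and hands the result to a delegating DocSet collector, so the DocSet and the other results are computed together.
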
private DocSet computeDocSet(Query query, ProcessedFilter filter, List<Collector> collectors)
        throws IOException {
    int maxDoc = searcher.maxDoc();
    DocSetCollector docSetCollector;
    if (collectors.isEmpty()) {
        docSetCollector = new DocSetCollector(maxDoc >> 6, maxDoc);
    } else {
        Collector wrappedCollectors = MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()]));
        docSetCollector = new DocSetDelegateCollector(maxDoc >> 6, maxDoc, wrappedCollectors);
    }
    searchWithTimeLimiter(query, filter, docSetCollector);
    return docSetCollector.getDocSet();
}

From source file:org.apache.solr.search.Grouping.java

License:Apache License

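Solr's two-pass grouping: the first-pass collectors are wrapped (optionally behind a caching collector), then the second-pass collectors are wrapped again for replay or a fresh search.
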
public void execute() throws IOException {
    if (commands.isEmpty()) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "Specify at least one field, function or query to group by.");
    }

    DocListAndSet out = new DocListAndSet();
    qr.setDocListAndSet(out);

    SolrIndexSearcher.ProcessedFilter pf = searcher.getProcessedFilter(cmd.getFilter(), cmd.getFilterList());
    final Filter luceneFilter = pf.filter;
    maxDoc = searcher.maxDoc();

    needScores = (cmd.getFlags() & SolrIndexSearcher.GET_SCORES) != 0;
    boolean cacheScores = false;
    // NOTE: Change this when groupSort can be specified per group
    if (!needScores && !commands.isEmpty()) {
        if (commands.get(0).groupSort == null) {
            cacheScores = true;
        } else {
            for (SortField field : commands.get(0).groupSort.getSort()) {
                if (field.getType() == SortField.Type.SCORE) {
                    cacheScores = true;
                    break;
                }
            }
        }
    } else if (needScores) {
        cacheScores = needScores;
    }
    getDocSet = (cmd.getFlags() & SolrIndexSearcher.GET_DOCSET) != 0;
    getDocList = (cmd.getFlags() & SolrIndexSearcher.GET_DOCLIST) != 0;
    query = QueryUtils.makeQueryable(cmd.getQuery());

    for (Command cmd : commands) {
        cmd.prepare();
    }

    AbstractAllGroupHeadsCollector<?> allGroupHeadsCollector = null;
    List<Collector> collectors = new ArrayList<Collector>(commands.size());
    for (Command cmd : commands) {
        Collector collector = cmd.createFirstPassCollector();
        if (collector != null) {
            collectors.add(collector);
        }
        if (getGroupedDocSet && allGroupHeadsCollector == null) {
            collectors.add(allGroupHeadsCollector = cmd.createAllGroupCollector());
        }
    }

    Collector allCollectors = MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()]));
    DocSetCollector setCollector = null;
    if (getDocSet && allGroupHeadsCollector == null) {
        setCollector = new DocSetDelegateCollector(maxDoc >> 6, maxDoc, allCollectors);
        allCollectors = setCollector;
    }

    CachingCollector cachedCollector = null;
    if (cacheSecondPassSearch && allCollectors != null) {
        int maxDocsToCache = (int) Math.round(maxDoc * (maxDocsPercentageToCache / 100.0d));
        // Only makes sense to cache if we cache more than zero.
        // Maybe we should have a minimum and a maximum, that defines the window we would like caching for.
        if (maxDocsToCache > 0) {
            allCollectors = cachedCollector = CachingCollector.create(allCollectors, cacheScores,
                    maxDocsToCache);
        }
    }

    if (pf.postFilter != null) {
        pf.postFilter.setLastDelegate(allCollectors);
        allCollectors = pf.postFilter;
    }

    if (allCollectors != null) {
        searchWithTimeLimiter(luceneFilter, allCollectors);
    }

    if (getGroupedDocSet && allGroupHeadsCollector != null) {
        FixedBitSet fixedBitSet = allGroupHeadsCollector.retrieveGroupHeads(maxDoc);
        long[] bits = fixedBitSet.getBits();
        OpenBitSet openBitSet = new OpenBitSet(bits, bits.length);
        qr.setDocSet(new BitDocSet(openBitSet));
    } else if (getDocSet) {
        qr.setDocSet(setCollector.getDocSet());
    }

    collectors.clear();
    for (Command cmd : commands) {
        Collector collector = cmd.createSecondPassCollector();
        if (collector != null)
            collectors.add(collector);
    }

    if (!collectors.isEmpty()) {
        Collector secondPhaseCollectors = MultiCollector
                .wrap(collectors.toArray(new Collector[collectors.size()]));
        if (collectors.size() > 0) {
            if (cachedCollector != null) {
                if (cachedCollector.isCached()) {
                    cachedCollector.replay(secondPhaseCollectors);
                } else {
                    signalCacheWarning = true;
                    logger.warn(String.format(Locale.ROOT,
                            "The grouping cache is active, but not used because it exceeded the max cache limit of %d percent",
                            maxDocsPercentageToCache));
                    logger.warn("Please increase cache size or disable group caching.");
                    searchWithTimeLimiter(luceneFilter, secondPhaseCollectors);
                }
            } else {
                if (pf.postFilter != null) {
                    pf.postFilter.setLastDelegate(secondPhaseCollectors);
                    secondPhaseCollectors = pf.postFilter;
                }
                searchWithTimeLimiter(luceneFilter, secondPhaseCollectors);
            }
        }
    }

    for (Command cmd : commands) {
        cmd.finish();
    }

    qr.groupedResults = grouped;

    if (getDocList) {
        int sz = idSet.size();
        int[] ids = new int[sz];
        int idx = 0;
        for (int val : idSet) {
            ids[idx++] = val;
        }
        qr.setDocList(new DocSlice(0, sz, ids, null, maxMatches, maxScore));
    }
}

From source file:org.elasticsearch.percolator.QueryCollector.java

License:Apache License

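Elasticsearch's percolator combines facet and aggregation collectors, wrapping with MultiCollector only when more than one collector is present.
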
QueryCollector(ESLogger logger, PercolateContext context) {
    this.logger = logger;
    this.queries = context.percolateQueries();
    this.searcher = context.docSearcher();
    final FieldMapper<?> idMapper = context.mapperService().smartNameFieldMapper(IdFieldMapper.NAME);
    this.idFieldData = context.fieldData().getForField(idMapper);

    if (context.facets() != null) {
        for (SearchContextFacets.Entry entry : context.facets().entries()) {
            if (entry.isGlobal()) {
                continue; // not supported for now
            }
            Collector collector = entry.getFacetExecutor().collector();
            if (entry.getFilter() != null) {
                if (collector instanceof NestedFacetExecutor.Collector) {
                    collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector,
                            entry.getFilter());
                } else {
                    collector = new FilteredCollector(collector, entry.getFilter());
                }
            }
            facetCollectors.add(collector);
        }
    }

    List<Collector> collectors = new ArrayList<Collector>(facetCollectors);
    if (context.aggregations() != null) {
        AggregationContext aggregationContext = new AggregationContext(context);
        context.aggregations().aggregationContext(aggregationContext);

        List<Aggregator> aggregatorCollectors = new ArrayList<Aggregator>();
        Aggregator[] aggregators = context.aggregations().factories()
                .createTopLevelAggregators(aggregationContext);
        for (int i = 0; i < aggregators.length; i++) {
            if (!(aggregators[i] instanceof GlobalAggregator)) {
                Aggregator aggregator = aggregators[i];
                if (aggregator.shouldCollect()) {
                    aggregatorCollectors.add(aggregator);
                }
            }
        }
        context.aggregations().aggregators(aggregators);
        if (!aggregatorCollectors.isEmpty()) {
            collectors
                    .add(new AggregationPhase.AggregationsCollector(aggregatorCollectors, aggregationContext));
        }
    }

    int size = collectors.size();
    if (size == 0) {
        facetAndAggregatorCollector = null;
    } else if (size == 1) {
        facetAndAggregatorCollector = collectors.get(0);
    } else {
        facetAndAggregatorCollector = MultiCollector.wrap(collectors.toArray(new Collector[collectors.size()]));
    }
}

From source file:org.elasticsearch.search.facet.FacetPhase.java

License:Apache License

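Collector-mode facet executors keyed by their filter are executed together under a single wrapped Collector.
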
@Override
public void execute(SearchContext context) throws ElasticsearchException {
    if (context.facets() == null) {
        return;
    }

    if (context.queryResult().facets() != null) {
        // no need to compute the facets twice, they should be computed on a per context basis
        return;
    }

    Map<Filter, List<Collector>> filtersByCollector = null;
    List<ContextDocIdSet> globalDocSets = null;
    for (SearchContextFacets.Entry entry : context.facets().entries()) {
        if (!entry.isGlobal()) {
            if (entry.getMode() == FacetExecutor.Mode.POST) {
                FacetExecutor.Post post = entry.getFacetExecutor().post();
                if (entry.getFilter() != null) {
                    if (post instanceof NestedFacetExecutor.Post) {
                        post = new NestedFacetExecutor.Post((NestedFacetExecutor.Post) post, entry.getFilter());
                    } else {
                        post = new FacetExecutor.Post.Filtered(post, entry.getFilter());
                    }
                }
                try {
                    post.executePost(context.searcher().mainDocIdSetCollector().docSets());
                } catch (Exception e) {
                    throw new QueryPhaseExecutionException(context,
                            "failed to execute facet [" + entry.getFacetName() + "]", e);
                }
            }
        } else {
            if (entry.getMode() == FacetExecutor.Mode.POST) {
                if (globalDocSets == null) {
                    // build global post entries, map a reader context to a live docs docIdSet
                    List<AtomicReaderContext> leaves = context.searcher().getIndexReader().leaves();
                    globalDocSets = new ArrayList<ContextDocIdSet>(leaves.size());
                    for (AtomicReaderContext leaf : leaves) {
                        globalDocSets.add(new ContextDocIdSet(leaf, BitsFilteredDocIdSet
                                .wrap(new AllDocIdSet(leaf.reader().maxDoc()), leaf.reader().getLiveDocs())) // need to only include live docs
                        );
                    }
                }
                try {
                    FacetExecutor.Post post = entry.getFacetExecutor().post();
                    if (entry.getFilter() != null) {
                        if (post instanceof NestedFacetExecutor.Post) {
                            post = new NestedFacetExecutor.Post((NestedFacetExecutor.Post) post,
                                    entry.getFilter());
                        } else {
                            post = new FacetExecutor.Post.Filtered(post, entry.getFilter());
                        }
                    }
                    post.executePost(globalDocSets);
                } catch (Exception e) {
                    throw new QueryPhaseExecutionException(context,
                            "Failed to execute facet [" + entry.getFacetName() + "]", e);
                }
            } else if (entry.getMode() == FacetExecutor.Mode.COLLECTOR) {
                Filter filter = Queries.MATCH_ALL_FILTER;
                Collector collector = entry.getFacetExecutor().collector();
                if (entry.getFilter() != null) {
                    if (collector instanceof NestedFacetExecutor.Collector) {
                        collector = new NestedFacetExecutor.Collector((NestedFacetExecutor.Collector) collector,
                                entry.getFilter());
                    } else {
                        collector = new FilteredCollector(collector, entry.getFilter());
                    }
                }
                if (filtersByCollector == null) {
                    filtersByCollector = Maps.newHashMap();
                }
                List<Collector> list = filtersByCollector.get(filter);
                if (list == null) {
                    list = new ArrayList<Collector>();
                    filtersByCollector.put(filter, list);
                }
                list.add(collector);
            }
        }
    }

    // optimize the global collector based execution
    if (filtersByCollector != null) {
        // now, go and execute the filters->collector ones
        for (Map.Entry<Filter, List<Collector>> entry : filtersByCollector.entrySet()) {
            Filter filter = entry.getKey();
            Query query = new XConstantScoreQuery(filter);
            Filter searchFilter = context.searchFilter(context.types());
            if (searchFilter != null) {
                query = new XFilteredQuery(query, searchFilter);
            }
            try {
                context.searcher().search(query,
                        MultiCollector.wrap(entry.getValue().toArray(new Collector[entry.getValue().size()])));
            } catch (Exception e) {
                throw new QueryPhaseExecutionException(context, "Failed to execute global facets", e);
            }
            for (Collector collector : entry.getValue()) {
                if (collector instanceof XCollector) {
                    ((XCollector) collector).postCollection();
                }
            }
        }
    }

    List<Facet> facets = new ArrayList<Facet>(context.facets().entries().size());
    for (SearchContextFacets.Entry entry : context.facets().entries()) {
        facets.add(entry.getFacetExecutor().buildFacet(entry.getFacetName()));
    }
    context.queryResult().facets(new InternalFacets(facets));
}

From source file:org.elasticsearch.search.query.QueryCollectorContext.java

License:Apache License

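Prepends the incoming collector to the provided sub-collectors before wrapping; the profiling variant wraps the combined collector in an InternalProfileCollector.
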
/**
 * Creates a multi collector from the provided <code>subs</code>
 */
static QueryCollectorContext createMultiCollectorContext(Collection<Collector> subs) {
    return new QueryCollectorContext(REASON_SEARCH_MULTI) {
        @Override
        Collector create(Collector in) throws IOException {
            List<Collector> subCollectors = new ArrayList<>();
            subCollectors.add(in);
            subCollectors.addAll(subs);
            return MultiCollector.wrap(subCollectors);
        }

        @Override
        protected InternalProfileCollector createWithProfiler(InternalProfileCollector in) throws IOException {
            final List<InternalProfileCollector> subCollectors = new ArrayList<>();
            subCollectors.add(in);
            if (subs.stream().anyMatch((col) -> col instanceof InternalProfileCollector == false)) {
                throw new IllegalArgumentException("non-profiling collector");
            }
            for (Collector collector : subs) {
                subCollectors.add((InternalProfileCollector) collector);
            }
            final Collector collector = MultiCollector.wrap(subCollectors);
            return new InternalProfileCollector(collector, REASON_SEARCH_MULTI, subCollectors);
        }
    };
}

From source file:org.hibernate.search.backend.lucene.search.query.impl.LuceneCollectorsBuilder.java

License:LGPL

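Hibernate Search wraps its collectors only when more than one is present, otherwise using the top-docs or total-hit-count collector directly.
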
LuceneCollectors build() {
    Collector compositeCollector;
    if (luceneCollectors.size() == 1) {
        compositeCollector = topDocsCollector != null ? topDocsCollector : totalHitCountCollector;
    } else {
        compositeCollector = MultiCollector.wrap(luceneCollectors);
    }

    return new LuceneCollectors(topDocsCollector, totalHitCountCollector, compositeCollector);
}