List of usage examples for org.apache.lucene.search.IndexSearcher#searchAfter
public TopDocs searchAfter(ScoreDoc after, Query query, int numHits) throws IOException
Finds the top n hits for query where all results are after a previous result (after).
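All of the call sites below follow the same pagination pattern: fetch the first page with search, then fetch every later page with searchAfter, passing the last ScoreDoc of the previous page as the cursor. Here is a minimal, self-contained sketch of that pattern (the Directory, Query, and page size are assumed to be supplied by the caller; the class and method names are illustrative only):
import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;

public class SearchAfterPagingSketch {

    /** Visits every hit for the query in pages of pageSize, using searchAfter as the paging cursor. */
    static void pageThroughResults(Directory dir, Query query, int pageSize) throws IOException {
        try (IndexReader reader = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(reader);
            ScoreDoc after = null; // null means "start at the first page"
            while (true) {
                TopDocs page = (after == null)
                        ? searcher.search(query, pageSize)
                        : searcher.searchAfter(after, query, pageSize);
                if (page.scoreDocs.length == 0) {
                    break; // no more hits
                }
                for (ScoreDoc sd : page.scoreDocs) {
                    Document doc = searcher.doc(sd.doc);
                    // process the stored fields of doc here
                }
                // the last ScoreDoc of this page becomes the cursor for the next call
                after = page.scoreDocs[page.scoreDocs.length - 1];
            }
        }
    }
}
Compared to fetching offset + pageSize hits and discarding the prefix, searchAfter only collects one page worth of hits per call, which is why the Jackrabbit Oak and Eclipse Che code below relies on it for deep or unbounded result sets.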
From source file:org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndex.java
License:Apache License
@Override public Cursor query(final IndexPlan plan, NodeState rootState) { final Filter filter = plan.getFilter(); FullTextExpression ft = filter.getFullTextConstraint(); final Set<String> relPaths = getRelativePaths(ft); if (relPaths.size() > 1) { return new MultiLuceneIndex(filter, rootState, relPaths).query(); }/* w w w . j a va 2 s. co m*/ final String parent = relPaths.size() == 0 ? "" : relPaths.iterator().next(); // we only restrict non-full-text conditions if there is // no relative property in the full-text constraint final boolean nonFullTextConstraints = parent.isEmpty(); final int parentDepth = getDepth(parent); QueryEngineSettings settings = filter.getQueryEngineSettings(); Iterator<LuceneResultRow> itr = new AbstractIterator<LuceneResultRow>() { private final Deque<LuceneResultRow> queue = Queues.newArrayDeque(); private final Set<String> seenPaths = Sets.newHashSet(); private ScoreDoc lastDoc; private int nextBatchSize = LUCENE_QUERY_BATCH_SIZE; private boolean noDocs = false; private long lastSearchIndexerVersion; @Override protected LuceneResultRow computeNext() { while (!queue.isEmpty() || loadDocs()) { return queue.remove(); } return endOfData(); } private LuceneResultRow convertToRow(ScoreDoc doc, IndexSearcher searcher, String excerpt) throws IOException { IndexReader reader = searcher.getIndexReader(); PathStoredFieldVisitor visitor = new PathStoredFieldVisitor(); reader.document(doc.doc, visitor); String path = visitor.getPath(); if (path != null) { if ("".equals(path)) { path = "/"; } if (!parent.isEmpty()) { // TODO OAK-828 this breaks node aggregation // get the base path // ensure the path ends with the given // relative path // if (!path.endsWith("/" + parent)) { // continue; // } path = getAncestorPath(path, parentDepth); // avoid duplicate entries if (seenPaths.contains(path)) { return null; } seenPaths.add(path); } return new LuceneResultRow(path, doc.score, excerpt); } return null; } /** * Loads the lucene documents in batches * @return true if any document is loaded */ private boolean loadDocs() { if (noDocs) { return false; } ScoreDoc lastDocToRecord = null; IndexNode indexNode = tracker.acquireIndexNode((String) plan.getAttribute(ATTR_INDEX_PATH)); checkState(indexNode != null); try { IndexSearcher searcher = indexNode.getSearcher(); LuceneRequestFacade luceneRequestFacade = getLuceneRequest(filter, searcher.getIndexReader(), nonFullTextConstraints, indexNode.getDefinition()); if (luceneRequestFacade.getLuceneRequest() instanceof Query) { Query query = (Query) luceneRequestFacade.getLuceneRequest(); TopDocs docs; long time = System.currentTimeMillis(); checkForIndexVersionChange(searcher); while (true) { if (lastDoc != null) { LOG.debug("loading the next {} entries for query {}", nextBatchSize, query); docs = searcher.searchAfter(lastDoc, query, nextBatchSize); } else { LOG.debug("loading the first {} entries for query {}", nextBatchSize, query); docs = searcher.search(query, nextBatchSize); } time = System.currentTimeMillis() - time; LOG.debug("... 
took {} ms", time); nextBatchSize = (int) Math.min(nextBatchSize * 2L, 100000); PropertyRestriction restriction = filter.getPropertyRestriction(QueryImpl.REP_EXCERPT); boolean addExcerpt = restriction != null && restriction.isNotNullRestriction(); Analyzer analyzer = indexNode.getDefinition().getAnalyzer(); if (addExcerpt) { // setup highlighter QueryScorer scorer = new QueryScorer(query); scorer.setExpandMultiTermQuery(true); highlighter.setFragmentScorer(scorer); } for (ScoreDoc doc : docs.scoreDocs) { String excerpt = null; if (addExcerpt) { excerpt = getExcerpt(analyzer, searcher, doc); } LuceneResultRow row = convertToRow(doc, searcher, excerpt); if (row != null) { queue.add(row); } lastDocToRecord = doc; } if (queue.isEmpty() && docs.scoreDocs.length > 0) { lastDoc = lastDocToRecord; } else { break; } } } else if (luceneRequestFacade.getLuceneRequest() instanceof SpellcheckHelper.SpellcheckQuery) { SpellcheckHelper.SpellcheckQuery spellcheckQuery = (SpellcheckHelper.SpellcheckQuery) luceneRequestFacade .getLuceneRequest(); noDocs = true; SuggestWord[] suggestWords = SpellcheckHelper.getSpellcheck(spellcheckQuery); // ACL filter spellchecks Collection<String> suggestedWords = new ArrayList<String>(suggestWords.length); QueryParser qp = new QueryParser(Version.LUCENE_47, FieldNames.SUGGEST, indexNode.getDefinition().getAnalyzer()); for (SuggestWord suggestion : suggestWords) { Query query = qp.createPhraseQuery(FieldNames.SUGGEST, suggestion.string); TopDocs topDocs = searcher.search(query, 100); if (topDocs.totalHits > 0) { for (ScoreDoc doc : topDocs.scoreDocs) { Document retrievedDoc = searcher.doc(doc.doc); if (filter.isAccessible(retrievedDoc.get(FieldNames.PATH))) { suggestedWords.add(suggestion.string); break; } } } } queue.add(new LuceneResultRow(suggestedWords)); } else if (luceneRequestFacade.getLuceneRequest() instanceof SuggestHelper.SuggestQuery) { SuggestHelper.SuggestQuery suggestQuery = (SuggestHelper.SuggestQuery) luceneRequestFacade .getLuceneRequest(); noDocs = true; List<Lookup.LookupResult> lookupResults = SuggestHelper .getSuggestions(indexNode.getLookup(), suggestQuery); // ACL filter suggestions Collection<String> suggestedWords = new ArrayList<String>(lookupResults.size()); QueryParser qp = new QueryParser(Version.LUCENE_47, FieldNames.FULLTEXT, indexNode.getDefinition().getAnalyzer()); for (Lookup.LookupResult suggestion : lookupResults) { Query query = qp.createPhraseQuery(FieldNames.FULLTEXT, suggestion.key.toString()); TopDocs topDocs = searcher.search(query, 100); if (topDocs.totalHits > 0) { for (ScoreDoc doc : topDocs.scoreDocs) { Document retrievedDoc = searcher.doc(doc.doc); if (filter.isAccessible(retrievedDoc.get(FieldNames.PATH))) { suggestedWords.add( "{term=" + suggestion.key + ",weight=" + suggestion.value + "}"); break; } } } } queue.add(new LuceneResultRow(suggestedWords)); } } catch (IOException e) { LOG.warn("query via {} failed.", LuceneIndex.this, e); } finally { indexNode.release(); } if (lastDocToRecord != null) { this.lastDoc = lastDocToRecord; } return !queue.isEmpty(); } private void checkForIndexVersionChange(IndexSearcher searcher) { long currentVersion = LucenePropertyIndex.getVersion(searcher); if (currentVersion != lastSearchIndexerVersion && lastDoc != null) { lastDoc = null; LOG.debug("Change in index version detected {} => {}. 
Query would be performed without " + "offset", currentVersion, lastSearchIndexerVersion); } this.lastSearchIndexerVersion = currentVersion; } }; SizeEstimator sizeEstimator = new SizeEstimator() { @Override public long getSize() { IndexNode indexNode = tracker.acquireIndexNode((String) plan.getAttribute(ATTR_INDEX_PATH)); checkState(indexNode != null); try { IndexSearcher searcher = indexNode.getSearcher(); LuceneRequestFacade luceneRequestFacade = getLuceneRequest(filter, searcher.getIndexReader(), nonFullTextConstraints, indexNode.getDefinition()); if (luceneRequestFacade.getLuceneRequest() instanceof Query) { Query query = (Query) luceneRequestFacade.getLuceneRequest(); TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.search(query, collector); int totalHits = collector.getTotalHits(); LOG.debug("Estimated size for query {} is {}", query, totalHits); return totalHits; } LOG.debug("Estimated size: not a Query: {}", luceneRequestFacade.getLuceneRequest()); } catch (IOException e) { LOG.warn("query via {} failed.", LuceneIndex.this, e); } finally { indexNode.release(); } return -1; } }; return new LucenePathCursor(itr, settings, sizeEstimator); }
From source file:org.apache.jackrabbit.oak.plugins.index.lucene.LucenePropertyIndex.java
License:Apache License
@Override public Cursor query(final IndexPlan plan, NodeState rootState) { final Filter filter = plan.getFilter(); final Sort sort = getSort(plan); final PlanResult pr = getPlanResult(plan); QueryEngineSettings settings = filter.getQueryEngineSettings(); Iterator<LuceneResultRow> itr = new AbstractIterator<LuceneResultRow>() { private final Deque<LuceneResultRow> queue = Queues.newArrayDeque(); private final Set<String> seenPaths = Sets.newHashSet(); private ScoreDoc lastDoc; private int nextBatchSize = LUCENE_QUERY_BATCH_SIZE; private boolean noDocs = false; private long lastSearchIndexerVersion; @Override/*from ww w .ja v a2s . co m*/ protected LuceneResultRow computeNext() { while (!queue.isEmpty() || loadDocs()) { return queue.remove(); } return endOfData(); } private LuceneResultRow convertToRow(ScoreDoc doc, IndexSearcher searcher, String excerpt, Facets facets, String explanation) throws IOException { IndexReader reader = searcher.getIndexReader(); //TODO Look into usage of field cache for retrieving the path //instead of reading via reader if no of docs in index are limited PathStoredFieldVisitor visitor = new PathStoredFieldVisitor(); reader.document(doc.doc, visitor); String path = visitor.getPath(); if (path != null) { if ("".equals(path)) { path = "/"; } if (pr.isPathTransformed()) { String originalPath = path; path = pr.transformPath(path); if (path == null) { LOG.trace("Ignoring path {} : Transformation returned null", originalPath); return null; } // avoid duplicate entries if (seenPaths.contains(path)) { LOG.trace("Ignoring path {} : Duplicate post transformation", originalPath); return null; } seenPaths.add(path); } LOG.trace("Matched path {}", path); return new LuceneResultRow(path, doc.score, excerpt, facets, explanation); } return null; } /** * Loads the lucene documents in batches * @return true if any document is loaded */ private boolean loadDocs() { if (noDocs) { return false; } ScoreDoc lastDocToRecord = null; final IndexNode indexNode = acquireIndexNode(plan); checkState(indexNode != null); try { IndexSearcher searcher = indexNode.getSearcher(); LuceneRequestFacade luceneRequestFacade = getLuceneRequest(plan, augmentorFactory, searcher.getIndexReader()); if (luceneRequestFacade.getLuceneRequest() instanceof Query) { Query query = (Query) luceneRequestFacade.getLuceneRequest(); CustomScoreQuery customScoreQuery = getCustomScoreQuery(plan, query); if (customScoreQuery != null) { query = customScoreQuery; } checkForIndexVersionChange(searcher); TopDocs docs; long start = PERF_LOGGER.start(); while (true) { if (lastDoc != null) { LOG.debug("loading the next {} entries for query {}", nextBatchSize, query); if (sort == null) { docs = searcher.searchAfter(lastDoc, query, nextBatchSize); } else { docs = searcher.searchAfter(lastDoc, query, nextBatchSize, sort); } } else { LOG.debug("loading the first {} entries for query {}", nextBatchSize, query); if (sort == null) { docs = searcher.search(query, nextBatchSize); } else { docs = searcher.search(query, nextBatchSize, sort); } } PERF_LOGGER.end(start, -1, "{} ...", docs.scoreDocs.length); nextBatchSize = (int) Math.min(nextBatchSize * 2L, 100000); long f = PERF_LOGGER.start(); Facets facets = FacetHelper.getFacets(searcher, query, docs, plan, indexNode.getDefinition().isSecureFacets()); PERF_LOGGER.end(f, -1, "facets retrieved"); PropertyRestriction restriction = filter.getPropertyRestriction(QueryImpl.REP_EXCERPT); boolean addExcerpt = restriction != null && restriction.isNotNullRestriction(); restriction = 
filter.getPropertyRestriction(QueryImpl.OAK_SCORE_EXPLANATION); boolean addExplain = restriction != null && restriction.isNotNullRestriction(); Analyzer analyzer = indexNode.getDefinition().getAnalyzer(); FieldInfos mergedFieldInfos = null; if (addExcerpt) { // setup highlighter QueryScorer scorer = new QueryScorer(query); scorer.setExpandMultiTermQuery(true); highlighter.setFragmentScorer(scorer); mergedFieldInfos = MultiFields.getMergedFieldInfos(searcher.getIndexReader()); } for (ScoreDoc doc : docs.scoreDocs) { String excerpt = null; if (addExcerpt) { excerpt = getExcerpt(query, analyzer, searcher, doc, mergedFieldInfos); } String explanation = null; if (addExplain) { explanation = searcher.explain(query, doc.doc).toString(); } LuceneResultRow row = convertToRow(doc, searcher, excerpt, facets, explanation); if (row != null) { queue.add(row); } lastDocToRecord = doc; } if (queue.isEmpty() && docs.scoreDocs.length > 0) { //queue is still empty but more results can be fetched //from Lucene so still continue lastDoc = lastDocToRecord; } else { break; } } } else if (luceneRequestFacade.getLuceneRequest() instanceof SpellcheckHelper.SpellcheckQuery) { String aclCheckField = indexNode.getDefinition().isFullTextEnabled() ? FieldNames.FULLTEXT : FieldNames.SPELLCHECK; noDocs = true; SpellcheckHelper.SpellcheckQuery spellcheckQuery = (SpellcheckHelper.SpellcheckQuery) luceneRequestFacade .getLuceneRequest(); SuggestWord[] suggestWords = SpellcheckHelper.getSpellcheck(spellcheckQuery); // ACL filter spellchecks QueryParser qp = new QueryParser(Version.LUCENE_47, aclCheckField, indexNode.getDefinition().getAnalyzer()); for (SuggestWord suggestion : suggestWords) { Query query = qp.createPhraseQuery(aclCheckField, QueryParserBase.escape(suggestion.string)); query = addDescendantClauseIfRequired(query, plan); TopDocs topDocs = searcher.search(query, 100); if (topDocs.totalHits > 0) { for (ScoreDoc doc : topDocs.scoreDocs) { Document retrievedDoc = searcher.doc(doc.doc); String prefix = filter.getPath(); if (prefix.length() == 1) { prefix = ""; } if (filter.isAccessible(prefix + retrievedDoc.get(FieldNames.PATH))) { queue.add(new LuceneResultRow(suggestion.string)); break; } } } } } else if (luceneRequestFacade.getLuceneRequest() instanceof SuggestHelper.SuggestQuery) { SuggestHelper.SuggestQuery suggestQuery = (SuggestHelper.SuggestQuery) luceneRequestFacade .getLuceneRequest(); noDocs = true; List<Lookup.LookupResult> lookupResults = SuggestHelper .getSuggestions(indexNode.getLookup(), suggestQuery); QueryParser qp = new QueryParser(Version.LUCENE_47, FieldNames.SUGGEST, indexNode.getDefinition().isSuggestAnalyzed() ? 
indexNode.getDefinition().getAnalyzer() : SuggestHelper.getAnalyzer()); // ACL filter suggestions for (Lookup.LookupResult suggestion : lookupResults) { Query query = qp.parse("\"" + QueryParserBase.escape(suggestion.key.toString()) + "\""); query = addDescendantClauseIfRequired(query, plan); TopDocs topDocs = searcher.search(query, 100); if (topDocs.totalHits > 0) { for (ScoreDoc doc : topDocs.scoreDocs) { Document retrievedDoc = searcher.doc(doc.doc); String prefix = filter.getPath(); if (prefix.length() == 1) { prefix = ""; } if (filter.isAccessible(prefix + retrievedDoc.get(FieldNames.PATH))) { queue.add(new LuceneResultRow(suggestion.key.toString(), suggestion.value)); break; } } } } } } catch (Exception e) { LOG.warn("query via {} failed.", LucenePropertyIndex.this, e); } finally { indexNode.release(); } if (lastDocToRecord != null) { this.lastDoc = lastDocToRecord; } return !queue.isEmpty(); } private void checkForIndexVersionChange(IndexSearcher searcher) { long currentVersion = getVersion(searcher); if (currentVersion != lastSearchIndexerVersion && lastDoc != null) { lastDoc = null; LOG.debug("Change in index version detected {} => {}. Query would be performed without " + "offset", currentVersion, lastSearchIndexerVersion); } this.lastSearchIndexerVersion = currentVersion; } }; SizeEstimator sizeEstimator = new SizeEstimator() { @Override public long getSize() { IndexNode indexNode = acquireIndexNode(plan); checkState(indexNode != null); try { IndexSearcher searcher = indexNode.getSearcher(); LuceneRequestFacade luceneRequestFacade = getLuceneRequest(plan, augmentorFactory, searcher.getIndexReader()); if (luceneRequestFacade.getLuceneRequest() instanceof Query) { Query query = (Query) luceneRequestFacade.getLuceneRequest(); TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.search(query, collector); int totalHits = collector.getTotalHits(); LOG.debug("Estimated size for query {} is {}", query, totalHits); return totalHits; } LOG.debug("estimate size: not a Query: {}", luceneRequestFacade.getLuceneRequest()); } catch (IOException e) { LOG.warn("query via {} failed.", LucenePropertyIndex.this, e); } finally { indexNode.release(); } return -1; } }; return new LucenePathCursor(itr, plan, settings, sizeEstimator); }
From source file:org.eclipse.che.api.vfs.search.impl.LuceneSearcher.java
License:Open Source License
@Override
public SearchResult search(QueryExpression query) throws ServerException {
    IndexSearcher luceneSearcher = null;
    try {
        final long startTime = System.currentTimeMillis();
        searcherManager.maybeRefresh();
        luceneSearcher = searcherManager.acquire();
        Query luceneQuery = createLuceneQuery(query);

        // Skip the requested number of documents, remembering the last ScoreDoc seen
        // so the actual page can be fetched with searchAfter.
        ScoreDoc after = null;
        final int numSkipDocs = Math.max(0, query.getSkipCount());
        if (numSkipDocs > 0) {
            after = skipScoreDocs(luceneSearcher, luceneQuery, numSkipDocs);
        }

        final int numDocs = query.getMaxItems() > 0 ? Math.min(query.getMaxItems(), RESULT_LIMIT) : RESULT_LIMIT;
        TopDocs topDocs = luceneSearcher.searchAfter(after, luceneQuery, numDocs);
        final int totalHitsNum = topDocs.totalHits;

        List<SearchResultEntry> results = newArrayList();
        for (int i = 0; i < topDocs.scoreDocs.length; i++) {
            ScoreDoc scoreDoc = topDocs.scoreDocs[i];
            String filePath = luceneSearcher.doc(scoreDoc.doc).getField("path").stringValue();
            results.add(new SearchResultEntry(filePath));
        }
        final long elapsedTimeMillis = System.currentTimeMillis() - startTime;

        boolean hasMoreToRetrieve = numSkipDocs + topDocs.scoreDocs.length + 1 < totalHitsNum;
        QueryExpression nextPageQueryExpression = null;
        if (hasMoreToRetrieve) {
            nextPageQueryExpression = createNextPageQuery(query, numSkipDocs + topDocs.scoreDocs.length);
        }

        return SearchResult.aSearchResult().withResults(results).withTotalHits(totalHitsNum)
                .withNextPageQueryExpression(nextPageQueryExpression).withElapsedTimeMillis(elapsedTimeMillis)
                .build();
    } catch (IOException e) {
        throw new ServerException(e.getMessage(), e);
    } finally {
        try {
            searcherManager.release(luceneSearcher);
        } catch (IOException e) {
            LOG.error(e.getMessage());
        }
    }
}
From source file:org.eclipse.che.api.vfs.search.impl.LuceneSearcher.java
License:Open Source License
private ScoreDoc skipScoreDocs(IndexSearcher luceneSearcher, Query luceneQuery, int numSkipDocs) throws IOException {
    final int readFrameSize = Math.min(numSkipDocs, RESULT_LIMIT);
    ScoreDoc scoreDoc = null;
    int retrievedDocs = 0;
    TopDocs topDocs;
    do {
        // Page forward one frame at a time until at least numSkipDocs documents have been read.
        topDocs = luceneSearcher.searchAfter(scoreDoc, luceneQuery, readFrameSize);
        if (topDocs.scoreDocs.length > 0) {
            scoreDoc = topDocs.scoreDocs[topDocs.scoreDocs.length - 1];
        }
        retrievedDocs += topDocs.scoreDocs.length;
    } while (retrievedDocs < numSkipDocs && topDocs.scoreDocs.length > 0);
    if (retrievedDocs > numSkipDocs) {
        // The last frame read past the skip count; move the cursor back inside that frame.
        int lastScoreDocIndex = topDocs.scoreDocs.length - (retrievedDocs - numSkipDocs);
        scoreDoc = topDocs.scoreDocs[lastScoreDocIndex];
    }
    return scoreDoc;
}
From source file:org.lukhnos.lucenestudy.Searcher.java
License:MIT License
SearchResult searchAfter(ScoreDoc lastScoreDoc, Query query, Sort sort, int maxCount) throws IOException {
    if (maxCount < 1) {
        throw new AssertionError("maxCount must be at least 1, but instead: " + maxCount);
    }
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TopDocs topDocs;
    // Ask for one extra hit so we can tell whether another page exists.
    int actualMaxCount = maxCount + 1;
    if (lastScoreDoc == null) {
        if (sort == null) {
            topDocs = searcher.search(query, actualMaxCount);
        } else {
            topDocs = searcher.search(query, actualMaxCount, sort);
        }
    } else {
        if (sort == null) {
            topDocs = searcher.searchAfter(lastScoreDoc, query, actualMaxCount);
        } else {
            topDocs = searcher.searchAfter(lastScoreDoc, query, actualMaxCount, sort);
        }
    }
    ScoreDoc nextSearchAfterDoc = null;
    int topDocsLen;
    if (topDocs.scoreDocs.length > maxCount) {
        nextSearchAfterDoc = topDocs.scoreDocs[maxCount - 1];
        topDocsLen = maxCount;
    } else {
        topDocsLen = topDocs.scoreDocs.length;
    }
    HighlightingHelper highlightingHelper = new HighlightingHelper(query, analyzer);
    List<Document> docs = new ArrayList<>();
    for (int i = 0; i < topDocsLen; i++) {
        org.apache.lucene.document.Document luceneDoc = indexReader.document(topDocs.scoreDocs[i].doc);
        Document doc = Indexer.fromLuceneDocument(luceneDoc);
        docs.add(doc);
    }
    return new SearchResult(topDocs.totalHits, docs, nextSearchAfterDoc, query, sort, highlightingHelper);
}
From source file:org.neo4j.kernel.api.impl.index.PageOfRangesIteratorTest.java
License:Open Source License
@Test
public void shouldReadPagesOfDocumentsFromSearcher() throws Exception {
    final int labelId = 7;
    final int pageSize = 2;

    // given
    Query query = mock(Query.class);
    IndexSearcher searcher = mock(IndexSearcher.class);
    ScoreDoc doc1 = new ScoreDoc(37, 0.0f);
    ScoreDoc doc2 = new ScoreDoc(16, 0.0f);
    ScoreDoc doc3 = new ScoreDoc(11, 0.0f);
    when(searcher.searchAfter(any(ScoreDoc.class), same(query), anyInt())).thenReturn(docs(doc1, doc2), // page1
            docs(doc3) // page2
    );
    when(searcher.doc(37)).thenReturn(document(format.rangeField(0x1), format.labelField(labelId, 0x01)));
    when(searcher.doc(16)).thenReturn(document(format.rangeField(0x2), format.labelField(labelId, 0x03)));
    when(searcher.doc(11)).thenReturn(document(format.rangeField(0x3), format.labelField(labelId, 0x30)));
    PrimitiveLongIterator iterator = concat(
            new PageOfRangesIterator(format, searcher, pageSize, query, labelId));

    // when
    List<Long> longs = primitivesList(iterator);

    // then
    assertEquals(asList(/*doc1:*/(1L << format.bitmapFormat().shift),
            /*doc2:*/(2L << format.bitmapFormat().shift), (2L << format.bitmapFormat().shift) + 1,
            /*doc3:*/(3L << format.bitmapFormat().shift) + 4, (3L << format.bitmapFormat().shift) + 5), longs);
    ArgumentCaptor<ScoreDoc> prefixCollector = ArgumentCaptor.forClass(ScoreDoc.class);
    verify(searcher, times(2)).searchAfter(prefixCollector.capture(), same(query), eq(2));
    assertEquals(asList(null, doc2), prefixCollector.getAllValues());
    verify(searcher, times(3)).doc(anyInt());
    verifyNoMoreInteractions(searcher);
}
From source file:org.talend.dataquality.semantic.api.LocalDictionaryCache.java
License:Open Source License
private TopDocs sendListDocumentsQuery(String categoryName, int offset, int n) throws IOException {
    mgr.maybeRefresh();
    IndexSearcher searcher = mgr.acquire();
    TopDocs result = null;
    if (offset <= 0) {
        result = searcher.search(getListDocumentsQuery(categoryName), n);
    } else {
        TopDocs topDocs = searcher.search(getListDocumentsQuery(categoryName), offset + n);
        Query q = new TermQuery(new Term(DictionarySearcher.F_WORD, categoryName));
        result = searcher.searchAfter(topDocs.scoreDocs[Math.min(topDocs.totalHits, offset) - 1], q, n);
    }
    mgr.release(searcher);
    return result;
}