List of usage examples for org.apache.lucene.search.IndexSearcher#getIndexReader
public IndexReader getIndexReader()
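Before the collected examples below, a minimal sketch of the typical call pattern: getIndexReader() returns the reader the searcher was built over, which is useful for reader-level statistics and for closing the reader once searching is done. The index path, field name, and query term in this sketch are illustrative assumptions and are not taken from any of the source files that follow.

// A hedged sketch, not from the sources below: open a searcher over an existing
// index, run a query, then use getIndexReader() to inspect and close the reader.
// "/tmp/example-index", "value" and "foo" are made-up names.
import java.nio.file.Paths;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.FSDirectory;

public class GetIndexReaderExample {
    public static void main(String[] args) throws Exception {
        DirectoryReader reader = DirectoryReader.open(FSDirectory.open(Paths.get("/tmp/example-index")));
        IndexSearcher searcher = new IndexSearcher(reader);
        TopDocs topDocs = searcher.search(new TermQuery(new Term("value", "foo")), 10);
        // getIndexReader() exposes the reader the searcher wraps.
        System.out.println(topDocs.totalHits + " hits out of " + searcher.getIndexReader().numDocs() + " docs");
        // The searcher does not own the reader, so close it explicitly when done.
        searcher.getIndexReader().close();
    }
}

Most of the test examples below end the same way, releasing the index through searcher.getIndexReader().close().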
From source file: org.elasticsearch.common.lucene.search.ExtendedIndexSearcher.java
License: Apache License

public ExtendedIndexSearcher(IndexSearcher searcher) {
    super(searcher.getIndexReader());
    setSimilarity(searcher.getSimilarity());
}
From source file: org.elasticsearch.index.fielddata.AbstractStringFieldDataTestCase.java
License: Apache License

public void testActualMissingValue(boolean reverse) throws IOException {
    // missing value is set to an actual value
    final String[] values = new String[randomIntBetween(2, 30)];
    for (int i = 1; i < values.length; ++i) {
        values[i] = TestUtil.randomUnicodeString(getRandom());
    }
    // index random documents; a null value leaves the "value" field missing
    final int numDocs = scaledRandomIntBetween(10, 3072);
    for (int i = 0; i < numDocs; ++i) {
        final String value = RandomPicks.randomFrom(getRandom(), values);
        if (value == null) {
            writer.addDocument(new Document());
        } else {
            Document d = new Document();
            addField(d, "value", value);
            writer.addDocument(d);
        }
        if (randomInt(10) == 0) {
            writer.commit();
        }
    }
    final IndexFieldData indexFieldData = getForField("value");
    final String missingValue = values[1];
    // sort on the field, substituting missingValue for documents without it
    IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
    XFieldComparatorSource comparator = indexFieldData.comparatorSource(missingValue, MultiValueMode.MIN, null);
    TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(),
            randomBoolean() ? numDocs : randomIntBetween(10, numDocs),
            new Sort(new SortField("value", comparator, reverse)));
    assertEquals(numDocs, topDocs.totalHits);
    // verify the hits come back in sorted order
    BytesRef previousValue = reverse ? UnicodeUtil.BIG_TERM : new BytesRef();
    for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
        final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value");
        final BytesRef value = new BytesRef(docValue == null ? missingValue : docValue);
        if (reverse) {
            assertTrue(previousValue.compareTo(value) >= 0);
        } else {
            assertTrue(previousValue.compareTo(value) <= 0);
        }
        previousValue = value;
    }
    // the searcher does not own the reader, so close it via getIndexReader()
    searcher.getIndexReader().close();
}
From source file: org.elasticsearch.index.fielddata.AbstractStringFieldDataTestCase.java
License: Apache License

public void testSortMissing(boolean first, boolean reverse) throws IOException {
    final String[] values = new String[randomIntBetween(2, 10)];
    for (int i = 1; i < values.length; ++i) {
        values[i] = TestUtil.randomUnicodeString(getRandom());
    }
    // index random documents; a null value leaves the "value" field missing
    final int numDocs = scaledRandomIntBetween(10, 3072);
    for (int i = 0; i < numDocs; ++i) {
        final String value = RandomPicks.randomFrom(getRandom(), values);
        if (value == null) {
            writer.addDocument(new Document());
        } else {
            Document d = new Document();
            addField(d, "value", value);
            writer.addDocument(d);
        }
        if (randomInt(10) == 0) {
            writer.commit();
        }
    }
    final IndexFieldData indexFieldData = getForField("value");
    // sort with documents that miss the field placed first or last
    IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
    XFieldComparatorSource comparator = indexFieldData.comparatorSource(first ? "_first" : "_last", MultiValueMode.MIN, null);
    TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(),
            randomBoolean() ? numDocs : randomIntBetween(10, numDocs),
            new Sort(new SortField("value", comparator, reverse)));
    assertEquals(numDocs, topDocs.totalHits);
    // verify ordering; null values may only appear at the expected end
    BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef();
    for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
        final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value");
        if (first && docValue == null) {
            assertNull(previousValue);
        } else if (!first && docValue != null) {
            assertNotNull(previousValue);
        }
        final BytesRef value = docValue == null ? null : new BytesRef(docValue);
        if (previousValue != null && value != null) {
            if (reverse) {
                assertTrue(previousValue.compareTo(value) >= 0);
            } else {
                assertTrue(previousValue.compareTo(value) <= 0);
            }
        }
        previousValue = value;
    }
    searcher.getIndexReader().close();
}
From source file: org.elasticsearch.index.fielddata.AbstractStringFieldDataTestCase.java
License: Apache License

public void testNestedSorting(MultiValueMode sortMode) throws IOException {
    final String[] values = new String[randomIntBetween(2, 20)];
    for (int i = 0; i < values.length; ++i) {
        values[i] = TestUtil.randomSimpleString(getRandom());
    }
    // index blocks of child documents, each followed by its parent document
    final int numParents = scaledRandomIntBetween(10, 3072);
    List<Document> docs = new ArrayList<>();
    FixedBitSet parents = new FixedBitSet(64);
    for (int i = 0; i < numParents; ++i) {
        docs.clear();
        final int numChildren = randomInt(4);
        for (int j = 0; j < numChildren; ++j) {
            final Document child = new Document();
            final int numValues = randomInt(3);
            for (int k = 0; k < numValues; ++k) {
                final String value = RandomPicks.randomFrom(getRandom(), values);
                addField(child, "text", value);
            }
            docs.add(child);
        }
        final Document parent = new Document();
        parent.add(new StringField("type", "parent", Store.YES));
        final String value = RandomPicks.randomFrom(getRandom(), values);
        if (value != null) {
            addField(parent, "text", value);
        }
        docs.add(parent);
        int bit = parents.prevSetBit(parents.length() - 1) + docs.size();
        parents = FixedBitSet.ensureCapacity(parents, bit);
        parents.set(bit);
        writer.addDocuments(docs);
        if (randomInt(10) == 0) {
            writer.commit();
        }
    }
    DirectoryReader directoryReader = DirectoryReader.open(writer, true);
    directoryReader = ElasticsearchDirectoryReader.wrap(directoryReader, new ShardId(new Index("test"), 0));
    IndexSearcher searcher = new IndexSearcher(directoryReader);
    IndexFieldData<?> fieldData = getForField("text");
    // pick how documents without a "text" value should sort
    final Object missingValue;
    switch (randomInt(4)) {
    case 0:
        missingValue = "_first";
        break;
    case 1:
        missingValue = "_last";
        break;
    case 2:
        missingValue = new BytesRef(RandomPicks.randomFrom(getRandom(), values));
        break;
    default:
        missingValue = new BytesRef(TestUtil.randomSimpleString(getRandom()));
        break;
    }
    // join children to their parents and sort parents by the children's "text" values
    Query parentFilter = new TermQuery(new Term("type", "parent"));
    Query childFilter = Queries.not(parentFilter);
    Nested nested = createNested(searcher, parentFilter, childFilter);
    BytesRefFieldComparatorSource nestedComparatorSource = new BytesRefFieldComparatorSource(fieldData, missingValue,
            sortMode, nested);
    ToParentBlockJoinQuery query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter),
            new QueryBitSetProducer(parentFilter), ScoreMode.None);
    Sort sort = new Sort(new SortField("text", nestedComparatorSource));
    TopFieldDocs topDocs = searcher.search(query, randomIntBetween(1, numParents), sort);
    assertTrue(topDocs.scoreDocs.length > 0);
    // recompute the expected per-parent min/max and check the resulting order
    BytesRef previous = null;
    for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
        final int docID = topDocs.scoreDocs[i].doc;
        assertTrue("expected " + docID + " to be a parent", parents.get(docID));
        BytesRef cmpValue = null;
        for (int child = parents.prevSetBit(docID - 1) + 1; child < docID; ++child) {
            String[] sVals = searcher.doc(child).getValues("text");
            final BytesRef[] vals;
            if (sVals.length == 0) {
                vals = new BytesRef[0];
            } else {
                vals = new BytesRef[sVals.length];
                for (int j = 0; j < vals.length; ++j) {
                    vals[j] = new BytesRef(sVals[j]);
                }
            }
            for (BytesRef value : vals) {
                if (cmpValue == null) {
                    cmpValue = value;
                } else if (sortMode == MultiValueMode.MIN && value.compareTo(cmpValue) < 0) {
                    cmpValue = value;
                } else if (sortMode == MultiValueMode.MAX && value.compareTo(cmpValue) > 0) {
                    cmpValue = value;
                }
            }
        }
        if (cmpValue == null) {
            if ("_first".equals(missingValue)) {
                cmpValue = new BytesRef();
            } else if ("_last".equals(missingValue) == false) {
                cmpValue = (BytesRef) missingValue;
            }
        }
        if (previous != null && cmpValue != null) {
            assertTrue(previous.utf8ToString() + " / " + cmpValue.utf8ToString(), previous.compareTo(cmpValue) <= 0);
        }
        previous = cmpValue;
    }
    searcher.getIndexReader().close();
}
From source file: org.elasticsearch.index.fielddata.AbstractStringFieldDataTests.java
License: Apache License

public void testActualMissingValue(boolean reverse) throws IOException {
    // missing value is set to an actual value
    Document d = new Document();
    final StringField s = new StringField("value", "", Field.Store.YES);
    d.add(s);
    final String[] values = new String[randomIntBetween(2, 30)];
    for (int i = 1; i < values.length; ++i) {
        values[i] = _TestUtil.randomUnicodeString(getRandom());
    }
    // index random documents, reusing the same Document; a null value leaves the field missing
    final int numDocs = atLeast(100);
    for (int i = 0; i < numDocs; ++i) {
        final String value = RandomPicks.randomFrom(getRandom(), values);
        if (value == null) {
            writer.addDocument(new Document());
        } else {
            s.setStringValue(value);
            writer.addDocument(d);
        }
        if (randomInt(10) == 0) {
            writer.commit();
        }
    }
    final IndexFieldData indexFieldData = getForField("value");
    final String missingValue = values[1];
    IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
    XFieldComparatorSource comparator = indexFieldData.comparatorSource(missingValue, SortMode.MIN);
    TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(),
            randomBoolean() ? numDocs : randomIntBetween(10, numDocs),
            new Sort(new SortField("value", comparator, reverse)));
    assertEquals(numDocs, topDocs.totalHits);
    // verify the hits come back in sorted order
    BytesRef previousValue = reverse ? UnicodeUtil.BIG_TERM : new BytesRef();
    for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
        final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value");
        final BytesRef value = new BytesRef(docValue == null ? missingValue : docValue);
        if (reverse) {
            assertTrue(previousValue.compareTo(value) >= 0);
        } else {
            assertTrue(previousValue.compareTo(value) <= 0);
        }
        previousValue = value;
    }
    searcher.getIndexReader().close();
}
From source file: org.elasticsearch.index.fielddata.AbstractStringFieldDataTests.java
License: Apache License

public void testSortMissing(boolean first, boolean reverse) throws IOException {
    Document d = new Document();
    final StringField s = new StringField("value", "", Field.Store.YES);
    d.add(s);
    final String[] values = new String[randomIntBetween(2, 10)];
    for (int i = 1; i < values.length; ++i) {
        values[i] = _TestUtil.randomUnicodeString(getRandom());
    }
    // index random documents; a null value leaves the "value" field missing
    final int numDocs = atLeast(100);
    for (int i = 0; i < numDocs; ++i) {
        final String value = RandomPicks.randomFrom(getRandom(), values);
        if (value == null) {
            writer.addDocument(new Document());
        } else {
            s.setStringValue(value);
            writer.addDocument(d);
        }
        if (randomInt(10) == 0) {
            writer.commit();
        }
    }
    final IndexFieldData indexFieldData = getForField("value");
    // sort with documents that miss the field placed first or last
    IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
    XFieldComparatorSource comparator = indexFieldData.comparatorSource(first ? "_first" : "_last", SortMode.MIN);
    TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(),
            randomBoolean() ? numDocs : randomIntBetween(10, numDocs),
            new Sort(new SortField("value", comparator, reverse)));
    assertEquals(numDocs, topDocs.totalHits);
    BytesRef previousValue = first ? null : reverse ? UnicodeUtil.BIG_TERM : new BytesRef();
    for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
        final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value");
        if (first && docValue == null) {
            assertNull(previousValue);
        } else if (!first && docValue != null) {
            assertNotNull(previousValue);
        }
        final BytesRef value = docValue == null ? null : new BytesRef(docValue);
        if (previousValue != null && value != null) {
            if (reverse) {
                assertTrue(previousValue.compareTo(value) >= 0);
            } else {
                assertTrue(previousValue.compareTo(value) <= 0);
            }
        }
        previousValue = value;
    }
    searcher.getIndexReader().close();
}
From source file: org.elasticsearch.index.fielddata.AbstractStringFieldDataTests.java
License: Apache License

public void testNestedSorting(SortMode sortMode) throws IOException {
    final String[] values = new String[randomIntBetween(2, 20)];
    for (int i = 0; i < values.length; ++i) {
        values[i] = _TestUtil.randomSimpleString(getRandom());
    }
    // index blocks of child documents, each followed by its parent document
    final int numParents = atLeast(100);
    List<Document> docs = new ArrayList<Document>();
    final OpenBitSet parents = new OpenBitSet();
    for (int i = 0; i < numParents; ++i) {
        docs.clear();
        final int numChildren = randomInt(4);
        for (int j = 0; j < numChildren; ++j) {
            final Document child = new Document();
            final int numValues = randomInt(3);
            for (int k = 0; k < numValues; ++k) {
                final String value = RandomPicks.randomFrom(getRandom(), values);
                child.add(new StringField("text", value, Store.YES));
            }
            docs.add(child);
        }
        final Document parent = new Document();
        parent.add(new StringField("type", "parent", Store.YES));
        final String value = RandomPicks.randomFrom(getRandom(), values);
        if (value != null) {
            parent.add(new StringField("text", value, Store.YES));
        }
        docs.add(parent);
        parents.set(parents.prevSetBit(parents.length() - 1) + docs.size());
        writer.addDocuments(docs);
        if (randomInt(10) == 0) {
            writer.commit();
        }
    }
    IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer, true));
    IndexFieldData<?> fieldData = getForField("text");
    // pick how documents without a "text" value should sort
    final BytesRef missingValue;
    switch (randomInt(4)) {
    case 0:
        missingValue = new BytesRef();
        break;
    case 1:
        missingValue = BytesRefFieldComparatorSource.MAX_TERM;
        break;
    case 2:
        missingValue = new BytesRef(RandomPicks.randomFrom(getRandom(), values));
        break;
    default:
        missingValue = new BytesRef(_TestUtil.randomSimpleString(getRandom()));
        break;
    }
    // join children to their parents and sort parents by the children's "text" values
    BytesRefFieldComparatorSource innerSource = new BytesRefFieldComparatorSource(fieldData, missingValue, sortMode);
    Filter parentFilter = new TermFilter(new Term("type", "parent"));
    Filter childFilter = new NotFilter(parentFilter);
    NestedFieldComparatorSource nestedComparatorSource = new NestedFieldComparatorSource(sortMode, innerSource,
            parentFilter, childFilter);
    ToParentBlockJoinQuery query = new ToParentBlockJoinQuery(
            new XFilteredQuery(new MatchAllDocsQuery(), childFilter),
            new FixedBitSetCachingWrapperFilter(parentFilter), ScoreMode.None);
    Sort sort = new Sort(new SortField("text", nestedComparatorSource));
    TopFieldDocs topDocs = searcher.search(query, randomIntBetween(1, numParents), sort);
    assertTrue(topDocs.scoreDocs.length > 0);
    // recompute the expected per-parent min/max and check the resulting order
    BytesRef previous = null;
    for (int i = 0; i < topDocs.scoreDocs.length; ++i) {
        final int docID = topDocs.scoreDocs[i].doc;
        assertTrue("expected " + docID + " to be a parent", parents.get(docID));
        BytesRef cmpValue = null;
        for (int child = parents.prevSetBit(docID - 1) + 1; child < docID; ++child) {
            String[] vals = searcher.doc(child).getValues("text");
            if (vals.length == 0) {
                vals = new String[] { missingValue.utf8ToString() };
            }
            for (String value : vals) {
                final BytesRef bytesValue = new BytesRef(value);
                if (cmpValue == null) {
                    cmpValue = bytesValue;
                } else if (sortMode == SortMode.MIN && bytesValue.compareTo(cmpValue) < 0) {
                    cmpValue = bytesValue;
                } else if (sortMode == SortMode.MAX && bytesValue.compareTo(cmpValue) > 0) {
                    cmpValue = bytesValue;
                }
            }
        }
        if (cmpValue == null) {
            cmpValue = missingValue;
        }
        if (previous != null) {
            assertNotNull(cmpValue);
            assertTrue(previous.utf8ToString() + " / " + cmpValue.utf8ToString(), previous.compareTo(cmpValue) <= 0);
        }
        previous = cmpValue;
    }
    searcher.getIndexReader().close();
}
From source file: org.elasticsearch.index.percolator.PercolatorExecutor.java
License: Apache License

public Response percolate(DocAndQueryRequest request) throws ElasticSearchException {
    // first, parse the source doc into a MemoryIndex
    final CustomMemoryIndex memoryIndex = new CustomMemoryIndex();
    // TODO: This means percolation does not support nested docs...
    for (Fieldable field : request.doc().masterDoc().getFields()) {
        if (!field.isIndexed()) {
            continue;
        }
        // no need to index the UID field
        if (field.name().equals(UidFieldMapper.NAME)) {
            continue;
        }
        TokenStream tokenStream = field.tokenStreamValue();
        if (tokenStream != null) {
            memoryIndex.addField(field.name(), tokenStream, field.getBoost());
        } else {
            Reader reader = field.readerValue();
            if (reader != null) {
                try {
                    memoryIndex.addField(field.name(),
                            request.doc().analyzer().reusableTokenStream(field.name(), reader),
                            field.getBoost() * request.doc().masterDoc().getBoost());
                } catch (IOException e) {
                    throw new MapperParsingException("Failed to analyze field [" + field.name() + "]", e);
                }
            } else {
                String value = field.stringValue();
                if (value != null) {
                    try {
                        memoryIndex.addField(field.name(),
                                request.doc().analyzer().reusableTokenStream(field.name(), new FastStringReader(value)),
                                field.getBoost() * request.doc().masterDoc().getBoost());
                    } catch (IOException e) {
                        throw new MapperParsingException("Failed to analyze field [" + field.name() + "]", e);
                    }
                }
            }
        }
    }
    final IndexSearcher searcher = memoryIndex.createSearcher();
    List<String> matches = new ArrayList<String>();
    if (request.query() == null) {
        // no filtering query: run every registered percolator query against the in-memory doc
        Lucene.ExistsCollector collector = new Lucene.ExistsCollector();
        for (Map.Entry<String, Query> entry : queries.entrySet()) {
            collector.reset();
            try {
                searcher.search(entry.getValue(), collector);
            } catch (IOException e) {
                logger.warn("[" + entry.getKey() + "] failed to execute query", e);
            }
            if (collector.exists()) {
                matches.add(entry.getKey());
            }
        }
    } else {
        // otherwise restrict the candidate queries by searching the percolator index first
        IndexService percolatorIndex = indicesService.indexService(PercolatorService.INDEX_NAME);
        if (percolatorIndex == null) {
            throw new PercolateIndexUnavailable(new Index(PercolatorService.INDEX_NAME));
        }
        if (percolatorIndex.numberOfShards() == 0) {
            throw new PercolateIndexUnavailable(new Index(PercolatorService.INDEX_NAME));
        }
        IndexShard percolatorShard = percolatorIndex.shard(0);
        Engine.Searcher percolatorSearcher = percolatorShard.searcher();
        try {
            percolatorSearcher.searcher().search(request.query(),
                    new QueryCollector(logger, queries, searcher, percolatorIndex, matches));
        } catch (IOException e) {
            logger.warn("failed to execute", e);
        } finally {
            percolatorSearcher.release();
        }
    }
    // drop any cache entries keyed on the MemoryIndex reader
    indexCache.clear(searcher.getIndexReader());
    return new Response(matches, request.doc().mappersAdded());
}
From source file: org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase.java
License: Apache License

@Override
public void hitsExecute(SearchContext context, InternalSearchHit[] hits) {
    PercolateQuery percolateQuery = locatePercolatorQuery(context.query());
    if (percolateQuery == null) {
        // shouldn't happen as we checked for the existence of a percolator query in hitsExecutionNeeded(...)
        throw new IllegalStateException("couldn't locate percolator query");
    }
    List<LeafReaderContext> ctxs = context.searcher().getIndexReader().leaves();
    PercolatorQueryCache queriesRegistry = context.percolatorQueryCache();
    IndexSearcher percolatorIndexSearcher = percolateQuery.getPercolatorIndexSearcher();
    LeafReaderContext percolatorLeafReaderContext = percolatorIndexSearcher.getIndexReader().leaves().get(0);
    FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
    SubSearchContext subSearchContext = createSubSearchContext(context, percolatorLeafReaderContext,
            percolateQuery.getDocumentSource());
    for (InternalSearchHit hit : hits) {
        LeafReaderContext ctx = ctxs.get(ReaderUtil.subIndex(hit.docId(), ctxs));
        int segmentDocId = hit.docId() - ctx.docBase;
        Query query = queriesRegistry.getQueries(ctx).getQuery(segmentDocId);
        if (query != null) {
            subSearchContext.parsedQuery(new ParsedQuery(query));
            hitContext.reset(
                    new InternalSearchHit(0, "unknown", new Text(percolateQuery.getDocumentType()), Collections.emptyMap()),
                    percolatorLeafReaderContext, 0, percolatorIndexSearcher);
            hitContext.cache().clear();
            highlightPhase.hitExecute(subSearchContext, hitContext);
            hit.highlightFields().putAll(hitContext.hit().getHighlightFields());
        }
    }
}
From source file: org.elasticsearch.index.query.functionscore.FunctionScoreTests.java
License: Apache License

public Explanation getFunctionScoreExplanation(IndexSearcher searcher, ScoreFunction scoreFunction) throws IOException {
    FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(new TermQuery(TERM), scoreFunction, 0.0f);
    functionScoreQuery.setCombineFunction(CombineFunction.AVG);
    Weight weight = searcher.createNormalizedWeight(functionScoreQuery, true);
    Explanation explanation = weight.explain(searcher.getIndexReader().leaves().get(0), 0);
    return explanation.getDetails()[1];
}