List of usage examples for org.apache.lucene.search.DocIdSet
DocIdSet
From source file:cn.hbu.cs.esearch.store.LuceneStore.java
License:Apache License
@Override protected void persistDelete(long uid) throws IOException { final int docid = mapDocId(uid); if (docid < 0) { return;/*from ww w . j a v a2 s. c o m*/ } Query deleteQ = new ConstantScoreQuery(new Filter() { @Override public DocIdSet getDocIdSet(AtomicReaderContext readerCtx, Bits acceptedDocs) throws IOException { return new DocIdSet() { @Override public DocIdSetIterator iterator() throws IOException { return new DocIdSetIterator() { int currId = -1; @Override public int nextDoc() throws IOException { if (currId == -1) { currId = docid; } else { currId = DocIdSetIterator.NO_MORE_DOCS; } return currId; } @Override public int docID() { return currId; } @Override public int advance(int target) throws IOException { if (currId != DocIdSetIterator.NO_MORE_DOCS) { if (target < docid) { currId = docid; } else { currId = DocIdSetIterator.NO_MORE_DOCS; } } return currId; } @Override public long cost() { // TODO Auto-generated method stub return 0; } }; } }; } }); indexWriter.deleteDocuments(deleteQ); if (currentReaderData != null) { currentReaderData.uidMap.remove(uid); } }
From source file:com.senseidb.indexing.activity.deletion.PurgeFilterWrapper.java
License:Apache License
/**
 * Wraps the delegate filter's doc id set so that every document produced by
 * the iterator is also reported to the deletion listener, translated to its
 * Zoie UID via the reader.
 */
@Override
public DocIdSet getDocIdSet(final IndexReader reader) throws IOException {
    final ZoieIndexReader zoieReader = (ZoieIndexReader) reader;
    return new DocIdSet() {
        public DocIdSetIterator iterator() throws IOException {
            final DocIdSetIterator delegate = internal.getDocIdSet(reader).iterator();
            // NOTE(review): "handeDoc" is the (misspelled) hook declared by
            // DocIdSetIteratorWrapper elsewhere in the project; the name must
            // stay as-is to keep overriding it.
            return new DocIdSetIteratorWrapper(delegate) {
                @Override
                protected void handeDoc(int matchedDoc) {
                    deletionListener.onDelete(reader, zoieReader.getUID(matchedDoc));
                }
            };
        }
    };
}
From source file:lucene.security.search.DocumentVisibilityFilter.java
License:Apache License
/**
 * Returns a DocIdSet that is the logical OR (union) of all sets in {@code list}.
 *
 * Every input set must expose non-null {@link Bits} of identical length; the
 * returned set's bits() answers true if any input's bit is set. The returned
 * iterator merges the input iterators by repeatedly advancing those behind the
 * target and taking the smallest current docID (via sort with COMPARATOR, a
 * docID-ordering comparator declared elsewhere in this class).
 *
 * @param list the doc id sets to union; may be empty
 * @return the union set (the single input itself when list has one element)
 * @throws IOException if any set has null bits or mismatched bits lengths
 */
public static DocIdSet getLogicalOr(final List<DocIdSet> list) throws IOException {
    if (list.size() == 0) {
        return DocIdSet.EMPTY_DOCIDSET;
    }
    if (list.size() == 1) {
        // Single input: validate its bits are usable, then return it unchanged.
        DocIdSet docIdSet = list.get(0);
        Bits bits = docIdSet.bits();
        if (bits == null) {
            throw new IOException("Bits are not allowed to be null for DocIdSet [" + docIdSet + "].");
        }
        return docIdSet;
    }
    int index = 0;
    final Bits[] bitsArray = new Bits[list.size()];
    int length = -1;
    // Collect the Bits of every set, enforcing non-null and equal lengths.
    for (DocIdSet docIdSet : list) {
        Bits bits = docIdSet.bits();
        if (bits == null) {
            throw new IOException("Bits are not allowed to be null for DocIdSet [" + docIdSet + "].");
        }
        bitsArray[index] = bits;
        index++;
        if (length < 0) {
            length = bits.length();
        } else if (length != bits.length()) {
            throw new IOException(
                    "Bits length need to be the same [" + length + "] and [" + bits.length() + "]");
        }
    }
    final int len = length;
    return new DocIdSet() {
        @Override
        public Bits bits() throws IOException {
            // Random-access view: true if any underlying Bits has the bit set.
            return new Bits() {
                @Override
                public boolean get(int index) {
                    for (int i = 0; i < bitsArray.length; i++) {
                        if (bitsArray[i].get(index)) {
                            return true;
                        }
                    }
                    return false;
                }

                @Override
                public int length() {
                    return len;
                }
            };
        }

        @Override
        public boolean isCacheable() {
            return true;
        }

        @Override
        public DocIdSetIterator iterator() throws IOException {
            final DocIdSetIterator[] docIdSetIteratorArray = new DocIdSetIterator[list.size()];
            long c = 0;
            int index = 0;
            // Prime every iterator onto its first doc and accumulate total cost.
            for (DocIdSet docIdSet : list) {
                DocIdSetIterator iterator = docIdSet.iterator();
                iterator.nextDoc();
                docIdSetIteratorArray[index] = iterator;
                c += iterator.cost();
                index++;
            }
            final long cost = c;
            return new DocIdSetIterator() {
                private int _docId = -1;

                @Override
                public int advance(int target) throws IOException {
                    // Bring every lagging iterator up to target, then the
                    // smallest current docID (after sorting) is the union's next doc.
                    callAdvanceOnAllThatAreBehind(target);
                    Arrays.sort(docIdSetIteratorArray, COMPARATOR);
                    DocIdSetIterator iterator = docIdSetIteratorArray[0];
                    return _docId = iterator.docID();
                }

                private void callAdvanceOnAllThatAreBehind(int target) throws IOException {
                    for (int i = 0; i < docIdSetIteratorArray.length; i++) {
                        DocIdSetIterator iterator = docIdSetIteratorArray[i];
                        if (iterator.docID() < target) {
                            iterator.advance(target);
                        }
                    }
                }

                @Override
                public int nextDoc() throws IOException {
                    // next = first doc at or after current + 1
                    return advance(_docId + 1);
                }

                @Override
                public int docID() {
                    return _docId;
                }

                @Override
                public long cost() {
                    return cost;
                }
            };
        }
    };
}
From source file:org.apache.solr.search.BitDocSet.java
License:Apache License
/**
 * Exposes this bit-set backed DocSet as a top-level Lucene Filter.
 *
 * The underlying OpenBitSet holds global (top-level) doc ids; for a non
 * top-level segment context the returned DocIdSet translates between global
 * ids and segment-local ids by offsetting with the segment's docBase.
 */
@Override
public Filter getTopFilter() {
    final OpenBitSet bs = bits;
    // TODO: if cardinality isn't cached, do a quick measure of sparseness
    // and return null from bits() if too sparse.
    return new Filter() {
        @Override
        public DocIdSet getDocIdSet(final AtomicReaderContext context, final Bits acceptDocs) {
            AtomicReader reader = context.reader();
            // all Solr DocSets that are used as filters only include live docs
            final Bits acceptDocs2 = acceptDocs == null ? null
                    : (reader.getLiveDocs() == acceptDocs ? null : acceptDocs);
            if (context.isTopLevel) {
                // Global ids match segment ids at top level: use the bit set directly.
                return BitsFilteredDocIdSet.wrap(bs, acceptDocs);
            }
            final int base = context.docBase;
            final int maxDoc = reader.maxDoc();
            final int max = base + maxDoc; // one past the max doc in this segment.
            return BitsFilteredDocIdSet.wrap(new DocIdSet() {
                @Override
                public DocIdSetIterator iterator() {
                    return new DocIdSetIterator() {
                        int pos = base - 1;      // global position in the bit set
                        int adjustedDoc = -1;    // segment-local doc id last returned

                        @Override
                        public int docID() {
                            return adjustedDoc;
                        }

                        @Override
                        public int nextDoc() {
                            pos = bs.nextSetBit(pos + 1);
                            // Stay within this segment's global range [base, max).
                            return adjustedDoc = (pos >= 0 && pos < max) ? pos - base : NO_MORE_DOCS;
                        }

                        @Override
                        public int advance(int target) {
                            if (target == NO_MORE_DOCS)
                                return adjustedDoc = NO_MORE_DOCS;
                            pos = bs.nextSetBit(target + base);
                            return adjustedDoc = (pos >= 0 && pos < max) ? pos - base : NO_MORE_DOCS;
                        }

                        @Override
                        public long cost() {
                            // we don't want to actually compute cardinality, but
                            // if its already been computed, we use it
                            if (size != -1) {
                                return size;
                            } else {
                                return bs.capacity();
                            }
                        }
                    };
                }

                @Override
                public boolean isCacheable() {
                    return true;
                }

                @Override
                public Bits bits() {
                    // Random-access view in segment-local coordinates.
                    return new Bits() {
                        @Override
                        public boolean get(int index) {
                            return bs.fastGet(index + base);
                        }

                        @Override
                        public int length() {
                            return maxDoc;
                        }
                    };
                }
            }, acceptDocs2);
        }
    };
}
From source file:org.apache.solr.search.DocSetBase.java
License:Apache License
/**
 * Exposes this DocSet as a top-level Lucene Filter.
 *
 * Materializes the set into an OpenBitSet of global doc ids once, then for
 * each non top-level segment context returns an iterator that offsets between
 * global ids and segment-local ids using the segment's docBase.
 */
@Override
public Filter getTopFilter() {
    final OpenBitSet bs = getBits();
    return new Filter() {
        @Override
        public DocIdSet getDocIdSet(final AtomicReaderContext context, Bits acceptDocs) {
            AtomicReader reader = context.reader();
            // all Solr DocSets that are used as filters only include live docs
            final Bits acceptDocs2 = acceptDocs == null ? null
                    : (reader.getLiveDocs() == acceptDocs ? null : acceptDocs);
            if (context.isTopLevel) {
                // Global ids match segment ids at top level: use the bit set directly.
                return BitsFilteredDocIdSet.wrap(bs, acceptDocs);
            }
            final int base = context.docBase;
            final int maxDoc = reader.maxDoc();
            final int max = base + maxDoc; // one past the max doc in this segment.
            return BitsFilteredDocIdSet.wrap(new DocIdSet() {
                @Override
                public DocIdSetIterator iterator() {
                    return new DocIdSetIterator() {
                        int pos = base - 1;      // global position in the bit set
                        int adjustedDoc = -1;    // segment-local doc id last returned

                        @Override
                        public int docID() {
                            return adjustedDoc;
                        }

                        @Override
                        public int nextDoc() {
                            pos = bs.nextSetBit(pos + 1);
                            // Stay within this segment's global range [base, max).
                            return adjustedDoc = (pos >= 0 && pos < max) ? pos - base : NO_MORE_DOCS;
                        }

                        @Override
                        public int advance(int target) {
                            if (target == NO_MORE_DOCS)
                                return adjustedDoc = NO_MORE_DOCS;
                            pos = bs.nextSetBit(target + base);
                            return adjustedDoc = (pos >= 0 && pos < max) ? pos - base : NO_MORE_DOCS;
                        }

                        @Override
                        public long cost() {
                            return bs.capacity();
                        }
                    };
                }

                @Override
                public boolean isCacheable() {
                    return true;
                }

                @Override
                public Bits bits() {
                    // sparse filters should not use random access
                    return null;
                }
            }, acceptDocs2);
        }
    };
}
From source file:org.apache.solr.search.function.ValueSourceRangeFilter.java
License:Apache License
@Override public DocIdSet getDocIdSet(final Map context, final AtomicReaderContext readerContext, Bits acceptDocs) throws IOException { return BitsFilteredDocIdSet.wrap(new DocIdSet() { @Override/*from w w w .java 2s . c o m*/ public DocIdSetIterator iterator() throws IOException { return valueSource.getValues(context, readerContext).getRangeScorer(readerContext.reader(), lowerVal, upperVal, includeLower, includeUpper); } @Override public Bits bits() { return null; // don't use random access } }, acceptDocs); }
From source file:org.apache.solr.search.QueryWrapperFilter.java
License:Apache License
@Override public DocIdSet getDocIdSet(final LeafReaderContext context, final Bits acceptDocs) throws IOException { // get a private context that is used to rewrite, createWeight and score eventually final LeafReaderContext privateContext = context.reader().getContext(); final Weight weight = new IndexSearcher(privateContext).createNormalizedWeight(query, false); DocIdSet set = new DocIdSet() { @Override/*from w ww. j a va2 s .c o m*/ public DocIdSetIterator iterator() throws IOException { Scorer scorer = weight.scorer(privateContext); return scorer == null ? null : scorer.iterator(); } @Override public long ramBytesUsed() { return 0L; } }; return BitsFilteredDocIdSet.wrap(set, acceptDocs); }
From source file:org.apache.solr.search.SortedIntDocSet.java
License:Apache License
/**
 * Exposes this sorted-int-array DocSet as a top-level Lucene Filter.
 *
 * docs[] holds globally-sorted doc ids; for each segment the filter locates
 * the slice [startIdx, endIdx] belonging to that segment (global ids in
 * [docBase, docBase + maxDoc)) and iterates it, translating global ids to
 * segment-local ids. lastEndIdx caches where the previous segment's slice
 * ended to avoid re-searching when segments are visited in order; the cache
 * is detected-stale-and-reset below, as its own comment explains.
 */
@Override
public Filter getTopFilter() {
    return new Filter() {
        int lastEndIdx = 0;

        @Override
        public DocIdSet getDocIdSet(final AtomicReaderContext context, final Bits acceptDocs) {
            AtomicReader reader = context.reader();
            // all Solr DocSets that are used as filters only include live docs
            final Bits acceptDocs2 = acceptDocs == null ? null
                    : (reader.getLiveDocs() == acceptDocs ? null : acceptDocs);
            final int base = context.docBase;
            final int maxDoc = reader.maxDoc();
            final int max = base + maxDoc; // one past the max doc in this segment.
            int sidx = Math.max(0, lastEndIdx);
            if (sidx > 0 && docs[sidx - 1] >= base) {
                // oops, the lastEndIdx isn't correct... we must have been used
                // in a multi-threaded context, or the indexreaders are being
                // used out-of-order.  start at 0.
                sidx = 0;
            }
            if (sidx < docs.length && docs[sidx] < base) {
                // if docs[sidx] is < base, we need to seek to find the real start.
                sidx = findIndex(docs, base, sidx, docs.length - 1);
            }
            final int startIdx = sidx;
            // Largest possible end index is limited to the start index
            // plus the number of docs contained in the segment.  Subtract 1 since
            // the end index is inclusive.
            int eidx = Math.min(docs.length, startIdx + maxDoc) - 1;
            // find the real end
            eidx = findIndex(docs, max, startIdx, eidx) - 1;
            final int endIdx = eidx;
            lastEndIdx = endIdx;
            return BitsFilteredDocIdSet.wrap(new DocIdSet() {
                @Override
                public DocIdSetIterator iterator() {
                    return new DocIdSetIterator() {
                        int idx = startIdx;      // next position in docs[] to consume
                        int adjustedDoc = -1;    // segment-local doc id last returned

                        @Override
                        public int docID() {
                            return adjustedDoc;
                        }

                        @Override
                        public int nextDoc() {
                            return adjustedDoc = (idx > endIdx) ? NO_MORE_DOCS : (docs[idx++] - base);
                        }

                        @Override
                        public int advance(int target) {
                            if (idx > endIdx || target == NO_MORE_DOCS)
                                return adjustedDoc = NO_MORE_DOCS;
                            target += base; // translate to global doc id space
                            // probe next
                            int rawDoc = docs[idx++];
                            if (rawDoc >= target)
                                return adjustedDoc = rawDoc - base;
                            int high = endIdx;
                            // TODO: probe more before resorting to binary search?
                            // binary search
                            while (idx <= high) {
                                int mid = (idx + high) >>> 1;
                                rawDoc = docs[mid];
                                if (rawDoc < target) {
                                    idx = mid + 1;
                                } else if (rawDoc > target) {
                                    high = mid - 1;
                                } else {
                                    // exact hit: consume it and return
                                    idx = mid + 1;
                                    return adjustedDoc = rawDoc - base;
                                }
                            }
                            // low is on the insertion point...
                            if (idx <= endIdx) {
                                return adjustedDoc = docs[idx++] - base;
                            } else {
                                return adjustedDoc = NO_MORE_DOCS;
                            }
                        }

                        @Override
                        public long cost() {
                            return docs.length;
                        }
                    };
                }

                @Override
                public boolean isCacheable() {
                    return true;
                }

                @Override
                public Bits bits() {
                    // random access is expensive for this set
                    return null;
                }
            }, acceptDocs2);
        }
    };
}
From source file:org.apache.solr.search.TestFilteredDocIdSet.java
License:Apache License
public void testFilteredDocIdSet() throws Exception { final int maxdoc = 10; final DocIdSet innerSet = new DocIdSet() { @Override// w ww. j a va2s .co m public long ramBytesUsed() { return 0L; } @Override public DocIdSetIterator iterator() { return new DocIdSetIterator() { int docid = -1; @Override public int docID() { return docid; } @Override public int nextDoc() { docid++; return docid < maxdoc ? docid : (docid = NO_MORE_DOCS); } @Override public int advance(int target) throws IOException { return slowAdvance(target); } @Override public long cost() { return 1; } }; } }; DocIdSet filteredSet = new FilteredDocIdSet(innerSet) { @Override protected boolean match(int docid) { return docid % 2 == 0; //validate only even docids } }; DocIdSetIterator iter = filteredSet.iterator(); ArrayList<Integer> list = new ArrayList<>(); int doc = iter.advance(3); if (doc != DocIdSetIterator.NO_MORE_DOCS) { list.add(Integer.valueOf(doc)); while ((doc = iter.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { list.add(Integer.valueOf(doc)); } } int[] docs = new int[list.size()]; int c = 0; Iterator<Integer> intIter = list.iterator(); while (intIter.hasNext()) { docs[c++] = intIter.next().intValue(); } int[] answer = new int[] { 4, 6, 8 }; boolean same = Arrays.equals(answer, docs); if (!same) { System.out.println("answer: " + Arrays.toString(answer)); System.out.println("gotten: " + Arrays.toString(docs)); fail(); } }
From source file:org.apache.solr.search.TestFilteredDocIdSet.java
License:Apache License
public void testNullIteratorFilteredDocIdSet() throws Exception { Directory dir = newDirectory();//from w ww. j av a2 s .c o m RandomIndexWriter writer = new RandomIndexWriter(random(), dir); Document doc = new Document(); doc.add(newStringField("c", "val", Field.Store.NO)); writer.addDocument(doc); IndexReader reader = writer.getReader(); writer.close(); // First verify the document is searchable. IndexSearcher searcher = newSearcher(reader); Assert.assertEquals(1, searcher.search(new MatchAllDocsQuery(), 10).totalHits); // Now search w/ a Filter which returns a null DocIdSet Filter f = new Filter() { @Override public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) { final DocIdSet innerNullIteratorSet = new DocIdSet() { @Override public DocIdSetIterator iterator() { return null; } @Override public long ramBytesUsed() { return 0L; } }; return new FilteredDocIdSet(innerNullIteratorSet) { @Override protected boolean match(int docid) { return true; } }; } @Override public String toString(String field) { return "nullDocIdSetFilter"; } @Override public boolean equals(Object other) { return other == this; } @Override public int hashCode() { return System.identityHashCode(this); } }; Query filtered = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), Occur.MUST).add(f, Occur.FILTER) .build(); Assert.assertEquals(0, searcher.search(filtered, 10).totalHits); reader.close(); dir.close(); }