List of usage examples for the org.apache.lucene.util.Bits#length() method
int length();
From source file:de.blizzy.documentr.search.PageIndex.java
License:Open Source License
/**
 * Returns every tag term in the index that appears in at least one document
 * visible to the given user.
 *
 * @param authentication the authenticated user whose permissions restrict document visibility
 * @return the distinct tag values of visible documents (possibly empty, never {@code null})
 * @throws IOException if the index cannot be read
 * @throws TimeoutException if computing the visible document set times out
 */
public Set<String> getAllTags(Authentication authentication) throws IOException, TimeoutException {
    IndexSearcher searcher = null;
    try {
        searcher = searcherManager.acquire();
        Bits visible = getVisibleDocIds(searcher, authentication);
        Set<String> result = Sets.newHashSet();
        if (visible.length() > 0) {
            Terms tagTerms = MultiFields.getTerms(searcher.getIndexReader(), TAG);
            if (tagTerms != null) {
                TermsEnum termsEnum = tagTerms.iterator(null);
                for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
                    // restrict the postings to visible docs; one surviving doc is enough
                    // to keep the tag
                    DocsEnum postings = termsEnum.docs(visible, null, 0);
                    if (postings.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
                        result.add(term.utf8ToString());
                    }
                }
            }
        }
        return result;
    } finally {
        // the searcher (and its reader) is owned by the manager and must be handed back
        if (searcher != null) {
            searcherManager.release(searcher);
        }
    }
}
From source file:de.blizzy.documentr.search.PagePermissionFilter.java
License:Open Source License
/**
 * Builds the per-segment doc id set: a document is included when it is both
 * globally visible (per {@code visibleDocIds}) and accepted by the segment's
 * {@code acceptDocs} (when present).
 */
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
    int base = context.docBase;
    // -1 marks "no acceptDocs", so the length guard below is skipped
    int acceptLen = (acceptDocs == null) ? -1 : acceptDocs.length();
    BitSet segmentBits = new BitSet();
    for (int globalId = base; globalId < visibleDocIdsLength; globalId++) {
        int localId = globalId - base;
        // stop once we run past the segment-local acceptDocs
        if (acceptLen >= 0 && localId >= acceptLen) {
            break;
        }
        boolean accepted = (acceptDocs == null) || acceptDocs.get(localId);
        if (accepted && visibleDocIds.get(globalId)) {
            segmentBits.set(localId);
        }
    }
    return new DocIdBitSet(segmentBits);
}
From source file:de.blizzy.documentr.search.PagePermissionFilter.java
License:Open Source License
/**
 * Copies the set bits of a Lucene {@link Bits} instance into a
 * {@link java.util.BitSet} of the same logical size.
 */
private BitSet toBitSet(Bits bits) {
    int size = bits.length();
    BitSet copy = new BitSet(size + 1);
    for (int idx = 0; idx < size; idx++) {
        if (bits.get(idx)) {
            copy.set(idx);
        }
    }
    return copy;
}
From source file:de.blizzy.documentr.search.TagFinder.java
License:Open Source License
public Set<String> getAllTags(Authentication authentication) throws IOException, TimeoutException { IndexReader reader = null;//from w w w. j ava 2 s. c o m IndexSearcher searcher = null; try { searcher = searcherManager.acquire(); // no point in running the task asynchronously here GetVisibleDocIdsTask visibleDocIdsTask = new GetVisibleDocIdsTask(searcher, authentication, userStore, permissionEvaluator, taskExecutor); Bits visibleDocIds = visibleDocIdsTask.call(); Set<String> tags = Sets.newHashSet(); if (visibleDocIds.length() > 0) { reader = searcher.getIndexReader(); Terms terms = MultiFields.getTerms(reader, PageIndex.TAG); if (terms != null) { TermsEnum termsEnum = terms.iterator(null); BytesRef ref; while ((ref = termsEnum.next()) != null) { DocsEnum docsEnum = termsEnum.docs(visibleDocIds, null, 0); if (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { tags.add(ref.utf8ToString()); } } } } return tags; } finally { if (searcher != null) { searcherManager.release(searcher); } } }
From source file:de.unihildesheim.iw.lucene.util.BitsUtils.java
License:Open Source License
/**
 * Converts a plain {@link Bits} instance to a {@link BitSet} instance.
 * If the argument already is a {@link BitSet} it is returned unchanged;
 * otherwise the set bits are copied into a new {@link FixedBitSet}.
 *
 * @param bits bits to convert
 * @return new instance, or {@code null} if {@code bits} was {@code null}
 */
@Contract("null -> null; !null -> !null")
@Nullable
public static BitSet bits2BitSet(@Nullable final Bits bits) {
    if (bits == null) {
        return null;
    }
    if (BitSet.class.isInstance(bits)) {
        // already the target type — no copy needed
        return (BitSet) bits;
    }
    final FixedBitSet result = new FixedBitSet(bits.length());
    StreamUtils.stream(bits).forEach(result::set);
    return result;
}
From source file:de.unihildesheim.iw.lucene.util.BitsUtils.java
License:Open Source License
/**
 * Converts a plain {@link Bits} instance to a {@link FixedBitSet} instance.
 * A {@link FixedBitSet} argument is returned unchanged; a {@link BitSet} or
 * generic {@link Bits} argument has its set bits copied into a new instance.
 *
 * @param bits bits to convert
 * @return new instance, or {@code null} if {@code bits} was {@code null}
 */
@Contract("null -> null; !null -> !null")
@Nullable
public static FixedBitSet bits2FixedBitSet(@Nullable final Bits bits) {
    if (bits == null) {
        return null;
    }
    if (FixedBitSet.class.isInstance(bits)) {
        // already the target type — no copy needed
        return (FixedBitSet) bits;
    }
    final FixedBitSet copy = new FixedBitSet(bits.length());
    if (BitSet.class.isInstance(bits)) {
        StreamUtils.stream((BitSet) bits).forEach(copy::set);
    } else {
        StreamUtils.stream(bits).forEach(copy::set);
    }
    return copy;
}
From source file:lucene.security.index.SecureAtomicReader.java
License:Apache License
public static Bits getSecureLiveDocs(Bits bits, int maxDoc, final AccessControlReader accessControlReader) { final Bits liveDocs; if (bits == null) { liveDocs = getMatchAll(maxDoc);//w ww.j a v a 2 s .com } else { liveDocs = bits; } final int length = liveDocs.length(); Bits secureLiveDocs = new Bits() { @Override public boolean get(int index) { if (liveDocs.get(index)) { try { if (accessControlReader.hasAccess(ReadType.DOCS_ENUM, index)) { return true; } } catch (IOException e) { throw new RuntimeException(e); } } return false; } @Override public int length() { return length; } }; return secureLiveDocs; }
From source file:lucene.security.index.SecureAtomicReaderTestBase.java
License:Apache License
/** The secure reader exposes four docs; only the last is hidden from this user. */
@Test
public void testLiveDocs() throws IOException {
    SecureAtomicReader secureReader = getSecureReader();
    Bits liveDocs = secureReader.getLiveDocs();
    assertEquals(4, liveDocs.length());
    for (int doc = 0; doc < 3; doc++) {
        assertTrue(liveDocs.get(doc));
    }
    assertFalse(liveDocs.get(3));
    secureReader.close();
}
From source file:lucene.security.search.DocumentVisibilityFilter.java
License:Apache License
/**
 * Combines a list of {@link DocIdSet}s into one set representing their logical OR.
 * Every input set must expose random-access {@link Bits} (an {@link IOException}
 * is thrown otherwise) and all bits must report the same length.
 *
 * @param list the sets to union; an empty list yields {@link DocIdSet#EMPTY_DOCIDSET},
 *        a single-element list is returned as-is (after validating its bits)
 * @return a cacheable DocIdSet whose bits/iterator union all inputs
 * @throws IOException if any set has null bits or the bit lengths differ
 */
public static DocIdSet getLogicalOr(final List<DocIdSet> list) throws IOException {
    if (list.size() == 0) {
        return DocIdSet.EMPTY_DOCIDSET;
    }
    if (list.size() == 1) {
        // single input: validate that it has random-access bits, then return it unchanged
        DocIdSet docIdSet = list.get(0);
        Bits bits = docIdSet.bits();
        if (bits == null) {
            throw new IOException("Bits are not allowed to be null for DocIdSet [" + docIdSet + "].");
        }
        return docIdSet;
    }
    int index = 0;
    final Bits[] bitsArray = new Bits[list.size()];
    int length = -1;
    for (DocIdSet docIdSet : list) {
        Bits bits = docIdSet.bits();
        if (bits == null) {
            throw new IOException("Bits are not allowed to be null for DocIdSet [" + docIdSet + "].");
        }
        bitsArray[index] = bits;
        index++;
        // all inputs must agree on length; the first one seen fixes it
        if (length < 0) {
            length = bits.length();
        } else if (length != bits.length()) {
            throw new IOException(
                    "Bits length need to be the same [" + length + "] and [" + bits.length() + "]");
        }
    }
    final int len = length;
    return new DocIdSet() {
        @Override
        public Bits bits() throws IOException {
            // random access: a doc is set if ANY input has it set
            return new Bits() {
                @Override
                public boolean get(int index) {
                    for (int i = 0; i < bitsArray.length; i++) {
                        if (bitsArray[i].get(index)) {
                            return true;
                        }
                    }
                    return false;
                }

                @Override
                public int length() {
                    return len;
                }
            };
        }

        @Override
        public boolean isCacheable() {
            return true;
        }

        @Override
        public DocIdSetIterator iterator() throws IOException {
            // merge-style union iterator: keep one iterator per input, always
            // surface the smallest current docID
            final DocIdSetIterator[] docIdSetIteratorArray = new DocIdSetIterator[list.size()];
            long c = 0;
            int index = 0;
            for (DocIdSet docIdSet : list) {
                DocIdSetIterator iterator = docIdSet.iterator();
                // position each iterator on its first doc so docID() is meaningful below
                iterator.nextDoc();
                docIdSetIteratorArray[index] = iterator;
                c += iterator.cost();
                index++;
            }
            final long cost = c;
            return new DocIdSetIterator() {
                private int _docId = -1;

                @Override
                public int advance(int target) throws IOException {
                    callAdvanceOnAllThatAreBehind(target);
                    // NOTE(review): COMPARATOR is declared elsewhere in this class;
                    // presumably it orders iterators by ascending docID so element 0
                    // is the smallest candidate — confirm against its definition
                    Arrays.sort(docIdSetIteratorArray, COMPARATOR);
                    DocIdSetIterator iterator = docIdSetIteratorArray[0];
                    return _docId = iterator.docID();
                }

                // move every iterator that is still before target up to (at least) target
                private void callAdvanceOnAllThatAreBehind(int target) throws IOException {
                    for (int i = 0; i < docIdSetIteratorArray.length; i++) {
                        DocIdSetIterator iterator = docIdSetIteratorArray[i];
                        if (iterator.docID() < target) {
                            iterator.advance(target);
                        }
                    }
                }

                @Override
                public int nextDoc() throws IOException {
                    return advance(_docId + 1);
                }

                @Override
                public int docID() {
                    return _docId;
                }

                @Override
                public long cost() {
                    return cost;
                }
            };
        }
    };
}
From source file:org.apache.solr.search.CitationLRUCache.java
License:Apache License
/**
 * Warms this cache from the previous searcher's cache, regenerating entries
 * incrementally: documents deleted/updated since the old searcher are marked
 * in {@code toRefresh}, surviving entries are re-registered via the regenerator.
 *
 * @param searcher the new searcher being warmed
 * @param old the previous cache instance to carry entries over from
 * @throws IOException if reading the index fails
 */
private void warmIncrementally(SolrIndexSearcher searcher, SolrCache<K, V> old) throws IOException {
    // without a regenerator there is nothing to warm
    if (regenerator == null)
        return;
    Map<String, List<String>> fields = getFields(searcher, this.identifierFields);
    if (fields.get("textClasses").size() > 0 || fields.get("textClassesMV").size() > 0) {
        synchronized (map) {
            treatIdentifiersAsText = true;
        }
    }
    long warmingStartTime = System.currentTimeMillis();
    CitationLRUCache<K, V> other = (CitationLRUCache<K, V>) old;
    // collect ids of documents that need to be reloaded/regenerated during this warmup run
    FixedBitSet toRefresh = new FixedBitSet(searcher.getIndexReader().maxDoc());
    Bits liveDocs = searcher.getAtomicReader().getLiveDocs();
    if (liveDocs == null) {
        // no deletions visible: everything is new — this could be a fresh index
        // or a merged/optimized index too; force regeneration of every doc
        toRefresh.set(0, toRefresh.length());
        // Build the mapping from indexed values into lucene ids; this must always
        // be available, so we build it no matter what.
        // XXX: make it update only the necessary IDs (not the whole index)
        unInvertedTheDamnThing(searcher.getAtomicReader(), fields, liveDocs, new KVSetter() {
            @SuppressWarnings("unchecked")
            @Override
            public void set(int docbase, int docid, Object value) {
                put((K) value, (V) (Integer) (docbase + docid));
            }
        });
    } else if (liveDocs != null) { // NOTE(review): redundant condition — always true after the if above
        Integer luceneId;
        for (V v : other.map.values()) {
            luceneId = ((Integer) v);
            // NOTE(review): looks off-by-one — valid doc ids are < length(), and
            // get(length()) would be out of bounds; '<' was probably intended. Confirm.
            if (luceneId <= liveDocs.length() && !liveDocs.get(luceneId)) {
                // doc was either deleted or updated
                // NOTE(review): body intentionally (?) empty — the stated plan was to
                // retrieve all citations/references for this luceneId and mark those
                // docs to be refreshed; nothing is marked here. Confirm this is WIP.
            }
        }
        for (int i = 0; i < toRefresh.length(); i++) {
            if (liveDocs.get(i)) {
                toRefresh.set(i);
            }
        }
    }
    // warm entries
    if (isAutowarmingOn()) {
        Object[] keys, vals = null;
        // Don't do the autowarming in the synchronized block, just pull out the keys and values.
        synchronized (other.map) {
            int sz = autowarm.getWarmCount(other.map.size());
            keys = new Object[sz];
            vals = new Object[sz];
            Iterator<Map.Entry<K, V>> iter = other.map.entrySet().iterator();
            // iteration goes from oldest (least recently used) to most recently used,
            // so we need to skip over the oldest entries.
            int skip = other.map.size() - sz;
            for (int i = 0; i < skip; i++)
                iter.next();
            for (int i = 0; i < sz; i++) {
                Map.Entry<K, V> entry = iter.next();
                keys[i] = entry.getKey();
                vals[i] = entry.getValue();
            }
        }
        // autowarm from the oldest to the newest entries so that the ordering will be
        // correct in the new cache
        for (int i = 0; i < keys.length; i++) {
            try {
                boolean continueRegen = true;
                if (isModified(liveDocs, keys[i], vals[i])) {
                    // stale entry: mark its doc for refresh instead of regenerating
                    toRefresh.set((Integer) keys[i]);
                } else {
                    continueRegen = regenerator.regenerateItem(searcher, this, old, keys[i], vals[i]);
                }
                if (!continueRegen)
                    break;
            } catch (Throwable e) {
                // warming is best-effort; log and continue with the next key
                SolrException.log(log, "Error during auto-warming of key:" + keys[i], e);
            }
        }
    }
    warmupTime = System.currentTimeMillis() - warmingStartTime;
}