Example usage for org.apache.lucene.util FixedBitSet clear

List of usage examples for org.apache.lucene.util FixedBitSet clear

Introduction

On this page you can find an example usage of org.apache.lucene.util FixedBitSet's clear method.

Prototype

@Override
    public void clear(int startIndex, int endIndex) 

Source Link

Usage

From source file: org.apache.solr.search.facet.UniqueSlotAcc.java

License:Apache License

/**
 * Resets this accumulator so it can be reused for another facet pass.
 * Drops the cached per-slot counts and zeroes every allocated bit set in
 * {@code arr} in place (null slots are left untouched so they stay lazily
 * allocated).
 */
@Override
public void reset() {
    counts = null; // invalidate cached counts; recomputed on next use
    for (FixedBitSet slotBits : arr) {
        if (slotBits != null) {
            // Reuse the existing allocation instead of re-creating the bit set.
            slotBits.clear(0, slotBits.length());
        }
    }
}

From source file: org.elasticsearch.index.cache.docset.simple.SimpleDocSetCache.java

License:Apache License

/**
 * Obtains a zeroed {@link FixedBitSet} for the given segment, recycling one
 * from the per-core pool when available.
 *
 * On the first request for a segment's core cache key, an empty pool is
 * registered (and, for SegmentReaders, a core-closed listener is installed so
 * the pool can be evicted) and a fresh bit set is returned. Subsequent
 * requests poll the pool; a recycled bit set is cleared before reuse.
 */
@Override
public ContextDocIdSet obtain(AtomicReaderContext context) {
    Queue<FixedBitSet> pool = cache.get(context.reader().getCoreCacheKey());
    if (pool != null) {
        FixedBitSet recycled = pool.poll();
        if (recycled == null) {
            // Pool exists but is empty: allocate a new, already-zeroed set.
            recycled = new FixedBitSet(context.reader().maxDoc());
        } else {
            // Recycled sets may carry stale bits from a previous use.
            recycled.clear(0, recycled.length());
        }
        return new ContextDocIdSet(context, recycled);
    }
    // First sighting of this segment core: hook eviction, then register a pool.
    if (context.reader() instanceof SegmentReader) {
        ((SegmentReader) context.reader()).addCoreClosedListener(this);
    }
    cache.put(context.reader().getCoreCacheKey(), ConcurrentCollections.<FixedBitSet>newQueue());
    return new ContextDocIdSet(context, new FixedBitSet(context.reader().maxDoc()));
}

From source file: org.elasticsearch.index.search.FieldDataTermsFilterTests.java

License:Apache License

/**
 * Verifies {@code FieldDataTermsFilter.newBytes} against a string field:
 * the filter (built directly and via the mapper) matches exactly the
 * expected documents, and matches nothing when applied to numeric fields.
 */
@Test
public void testBytes() throws Exception {
    // Documents expected to match the "strN" terms generated below.
    List<Integer> matching = Arrays.asList(1, 5, 7);

    ObjectOpenHashSet<BytesRef> hashedTerms = new ObjectOpenHashSet<BytesRef>();
    List<BytesRef> collectedTerms = new ArrayList<BytesRef>(matching.size());
    for (Integer doc : matching) {
        BytesRef term = new BytesRef("str" + doc);
        hashedTerms.add(term);
        collectedTerms.add(term);
    }

    FieldDataTermsFilter filter = FieldDataTermsFilter.newBytes(getFieldData(strMapper), hashedTerms);

    int maxDoc = reader.maxDoc();
    FixedBitSet acc = new FixedBitSet(maxDoc);

    // Filter built directly from field data.
    acc.clear(0, maxDoc);
    assertThat(acc.cardinality(), equalTo(0));
    acc.or(filter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(matching.size()));
    for (int docId = 0; docId < reader.maxDoc(); docId++) {
        assertThat(acc.get(docId), equalTo(matching.contains(docId)));
    }

    // Same terms, but filter obtained from the mapper.
    acc.clear(0, maxDoc);
    assertThat(acc.cardinality(), equalTo(0));
    acc.or(strMapper.termsFilter(ifdService, collectedTerms, null)
            .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(matching.size()));
    for (int docId = 0; docId < reader.maxDoc(); docId++) {
        assertThat(acc.get(docId), equalTo(matching.contains(docId)));
    }

    acc.clear(0, maxDoc);
    assertThat(acc.cardinality(), equalTo(0));

    // BytesRef terms against a long field: should match no documents.
    filter = FieldDataTermsFilter.newBytes(getFieldData(lngMapper), hashedTerms);
    acc.or(filter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(0));

    // BytesRef terms against a double field: should match no documents.
    filter = FieldDataTermsFilter.newBytes(getFieldData(dblMapper), hashedTerms);
    acc.or(filter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(0));
}

From source file: org.elasticsearch.index.search.FieldDataTermsFilterTests.java

License:Apache License

/**
 * Verifies {@code FieldDataTermsFilter.newLongs} against a long field:
 * the filter (built directly and via the mapper) matches exactly the
 * expected documents, and yields no doc-id set on a double field.
 */
@Test
public void testLongs() throws Exception {
    // Documents expected to match the long terms generated below.
    List<Integer> matching = Arrays.asList(1, 5, 7);

    LongOpenHashSet hashedTerms = new LongOpenHashSet();
    List<Long> collectedTerms = new ArrayList<Long>(matching.size());
    for (Integer doc : matching) {
        long term = doc.longValue();
        hashedTerms.add(term);
        collectedTerms.add(term);
    }

    FieldDataTermsFilter filter = FieldDataTermsFilter.newLongs(getFieldData(lngMapper), hashedTerms);

    int maxDoc = reader.maxDoc();
    FixedBitSet acc = new FixedBitSet(maxDoc);

    // Filter built directly from field data.
    acc.clear(0, maxDoc);
    assertThat(acc.cardinality(), equalTo(0));
    acc.or(filter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(matching.size()));
    for (int docId = 0; docId < reader.maxDoc(); docId++) {
        assertThat(acc.get(docId), equalTo(matching.contains(docId)));
    }

    // Same terms, but filter obtained from the mapper.
    acc.clear(0, maxDoc);
    assertThat(acc.cardinality(), equalTo(0));
    acc.or(lngMapper.termsFilter(ifdService, collectedTerms, null)
            .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(matching.size()));
    for (int docId = 0; docId < reader.maxDoc(); docId++) {
        assertThat(acc.get(docId), equalTo(matching.contains(docId)));
    }

    // Long terms against a double field: filter short-circuits to null.
    filter = FieldDataTermsFilter.newLongs(getFieldData(dblMapper), hashedTerms);
    assertNull(filter.getDocIdSet(reader.getContext(), reader.getLiveDocs()));
}

From source file: org.elasticsearch.index.search.FieldDataTermsFilterTests.java

License:Apache License

/**
 * Verifies {@code FieldDataTermsFilter.newDoubles} against a double field:
 * the filter (built directly and via the mapper) matches exactly the
 * expected documents, and yields no doc-id set on a long field.
 */
@Test
public void testDoubles() throws Exception {
    // Documents expected to match the double terms generated below.
    List<Integer> matching = Arrays.asList(1, 5, 7);

    DoubleOpenHashSet hashedTerms = new DoubleOpenHashSet();
    List<Double> collectedTerms = new ArrayList<Double>(matching.size());
    for (Integer doc : matching) {
        double term = doc.doubleValue();
        hashedTerms.add(term);
        collectedTerms.add(term);
    }

    FieldDataTermsFilter filter = FieldDataTermsFilter.newDoubles(getFieldData(dblMapper), hashedTerms);

    int maxDoc = reader.maxDoc();
    FixedBitSet acc = new FixedBitSet(maxDoc);

    // Filter built directly from field data.
    acc.clear(0, maxDoc);
    assertThat(acc.cardinality(), equalTo(0));
    acc.or(filter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(matching.size()));
    for (int docId = 0; docId < reader.maxDoc(); docId++) {
        assertThat(acc.get(docId), equalTo(matching.contains(docId)));
    }

    // Same terms, but filter obtained from the mapper.
    acc.clear(0, maxDoc);
    assertThat(acc.cardinality(), equalTo(0));
    acc.or(dblMapper.termsFilter(ifdService, collectedTerms, null)
            .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
    assertThat(acc.cardinality(), equalTo(matching.size()));
    for (int docId = 0; docId < reader.maxDoc(); docId++) {
        assertThat(acc.get(docId), equalTo(matching.contains(docId)));
    }

    // Double terms against a long field: filter short-circuits to null.
    filter = FieldDataTermsFilter.newDoubles(getFieldData(lngMapper), hashedTerms);
    assertNull(filter.getDocIdSet(reader.getContext(), reader.getLiveDocs()));
}