Example usage for org.apache.lucene.index DocValues getNumeric

List of usage examples for org.apache.lucene.index DocValues getNumeric

Introduction

In this page you can find the example usage for org.apache.lucene.index DocValues getNumeric.

Prototype

public static NumericDocValues getNumeric(LeafReader reader, String field) throws IOException 

Source Link

Document

Returns the NumericDocValues for the given field, or an empty NumericDocValues instance (via emptyNumeric()) if the field has none.

Usage

From source file:alba.solr.functions.FunctionQuery.java

License:Apache License

/**
 * Stores the given leaf reader for later doc-values lookups and eagerly
 * probes the "id" field's numeric doc values.
 *
 * NOTE(review): this method looks like an unfinished stub — the args loop
 * is empty and the NumericDocValues returned below is discarded. Presumably
 * it was meant to populate docVals per argument; confirm intent before use.
 */
public void setLeafReader(LeafReader reader, String[] args) {

    this.readerContext = reader;

    //i = 0;
    for (String arg : args) {
        //docValues[i] = 

    }

    try {
        //   this.docVals[0] =
        // NOTE(review): return value is discarded; this only verifies that
        // the lookup does not throw — it caches nothing.
        DocValues.getNumeric(this.readerContext, "id");
    } catch (IOException e) {
        // On failure, clear the cached doc values and log the error;
        // callers must tolerate docVals == null afterwards.
        this.docVals = null;
        logger.error("error while readering docvals!", e);
    }

}

From source file:alba.solr.functions.FunctionQuery.java

License:Apache License

/**
 * Returns the numeric doc value of the "id" field for the given document,
 * or -1 if the doc values cannot be read.
 *
 * NOTE(review): fetching NumericDocValues on every call is expensive;
 * consider caching it per leaf reader (e.g. in setLeafReader) instead.
 *
 * @param doc segment-local document id
 * @return the "id" doc value, or -1 on read failure
 */
@Override
public long longVal(int doc) {
    try {
        NumericDocValues dv = DocValues.getNumeric(this.readerContext, "id");
        return dv.get(doc);
    } catch (IOException e) {
        // Log with the full stack trace instead of printStackTrace(),
        // matching the error handling used elsewhere in this class.
        logger.error("error while reading docvals for doc " + doc, e);
    }

    // Sentinel meaning "value unavailable"; callers must treat -1 accordingly.
    return -1;
}

From source file:org.apache.solr.analytics.function.field.DateField.java

License:Apache License

/**
 * Refreshes {@code docValues} for the new segment: fetches the numeric doc
 * values of {@code fieldName} from the segment's reader (an empty instance
 * is returned by DocValues.getNumeric when the segment has none).
 *
 * @param context the leaf (segment) reader context being switched to
 * @throws IOException if the doc values cannot be read
 */
@Override
public void doSetNextReader(LeafReaderContext context) throws IOException {
    docValues = DocValues.getNumeric(context.reader(), fieldName);
}

From source file:org.apache.solr.analytics.util.valuesource.DateFieldSource.java

License:Apache License

/**
 * Exposes this date field's per-segment numeric doc values as
 * {@link FunctionValues}, reporting existence via the docs-with-field bits
 * and converting raw longs to date objects/strings on demand.
 *
 * @param context       shared evaluation context (unused here)
 * @param readerContext the segment to read values from
 * @return long-valued FunctionValues backed by the segment's doc values
 * @throws IOException if the doc values cannot be read
 */
@Override
public FunctionValues getValues(Map context, LeafReaderContext readerContext) throws IOException {
    // Per-segment values plus the set of docs that actually carry one.
    final NumericDocValues docVals = DocValues.getNumeric(readerContext.reader(), field);
    final Bits docsWithValue = DocValues.getDocsWithField(readerContext.reader(), field);
    return new LongDocValues(this) {
        @Override
        public long longVal(int doc) {
            return docVals.get(doc);
        }

        @Override
        public boolean exists(int doc) {
            return docsWithValue.get(doc);
        }

        @Override
        public Object objectVal(int doc) {
            if (!exists(doc)) {
                return null;
            }
            return longToObject(docVals.get(doc));
        }

        @Override
        public String strVal(int doc) {
            if (!exists(doc)) {
                return null;
            }
            return longToString(docVals.get(doc));
        }

        @Override
        public ValueFiller getValueFiller() {
            return new ValueFiller() {
                // Reusable mutable holder; refreshed on each fillValue call.
                private final MutableValueDate dateVal = new MutableValueDate();

                @Override
                public MutableValue getValue() {
                    return dateVal;
                }

                @Override
                public void fillValue(int doc) {
                    dateVal.value = docVals.get(doc);
                    dateVal.exists = exists(doc);
                }
            };
        }

    };
}

From source file:org.apache.solr.request.IntervalFacets.java

License:Apache License

/**
 * Accumulates interval counts for a numeric field by walking the matching
 * docs in increasing global-id order, switching per-segment doc values as
 * the iteration crosses segment boundaries.
 *
 * @throws IOException if doc values cannot be read
 */
private void getCountNumeric() throws IOException {
    final FieldType ft = schemaField.getType();
    final String fieldName = schemaField.getName();
    final NumericType numericType = ft.getNumericType();
    if (numericType == null) {
        // Callers must only invoke this for numeric fields.
        throw new IllegalStateException();
    }
    final List<AtomicReaderContext> leaves = searcher.getIndexReader().leaves();

    final Iterator<AtomicReaderContext> ctxIt = leaves.iterator();
    AtomicReaderContext ctx = null;
    NumericDocValues longs = null;
    Bits docsWithField = null;
    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext();) {
        final int doc = docsIt.nextDoc();
        if (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc()) {
            // Docs arrive in increasing order, so advance to the segment
            // containing this doc, skipping segments with no matches.
            do {
                ctx = ctxIt.next();
            } while (ctx == null || doc >= ctx.docBase + ctx.reader().maxDoc());
            assert doc >= ctx.docBase;
            switch (numericType) {
            case LONG:
                longs = DocValues.getNumeric(ctx.reader(), fieldName);
                break;
            case INT:
                // Same raw doc values as LONG; int values already fit in a long.
                longs = DocValues.getNumeric(ctx.reader(), fieldName);
                break;
            case FLOAT:
                final NumericDocValues floats = DocValues.getNumeric(ctx.reader(), fieldName);
                // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
                // Wrap the raw float bits so negatives sort in numeric order
                // (flip the non-sign bits of negative values).
                longs = new NumericDocValues() {
                    @Override
                    public long get(int docID) {
                        long bits = floats.get(docID);
                        if (bits < 0)
                            bits ^= 0x7fffffffffffffffL;
                        return bits;
                    }
                };
                break;
            case DOUBLE:
                final NumericDocValues doubles = DocValues.getNumeric(ctx.reader(), fieldName);
                // TODO: this bit flipping should probably be moved to tie-break in the PQ comparator
                // Same sortable-bits transform as FLOAT, for double raw bits.
                longs = new NumericDocValues() {
                    @Override
                    public long get(int docID) {
                        long bits = doubles.get(docID);
                        if (bits < 0)
                            bits ^= 0x7fffffffffffffffL;
                        return bits;
                    }
                };
                break;
            default:
                throw new AssertionError();
            }
            docsWithField = DocValues.getDocsWithField(ctx.reader(), schemaField.getName());
        }
        // A stored 0 is ambiguous with "missing"; consult docsWithField to
        // distinguish a real zero value from an absent one.
        long v = longs.get(doc - ctx.docBase);
        if (v != 0 || docsWithField.get(doc - ctx.docBase)) {
            accumIntervalWithValue(v);
        }
    }
}

From source file:org.apache.solr.schema.TestPointFields.java

License:Apache License

/**
 * Indexes ten documents with the given values, then verifies that point
 * values, doc values, and stored fields for {@code field} are present or
 * absent exactly as the schema flags (indexed/stored/docValues) dictate.
 *
 * @param field  name of the schema field under test (must be a PointField)
 * @param values ten string values to index, one per document
 * @throws IOException if the index cannot be read
 */
private void doTestInternals(String field, String[] values) throws IOException {
    assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField);
    for (int i = 0; i < 10; i++) {
        assertU(adoc("id", String.valueOf(i), field, values[i]));
    }
    assertU(commit());
    IndexReader ir;
    RefCounted<SolrIndexSearcher> ref = null;
    SchemaField sf = h.getCore().getLatestSchema().getField(field);
    boolean ignoredField = !(sf.indexed() || sf.stored() || sf.hasDocValues());
    try {
        ref = h.getCore().getSearcher();
        SolrIndexSearcher searcher = ref.get();
        ir = searcher.getIndexReader();
        // our own SlowCompositeReader to check DocValues on disk w/o the UninvertingReader added by SolrIndexSearcher
        final LeafReader leafReaderForCheckingDVs = SlowCompositeReaderWrapper.wrap(searcher.getRawReader());

        if (sf.indexed()) {
            assertEquals("Field " + field + " should have point values", 10, PointValues.size(ir, field));
        } else {
            assertEquals("Field " + field + " should have no point values", 0, PointValues.size(ir, field));
        }
        if (ignoredField) {
            // Ignored fields produce empty doc-values iterators of every flavor.
            assertTrue("Field " + field + " should not have docValues",
                    DocValues.getSortedNumeric(leafReaderForCheckingDVs, field)
                            .nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
            assertTrue("Field " + field + " should not have docValues", DocValues
                    .getNumeric(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
            assertTrue("Field " + field + " should not have docValues", DocValues
                    .getSorted(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
            assertTrue("Field " + field + " should not have docValues", DocValues
                    .getBinary(leafReaderForCheckingDVs, field).nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
        } else {
            if (sf.hasDocValues()) {
                // Multi-valued points use SORTED_NUMERIC, single-valued use NUMERIC.
                if (sf.multiValued()) {
                    assertFalse("Field " + field + " should have docValues",
                            DocValues.getSortedNumeric(leafReaderForCheckingDVs, field)
                                    .nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
                } else {
                    assertFalse("Field " + field + " should have docValues",
                            DocValues.getNumeric(leafReaderForCheckingDVs, field)
                                    .nextDoc() == DocIdSetIterator.NO_MORE_DOCS);
                }
            } else {
                expectThrows(IllegalStateException.class,
                        () -> DocValues.getSortedNumeric(leafReaderForCheckingDVs, field));
                expectThrows(IllegalStateException.class,
                        () -> DocValues.getNumeric(leafReaderForCheckingDVs, field));
            }
            // Point fields never expose SORTED or BINARY doc values.
            expectThrows(IllegalStateException.class,
                    () -> DocValues.getSorted(leafReaderForCheckingDVs, field));
            expectThrows(IllegalStateException.class,
                    () -> DocValues.getBinary(leafReaderForCheckingDVs, field));
        }
        for (LeafReaderContext leave : ir.leaves()) {
            LeafReader reader = leave.reader();
            for (int i = 0; i < reader.numDocs(); i++) {
                Document doc = reader.document(i);
                if (sf.stored()) {
                    assertNotNull("Field " + field + " not found. Doc: " + doc, doc.get(field));
                } else {
                    assertNull(doc.get(field));
                }
            }
        }
    } finally {
        // getSearcher() may have thrown before ref was assigned; guard
        // against the NPE an unconditional decref() would cause here.
        if (ref != null) {
            ref.decref();
        }
    }
    clearIndex();
    assertU(commit());
}

From source file:org.apache.solr.search.facet.FacetFieldProcessorByHashDV.java

License:Apache License

/**
 * Collects the base doc set in sorted order, dispatching on the calc type:
 * string fields walk sorted (ordinal) doc values mapped to global ordinals,
 * numeric fields walk numeric doc values via the iterator API.
 *
 * @throws IOException if doc values cannot be read
 */
private void collectDocs() throws IOException {
    if (calc instanceof TermOrdCalc) { // Strings

        // TODO support SortedSetDocValues
        SortedDocValues globalDocValues = FieldUtil.getSortedDocValues(fcontext.qcontext, sf, null);
        // Resolve a global ordinal back to its term for bucket labels.
        ((TermOrdCalc) calc).lookupOrdFunction = ord -> {
            try {
                return globalDocValues.lookupOrd(ord);
            } catch (IOException e) {
                // lookupOrdFunction's functional interface cannot throw IOException.
                throw new RuntimeException(e);
            }
        };

        DocSetUtil.collectSortedDocSet(fcontext.base, fcontext.searcher.getIndexReader(),
                new SimpleCollector() {
                    SortedDocValues docValues = globalDocValues; // this segment/leaf. NN
                    LongValues toGlobal = LongValues.IDENTITY; // this segment to global ordinal. NN

                    @Override
                    public boolean needsScores() {
                        return false;
                    }

                    @Override
                    protected void doSetNextReader(LeafReaderContext ctx) throws IOException {
                        setNextReaderFirstPhase(ctx);
                        // Multi-segment case: swap in the per-segment values
                        // and the segment-ordinal -> global-ordinal mapping.
                        if (globalDocValues instanceof MultiDocValues.MultiSortedDocValues) {
                            MultiDocValues.MultiSortedDocValues multiDocValues = (MultiDocValues.MultiSortedDocValues) globalDocValues;
                            docValues = multiDocValues.values[ctx.ord];
                            toGlobal = multiDocValues.mapping.getGlobalOrds(ctx.ord);
                        }
                    }

                    @Override
                    public void collect(int segDoc) throws IOException {
                        // Iterator-style doc values: advance at most once per
                        // doc, then collect only if the doc has a value.
                        if (segDoc > docValues.docID()) {
                            docValues.advance(segDoc);
                        }
                        if (segDoc == docValues.docID()) {
                            long val = toGlobal.get(docValues.ordValue());
                            collectValFirstPhase(segDoc, val);
                        }
                    }
                });

    } else { // Numeric:

        // TODO support SortedNumericDocValues
        DocSetUtil.collectSortedDocSet(fcontext.base, fcontext.searcher.getIndexReader(),
                new SimpleCollector() {
                    NumericDocValues values = null; //NN

                    @Override
                    public boolean needsScores() {
                        return false;
                    }

                    @Override
                    protected void doSetNextReader(LeafReaderContext ctx) throws IOException {
                        setNextReaderFirstPhase(ctx);
                        values = DocValues.getNumeric(ctx.reader(), sf.getName());
                    }

                    @Override
                    public void collect(int segDoc) throws IOException {
                        // Same advance-then-check pattern as the string path;
                        // docs without a value are silently skipped.
                        if (segDoc > values.docID()) {
                            values.advance(segDoc);
                        }
                        if (segDoc == values.docID()) {
                            collectValFirstPhase(segDoc, values.longValue());
                        }
                    }
                });
    }
}

From source file:org.apache.solr.search.facet.FacetFieldProcessorByHashNumeric.java

License:Apache License

/**
 * Computes facet buckets for a numeric field by hashing every distinct
 * doc-values value into a long -> slot table while collecting the base doc
 * set, then selecting the top slots with a priority queue and filling the
 * response buckets.
 *
 * @return facet response containing "numBuckets", "allBuckets", "missing"
 *         and "buckets" sections as requested by the facet spec
 * @throws IOException if doc values cannot be read
 */
private SimpleOrderedMap<Object> calcFacets() throws IOException {

    final FacetRangeProcessor.Calc calc = FacetRangeProcessor.getNumericCalc(sf);

    // TODO: it would be really nice to know the number of unique values!!!!

    int possibleValues = fcontext.base.size();
    // size smaller tables so that no resize will be necessary
    int currHashSize = BitUtil.nextHighestPowerOfTwo((int) (possibleValues * (1 / LongCounts.LOAD_FACTOR) + 1));
    currHashSize = Math.min(currHashSize, MAXIMUM_STARTING_TABLE_SIZE);
    final LongCounts table = new LongCounts(currHashSize) {
        @Override
        protected void rehash() {
            super.rehash();
            // Remap already-collected slot numbers onto the grown table.
            doRehash(this);
            oldToNewMapping = null; // allow for gc
        }
    };

    int numSlots = currHashSize;

    int numMissing = 0; // NOTE(review): never read in this method — candidate for removal

    if (freq.allBuckets) {
        allBucketsSlot = numSlots++;
    }

    // Index-order comparator over the table's raw values (collect is a no-op).
    indexOrderAcc = new SlotAcc(fcontext) {
        @Override
        public void collect(int doc, int slot) throws IOException {
        }

        @Override
        public int compare(int slotA, int slotB) {
            long s1 = calc.bitsToSortableBits(table.vals[slotA]);
            long s2 = calc.bitsToSortableBits(table.vals[slotB]);
            return Long.compare(s1, s2);
        }

        @Override
        public Object getValue(int slotNum) throws IOException {
            return null;
        }

        @Override
        public void reset() {
        }

        @Override
        public void resize(Resizer resizer) {
        }
    };

    // Virtual count accumulator: reads counts straight from the hash table;
    // all mutating operations are unsupported because the table itself counts.
    countAcc = new CountSlotAcc(fcontext) {
        @Override
        public void incrementCount(int slot, int count) {
            throw new UnsupportedOperationException();
        }

        @Override
        public int getCount(int slot) {
            return table.counts[slot];
        }

        @Override
        public Object getValue(int slotNum) {
            return getCount(slotNum);
        }

        @Override
        public void reset() {
            throw new UnsupportedOperationException();
        }

        @Override
        public void collect(int doc, int slot) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public int compare(int slotA, int slotB) {
            return Integer.compare(table.counts[slotA], table.counts[slotB]);
        }

        @Override
        public void resize(Resizer resizer) {
            throw new UnsupportedOperationException();
        }
    };

    // we set the countAcc & indexAcc first so generic ones won't be created for us.
    createCollectAcc(fcontext.base.size(), numSlots);

    if (freq.allBuckets) {
        allBucketsAcc = new SpecialSlotAcc(fcontext, collectAcc, allBucketsSlot, otherAccs, 0);
    }

    NumericDocValues values = null;
    Bits docsWithField = null;

    // TODO: factor this code out so it can be shared...
    // Walk the base doc set in increasing global-id order, refreshing the
    // per-segment doc values whenever we cross a segment boundary.
    final List<LeafReaderContext> leaves = fcontext.searcher.getIndexReader().leaves();
    final Iterator<LeafReaderContext> ctxIt = leaves.iterator();
    LeafReaderContext ctx = null;
    int segBase = 0;
    int segMax;
    int adjustedMax = 0;
    for (DocIterator docsIt = fcontext.base.iterator(); docsIt.hasNext();) {
        final int doc = docsIt.nextDoc();
        if (doc >= adjustedMax) {
            do {
                ctx = ctxIt.next();
                segBase = ctx.docBase;
                segMax = ctx.reader().maxDoc();
                adjustedMax = segBase + segMax;
            } while (doc >= adjustedMax);
            assert doc >= ctx.docBase;
            setNextReaderFirstPhase(ctx);

            values = DocValues.getNumeric(ctx.reader(), sf.getName());
            docsWithField = DocValues.getDocsWithField(ctx.reader(), sf.getName());
        }

        int segDoc = doc - segBase;
        long val = values.get(segDoc);
        // A raw 0 may mean "missing"; docsWithField disambiguates real zeros.
        if (val != 0 || docsWithField.get(segDoc)) {
            int slot = table.add(val); // this can trigger a rehash

            // countAcc.incrementCount(slot, 1);
            // our countAcc is virtual, so this is not needed

            collectFirstPhase(segDoc, slot);
        }
    }

    //
    // collection done, time to find the top slots
    //

    int numBuckets = 0;
    List<Object> bucketVals = null;
    if (freq.numBuckets && fcontext.isShard()) {
        bucketVals = new ArrayList<>(100);
    }

    int off = fcontext.isShard() ? 0 : (int) freq.offset;
    // add a modest amount of over-request if this is a shard request
    int lim = freq.limit >= 0 ? (fcontext.isShard() ? (int) (freq.limit * 1.1 + 4) : (int) freq.limit)
            : Integer.MAX_VALUE;

    int maxsize = (int) (freq.limit >= 0 ? freq.offset + lim : Integer.MAX_VALUE - 1);
    maxsize = Math.min(maxsize, table.cardinality);

    final int sortMul = freq.sortDirection.getMultiplier();

    PriorityQueue<Slot> queue = new PriorityQueue<Slot>(maxsize) {
        @Override
        protected boolean lessThan(Slot a, Slot b) {
            // TODO: sort-by-index-order
            int cmp = sortAcc.compare(a.slot, b.slot) * sortMul;
            return cmp == 0 ? (indexOrderAcc.compare(a.slot, b.slot) > 0) : cmp < 0;
        }
    };

    // TODO: create a countAcc that wraps the table so we can reuse more code?

    // Push every slot meeting the mincount into the bounded queue; the
    // rejected Slot object is reused for the next candidate.
    Slot bottom = null;
    for (int i = 0; i < table.counts.length; i++) {
        int count = table.counts[i];
        if (count < effectiveMincount) {
            // either not a valid slot, or count not high enough
            continue;
        }
        numBuckets++; // can be different from the table cardinality if mincount > 1

        long val = table.vals[i];
        if (bucketVals != null && bucketVals.size() < 100) {
            bucketVals.add(calc.bitsToValue(val));
        }

        if (bottom == null) {
            bottom = new Slot();
        }
        bottom.slot = i;

        bottom = queue.insertWithOverflow(bottom);
    }

    SimpleOrderedMap<Object> res = new SimpleOrderedMap<>();
    if (freq.numBuckets) {
        if (!fcontext.isShard()) {
            res.add("numBuckets", numBuckets);
        } else {
            // Shards also report a sample of values so the merger can
            // estimate the distributed unique-bucket count.
            SimpleOrderedMap<Object> map = new SimpleOrderedMap<>(2);
            map.add("numBuckets", numBuckets);
            map.add("vals", bucketVals);
            res.add("numBuckets", map);
        }
    }

    FacetDebugInfo fdebug = fcontext.getDebugInfo();
    if (fdebug != null)
        fdebug.putInfoItem("numBuckets", (long) numBuckets);

    if (freq.allBuckets) {
        SimpleOrderedMap<Object> allBuckets = new SimpleOrderedMap<>();
        // countAcc.setValues(allBuckets, allBucketsSlot);
        allBuckets.add("count", table.numAdds);
        allBucketsAcc.setValues(allBuckets, -1);
        // allBuckets currently doesn't execute sub-facets (because it doesn't change the domain?)
        res.add("allBuckets", allBuckets);
    }

    if (freq.missing) {
        // TODO: it would be more efficient to build up a missing DocSet if we need it here anyway.

        SimpleOrderedMap<Object> missingBucket = new SimpleOrderedMap<>();
        fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null);
        res.add("missing", missingBucket);
    }

    // if we are deep paging, we don't have to order the highest "offset" counts.
    int collectCount = Math.max(0, queue.size() - off);
    assert collectCount <= lim;
    // Pop ascending from the queue to fill sortedSlots in descending rank order.
    int[] sortedSlots = new int[collectCount];
    for (int i = collectCount - 1; i >= 0; i--) {
        sortedSlots[i] = queue.pop().slot;
    }

    ArrayList<SimpleOrderedMap> bucketList = new ArrayList<>(collectCount);
    res.add("buckets", bucketList);

    boolean needFilter = deferredAggs != null || freq.getSubFacets().size() > 0;

    for (int slotNum : sortedSlots) {
        SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
        Comparable val = calc.bitsToValue(table.vals[slotNum]);
        bucket.add("val", val);

        // Only build a per-bucket filter query when deferred aggregations
        // or sub-facets actually need to re-run against the bucket domain.
        Query filter = needFilter ? sf.getType().getFieldQuery(null, sf, calc.formatValue(val)) : null;

        fillBucket(bucket, table.counts[slotNum], slotNum, null, filter);

        bucketList.add(bucket);
    }

    return res;
}

From source file:org.apache.solr.search.facet.FacetFieldProcessorNumeric.java

License:Apache License

/**
 * Computes facet buckets for a numeric field by hashing every distinct
 * doc-values value into a long -> slot table while collecting the base doc
 * set, then selecting the top slots with a priority queue and filling the
 * response buckets.
 *
 * Cleanups vs. the previous revision: raw types replaced with generics,
 * the deprecated {@code new Long(...)} boxing replaced with a cast, and the
 * unused {@code numMissing} local removed — matching the newer
 * FacetFieldProcessorByHashDV copy of this method.
 *
 * @return facet response containing "numBuckets", "allBuckets", "missing"
 *         and "buckets" sections as requested by the facet spec
 * @throws IOException if doc values cannot be read
 */
public SimpleOrderedMap<Object> calcFacets() throws IOException {

    final FacetRangeProcessor.Calc calc = FacetRangeProcessor.getNumericCalc(sf);

    // TODO: it would be really nice to know the number of unique values!!!!

    int possibleValues = fcontext.base.size();
    // size smaller tables so that no resize will be necessary
    int currHashSize = BitUtil.nextHighestPowerOfTwo((int) (possibleValues * (1 / LongCounts.LOAD_FACTOR) + 1));
    currHashSize = Math.min(currHashSize, MAXIMUM_STARTING_TABLE_SIZE);
    final LongCounts table = new LongCounts(currHashSize) {
        @Override
        protected void rehash() {
            super.rehash();
            // Remap already-collected slot numbers onto the grown table.
            doRehash(this);
            oldToNewMapping = null; // allow for gc
        }
    };

    int numSlots = currHashSize;

    if (freq.allBuckets) {
        allBucketsSlot = numSlots++;
    }

    // Index-order comparator over the table's raw values (collect is a no-op).
    indexOrderAcc = new SlotAcc(fcontext) {
        @Override
        public void collect(int doc, int slot) throws IOException {
        }

        @Override
        public int compare(int slotA, int slotB) {
            long s1 = calc.bitsToSortableBits(table.vals[slotA]);
            long s2 = calc.bitsToSortableBits(table.vals[slotB]);
            return Long.compare(s1, s2);
        }

        @Override
        public Object getValue(int slotNum) throws IOException {
            return null;
        }

        @Override
        public void reset() {
        }

        @Override
        public void resize(Resizer resizer) {
        }
    };

    // Virtual count accumulator: reads counts straight from the hash table;
    // all mutating operations are unsupported because the table itself counts.
    countAcc = new CountSlotAcc(fcontext) {
        @Override
        public void incrementCount(int slot, int count) {
            throw new UnsupportedOperationException();
        }

        @Override
        public int getCount(int slot) {
            return table.counts[slot];
        }

        @Override
        public Object getValue(int slotNum) {
            return getCount(slotNum);
        }

        @Override
        public void reset() {
            throw new UnsupportedOperationException();
        }

        @Override
        public void collect(int doc, int slot) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public int compare(int slotA, int slotB) {
            return Integer.compare(table.counts[slotA], table.counts[slotB]);
        }

        @Override
        public void resize(Resizer resizer) {
            throw new UnsupportedOperationException();
        }
    };

    // we set the countAcc & indexAcc first so generic ones won't be created for us.
    createCollectAcc(fcontext.base.size(), numSlots);

    if (freq.allBuckets) {
        allBucketsAcc = new SpecialSlotAcc(fcontext, collectAcc, allBucketsSlot, otherAccs, 0);
    }

    NumericDocValues values = null;
    Bits docsWithField = null;

    // TODO: factor this code out so it can be shared...
    // Walk the base doc set in increasing global-id order, refreshing the
    // per-segment doc values whenever we cross a segment boundary.
    final List<LeafReaderContext> leaves = fcontext.searcher.getIndexReader().leaves();
    final Iterator<LeafReaderContext> ctxIt = leaves.iterator();
    LeafReaderContext ctx = null;
    int segBase = 0;
    int segMax;
    int adjustedMax = 0;
    for (DocIterator docsIt = fcontext.base.iterator(); docsIt.hasNext();) {
        final int doc = docsIt.nextDoc();
        if (doc >= adjustedMax) {
            do {
                ctx = ctxIt.next();
                segBase = ctx.docBase;
                segMax = ctx.reader().maxDoc();
                adjustedMax = segBase + segMax;
            } while (doc >= adjustedMax);
            assert doc >= ctx.docBase;
            setNextReaderFirstPhase(ctx);

            values = DocValues.getNumeric(ctx.reader(), sf.getName());
            docsWithField = DocValues.getDocsWithField(ctx.reader(), sf.getName());
        }

        int segDoc = doc - segBase;
        long val = values.get(segDoc);
        // A raw 0 may mean "missing"; docsWithField disambiguates real zeros.
        if (val != 0 || docsWithField.get(segDoc)) {
            int slot = table.add(val); // this can trigger a rehash

            // countAcc.incrementCount(slot, 1);
            // our countAcc is virtual, so this is not needed

            collectFirstPhase(segDoc, slot);
        }
    }

    //
    // collection done, time to find the top slots
    //

    int numBuckets = 0;
    List<Object> bucketVals = null;
    if (freq.numBuckets && fcontext.isShard()) {
        bucketVals = new ArrayList<>(100);
    }

    int off = fcontext.isShard() ? 0 : (int) freq.offset;
    // add a modest amount of over-request if this is a shard request
    int lim = freq.limit >= 0 ? (fcontext.isShard() ? (int) (freq.limit * 1.1 + 4) : (int) freq.limit)
            : Integer.MAX_VALUE;

    int maxsize = (int) (freq.limit >= 0 ? freq.offset + lim : Integer.MAX_VALUE - 1);
    maxsize = Math.min(maxsize, table.cardinality);

    final int sortMul = freq.sortDirection.getMultiplier();

    PriorityQueue<Slot> queue = new PriorityQueue<Slot>(maxsize) {
        @Override
        protected boolean lessThan(Slot a, Slot b) {
            // TODO: sort-by-index-order
            int cmp = sortAcc.compare(a.slot, b.slot) * sortMul;
            return cmp == 0 ? (indexOrderAcc.compare(a.slot, b.slot) > 0) : cmp < 0;
        }
    };

    // TODO: create a countAcc that wraps the table so we can reuse more code?

    // Push every slot meeting the mincount into the bounded queue; the
    // rejected Slot object is reused for the next candidate.
    Slot bottom = null;
    for (int i = 0; i < table.counts.length; i++) {
        int count = table.counts[i];
        if (count < effectiveMincount) {
            // either not a valid slot, or count not high enough
            continue;
        }
        numBuckets++; // can be different from the table cardinality if mincount > 1

        long val = table.vals[i];
        if (bucketVals != null && bucketVals.size() < 100) {
            bucketVals.add(calc.bitsToValue(val));
        }

        if (bottom == null) {
            bottom = new Slot();
        }
        bottom.slot = i;

        bottom = queue.insertWithOverflow(bottom);
    }

    SimpleOrderedMap<Object> res = new SimpleOrderedMap<>();
    if (freq.numBuckets) {
        if (!fcontext.isShard()) {
            res.add("numBuckets", numBuckets);
        } else {
            // Shards also report a sample of values so the merger can
            // estimate the distributed unique-bucket count.
            SimpleOrderedMap<Object> map = new SimpleOrderedMap<>(2);
            map.add("numBuckets", numBuckets);
            map.add("vals", bucketVals);
            res.add("numBuckets", map);
        }
    }

    FacetDebugInfo fdebug = fcontext.getDebugInfo();
    if (fdebug != null)
        fdebug.putInfoItem("numBuckets", (long) numBuckets);

    if (freq.allBuckets) {
        SimpleOrderedMap<Object> allBuckets = new SimpleOrderedMap<>();
        // countAcc.setValues(allBuckets, allBucketsSlot);
        allBuckets.add("count", table.numAdds);
        allBucketsAcc.setValues(allBuckets, -1);
        // allBuckets currently doesn't execute sub-facets (because it doesn't change the domain?)
        res.add("allBuckets", allBuckets);
    }

    if (freq.missing) {
        // TODO: it would be more efficient to build up a missing DocSet if we need it here anyway.

        SimpleOrderedMap<Object> missingBucket = new SimpleOrderedMap<>();
        fillBucket(missingBucket, getFieldMissingQuery(fcontext.searcher, freq.field), null);
        res.add("missing", missingBucket);
    }

    // if we are deep paging, we don't have to order the highest "offset" counts.
    int collectCount = Math.max(0, queue.size() - off);
    assert collectCount <= lim;
    // Pop ascending from the queue to fill sortedSlots in descending rank order.
    int[] sortedSlots = new int[collectCount];
    for (int i = collectCount - 1; i >= 0; i--) {
        sortedSlots[i] = queue.pop().slot;
    }

    ArrayList<SimpleOrderedMap> bucketList = new ArrayList<>(collectCount);
    res.add("buckets", bucketList);

    boolean needFilter = deferredAggs != null || freq.getSubFacets().size() > 0;

    for (int slotNum : sortedSlots) {
        SimpleOrderedMap<Object> bucket = new SimpleOrderedMap<>();
        Comparable val = calc.bitsToValue(table.vals[slotNum]);
        bucket.add("val", val);

        // Only build a per-bucket filter query when deferred aggregations
        // or sub-facets actually need to re-run against the bucket domain.
        Query filter = needFilter ? sf.getType().getFieldQuery(null, sf, calc.formatValue(val)) : null;

        fillBucket(bucket, table.counts[slotNum], slotNum, null, filter);

        bucketList.add(bucket);
    }

    return res;
}

From source file:org.eclipse.dltk.internal.core.index.lucene.BitFlagsQuery.java

License:Open Source License

/**
 * Builds the doc ID set of documents whose flag bits satisfy this query:
 * at least one required (true) flag set, and no forbidden (false) flag set.
 *
 * @param context    segment context to evaluate against
 * @param acceptDocs documents accepted by deletions/filters, or null
 * @return matching doc ID set
 * @throws IOException if the flag doc values cannot be read
 */
protected DocIdSet getDocIdSet(final LeafReaderContext context, Bits acceptDocs) throws IOException {
    final NumericDocValues flagValues = DocValues.getNumeric(context.reader(), IndexFields.NDV_FLAGS);
    return new DocValuesDocIdSet(context.reader().maxDoc(), acceptDocs) {
        @Override
        protected boolean matchDoc(int doc) {
            final long flags = flagValues.get(doc);
            // When required flags are specified, at least one must be present.
            if (fTrueFlags != 0 && (flags & fTrueFlags) == 0) {
                return false;
            }
            // When forbidden flags are specified, none may be present.
            return fFalseFlags == 0 || (flags & fFalseFlags) == 0;
        }
    };
}