Example usage for org.apache.lucene.index SortedDocValues lookupOrd

List of usage examples for org.apache.lucene.index SortedDocValues lookupOrd

Introduction

On this page you can find example usages of org.apache.lucene.index.SortedDocValues#lookupOrd, collected from open-source projects.

Prototype

public abstract BytesRef lookupOrd(int ord) throws IOException;

Document

Retrieves the value for the specified ordinal.
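
Before looking at the project examples below, here is a minimal, self-contained sketch of the typical call pattern: position the doc-values iterator on a document, read its ordinal, and resolve that ordinal to a term with lookupOrd. It uses the newer iterator-based SortedDocValues API (advanceExact/ordValue, as in the ParentJoinFieldSubFetchPhase example further down; older examples on this page use getOrd(docId) instead). The DirectoryReader, the field name "category", and the document id are illustrative assumptions only, not taken from any of the projects below.

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;

// Minimal sketch (not from any project below): resolve the term stored for one
// document of one segment. The field name "category" is an assumed example.
public static String lookupCategory(DirectoryReader reader, int segment, int segDocId) throws IOException {
    LeafReader leaf = reader.leaves().get(segment).reader();
    SortedDocValues dv = leaf.getSortedDocValues("category");
    if (dv == null || dv.advanceExact(segDocId) == false) {
        return null; // field missing in this segment, or the doc has no value
    }
    int ord = dv.ordValue();           // per-segment ordinal of the doc's value
    BytesRef term = dv.lookupOrd(ord); // dictionary lookup: ordinal -> term bytes
    return term.utf8ToString();
}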

Usage

From source file:org.alfresco.solr.transformer.DocValueDocTransformer.java

License:Open Source License

@Override
public void transform(SolrDocument doc, int docid, float score) throws IOException {
    for (String fieldName : context.getSearcher().getFieldNames()) {
        SchemaField schemaField = context.getSearcher().getSchema().getFieldOrNull(fieldName);
        if (schemaField != null) {
            if (schemaField.hasDocValues()) {
                SortedDocValues sortedDocValues = context.getSearcher().getSlowAtomicReader()
                        .getSortedDocValues(fieldName);
                if (sortedDocValues != null) {
                    int ordinal = sortedDocValues.getOrd(docid);
                    if (ordinal > -1) {
                        doc.removeFields(fieldName);
                        String alfrescoFieldName = AlfrescoSolrDataModel.getInstance()
                                .getAlfrescoPropertyFromSchemaField(fieldName);
                        doc.removeFields(alfrescoFieldName);
                        doc.addField(alfrescoFieldName, schemaField.getType().toObject(schemaField,
                                sortedDocValues.lookupOrd(ordinal)));
                    }
                }

                SortedSetDocValues sortedSetDocValues = context.getSearcher().getSlowAtomicReader()
                        .getSortedSetDocValues(fieldName);
                if (sortedSetDocValues != null) {
                    ArrayList<Object> newValues = new ArrayList<Object>();
                    sortedSetDocValues.setDocument(docid);
                    long ordinal;
                    while ((ordinal = sortedSetDocValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) {
                        newValues.add(schemaField.getType().toObject(schemaField,
                                sortedSetDocValues.lookupOrd(ordinal)));
                    }
                    doc.removeFields(fieldName);
                    String alfrescoFieldName = AlfrescoSolrDataModel.getInstance()
                            .getAlfrescoPropertyFromSchemaField(fieldName);
                    doc.removeFields(alfrescoFieldName);
                    doc.addField(alfrescoFieldName, newValues);
                }

                BinaryDocValues binaryDocValues = context.getSearcher().getSlowAtomicReader()
                        .getBinaryDocValues(fieldName);
                if (binaryDocValues != null) {
                    doc.removeFields(fieldName);
                    String alfrescoFieldName = AlfrescoSolrDataModel.getInstance()
                            .getAlfrescoPropertyFromSchemaField(fieldName);
                    doc.removeFields(alfrescoFieldName);
                    doc.addField(alfrescoFieldName,
                            schemaField.getType().toObject(schemaField, binaryDocValues.get(docid)));
                }

                if (schemaField.getType().getNumericType() != null) {
                    NumericDocValues numericDocValues = context.getSearcher().getSlowAtomicReader()
                            .getNumericDocValues(fieldName);
                    if (numericDocValues != null) {
                        doc.removeFields(fieldName);
                        String alfrescoFieldName = AlfrescoSolrDataModel.getInstance()
                                .getAlfrescoPropertyFromSchemaField(fieldName);
                        doc.removeFields(alfrescoFieldName);
                        switch (schemaField.getType().getNumericType()) {
                        case DOUBLE:
                            doc.addField(alfrescoFieldName,
                                    Double.longBitsToDouble(numericDocValues.get(docid)));
                            break;
                        case FLOAT:
                            doc.addField(alfrescoFieldName,
                                    Float.intBitsToFloat((int) numericDocValues.get(docid)));
                            break;
                        case INT:
                            doc.addField(alfrescoFieldName, (int) numericDocValues.get(docid));
                            break;
                        case LONG:
                            doc.addField(alfrescoFieldName, numericDocValues.get(docid));
                            break;
                        }
                    }

                    SortedNumericDocValues sortedNumericDocValues = context.getSearcher().getSlowAtomicReader()
                            .getSortedNumericDocValues(fieldName);
                    if (sortedNumericDocValues != null) {
                        sortedNumericDocValues.setDocument(docid);
                        doc.removeFields(fieldName);
                        String alfrescoFieldName = AlfrescoSolrDataModel.getInstance()
                                .getAlfrescoPropertyFromSchemaField(fieldName);
                        doc.removeFields(alfrescoFieldName);
                        ArrayList<Object> newValues = new ArrayList<Object>(sortedNumericDocValues.count());
                        if (sortedNumericDocValues.count() > 0) {

                            for (int i = 0; i < sortedNumericDocValues.count(); i++) {
                                switch (schemaField.getType().getNumericType()) {
                                case DOUBLE:
                                    newValues.add(NumericUtils
                                            .sortableLongToDouble(sortedNumericDocValues.valueAt(i)));
                                    break;
                                case FLOAT:
                                    newValues.add(NumericUtils
                                            .sortableIntToFloat((int) sortedNumericDocValues.valueAt(i)));
                                    break;
                                case INT:
                                    newValues.add((int) sortedNumericDocValues.valueAt(i));
                                    break;
                                case LONG:
                                    newValues.add(sortedNumericDocValues.valueAt(i));
                                    break;

                                }
                            }
                        }
                        doc.addField(alfrescoFieldName, newValues);

                    }
                }
            }
        }
    }

}

From source file:org.apache.solr.handler.component.ExpandComponent.java

License:Apache License

@SuppressWarnings("unchecked")
@Override
public void process(ResponseBuilder rb) throws IOException {

    if (!rb.doExpand) {
        return;
    }

    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();

    boolean isShard = params.getBool(ShardParams.IS_SHARD, false);
    String ids = params.get(ShardParams.IDS);

    if (ids == null && isShard) {
        return;
    }

    String field = params.get(ExpandParams.EXPAND_FIELD);
    if (field == null) {
        List<Query> filters = rb.getFilters();
        if (filters != null) {
            for (Query q : filters) {
                if (q instanceof CollapsingQParserPlugin.CollapsingPostFilter) {
                    CollapsingQParserPlugin.CollapsingPostFilter cp = (CollapsingQParserPlugin.CollapsingPostFilter) q;
                    field = cp.getField();
                }
            }
        }
    }

    if (field == null) {
        throw new IOException("Expand field is null.");
    }

    String sortParam = params.get(ExpandParams.EXPAND_SORT);
    String[] fqs = params.getParams(ExpandParams.EXPAND_FQ);
    String qs = params.get(ExpandParams.EXPAND_Q);
    int limit = params.getInt(ExpandParams.EXPAND_ROWS, 5);

    Sort sort = null;

    if (sortParam != null) {
        sort = QueryParsing.parseSortSpec(sortParam, rb.req).getSort();
    }

    Query query;
    if (qs == null) {
        query = rb.getQuery();
    } else {
        try {
            QParser parser = QParser.getParser(qs, null, req);
            query = parser.getQuery();
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    List<Query> newFilters = new ArrayList<>();

    if (fqs == null) {
        List<Query> filters = rb.getFilters();
        if (filters != null) {
            for (Query q : filters) {
                if (!(q instanceof CollapsingQParserPlugin.CollapsingPostFilter)) {
                    newFilters.add(q);
                }
            }
        }
    } else {
        try {
            for (String fq : fqs) {
                if (fq != null && fq.trim().length() != 0 && !fq.equals("*:*")) {
                    QParser fqp = QParser.getParser(fq, null, req);
                    newFilters.add(fqp.getQuery());
                }
            }
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    SolrIndexSearcher searcher = req.getSearcher();
    AtomicReader reader = searcher.getAtomicReader();
    SortedDocValues values = FieldCache.DEFAULT.getTermsIndex(reader, field);
    FixedBitSet groupBits = new FixedBitSet(values.getValueCount());
    DocList docList = rb.getResults().docList;
    IntOpenHashSet collapsedSet = new IntOpenHashSet(docList.size() * 2);

    DocIterator idit = docList.iterator();

    while (idit.hasNext()) {
        int doc = idit.nextDoc();
        int ord = values.getOrd(doc);
        if (ord > -1) {
            groupBits.set(ord);
            collapsedSet.add(doc);
        }
    }

    Collector collector;
    if (sort != null)
        sort = sort.rewrite(searcher);
    GroupExpandCollector groupExpandCollector = new GroupExpandCollector(values, groupBits, collapsedSet, limit,
            sort);
    SolrIndexSearcher.ProcessedFilter pfilter = searcher.getProcessedFilter(null, newFilters);
    if (pfilter.postFilter != null) {
        pfilter.postFilter.setLastDelegate(groupExpandCollector);
        collector = pfilter.postFilter;
    } else {
        collector = groupExpandCollector;
    }

    searcher.search(query, pfilter.filter, collector);
    IntObjectMap groups = groupExpandCollector.getGroups();
    Map<String, DocSlice> outMap = new HashMap();
    CharsRef charsRef = new CharsRef();
    FieldType fieldType = searcher.getSchema().getField(field).getType();
    for (IntObjectCursor cursor : (Iterable<IntObjectCursor>) groups) {
        int ord = cursor.key;
        TopDocsCollector topDocsCollector = (TopDocsCollector) cursor.value;
        TopDocs topDocs = topDocsCollector.topDocs();
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        if (scoreDocs.length > 0) {
            int[] docs = new int[scoreDocs.length];
            float[] scores = new float[scoreDocs.length];
            for (int i = 0; i < docs.length; i++) {
                ScoreDoc scoreDoc = scoreDocs[i];
                docs[i] = scoreDoc.doc;
                scores[i] = scoreDoc.score;
            }
            DocSlice slice = new DocSlice(0, docs.length, docs, scores, topDocs.totalHits,
                    topDocs.getMaxScore());
            final BytesRef bytesRef = values.lookupOrd(ord);
            fieldType.indexedToReadable(bytesRef, charsRef);
            String group = charsRef.toString();
            outMap.put(group, slice);
        }
    }

    rb.rsp.add("expanded", outMap);
}

From source file:org.apache.solr.search.facet.FacetFieldProcessorByHashDV.java

License:Apache License

private void collectDocs() throws IOException {
    if (calc instanceof TermOrdCalc) { // Strings

        // TODO support SortedSetDocValues
        SortedDocValues globalDocValues = FieldUtil.getSortedDocValues(fcontext.qcontext, sf, null);
        ((TermOrdCalc) calc).lookupOrdFunction = ord -> {
            try {
                return globalDocValues.lookupOrd(ord);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        };

        DocSetUtil.collectSortedDocSet(fcontext.base, fcontext.searcher.getIndexReader(),
                new SimpleCollector() {
                    SortedDocValues docValues = globalDocValues; // this segment/leaf. NN
                    LongValues toGlobal = LongValues.IDENTITY; // this segment to global ordinal. NN

                    @Override
                    public boolean needsScores() {
                        return false;
                    }

                    @Override
                    protected void doSetNextReader(LeafReaderContext ctx) throws IOException {
                        setNextReaderFirstPhase(ctx);
                        if (globalDocValues instanceof MultiDocValues.MultiSortedDocValues) {
                            MultiDocValues.MultiSortedDocValues multiDocValues = (MultiDocValues.MultiSortedDocValues) globalDocValues;
                            docValues = multiDocValues.values[ctx.ord];
                            toGlobal = multiDocValues.mapping.getGlobalOrds(ctx.ord);
                        }
                    }

                    @Override
                    public void collect(int segDoc) throws IOException {
                        if (segDoc > docValues.docID()) {
                            docValues.advance(segDoc);
                        }
                        if (segDoc == docValues.docID()) {
                            long val = toGlobal.get(docValues.ordValue());
                            collectValFirstPhase(segDoc, val);
                        }
                    }
                });

    } else { // Numeric:

        // TODO support SortedNumericDocValues
        DocSetUtil.collectSortedDocSet(fcontext.base, fcontext.searcher.getIndexReader(),
                new SimpleCollector() {
                    NumericDocValues values = null; //NN

                    @Override
                    public boolean needsScores() {
                        return false;
                    }

                    @Override
                    protected void doSetNextReader(LeafReaderContext ctx) throws IOException {
                        setNextReaderFirstPhase(ctx);
                        values = DocValues.getNumeric(ctx.reader(), sf.getName());
                    }

                    @Override
                    public void collect(int segDoc) throws IOException {
                        if (segDoc > values.docID()) {
                            values.advance(segDoc);
                        }
                        if (segDoc == values.docID()) {
                            collectValFirstPhase(segDoc, values.longValue());
                        }
                    }
                });
    }
}

From source file:org.apache.solr.uninverting.TestFieldCache.java

License:Apache License

public void test() throws IOException {
    FieldCache cache = FieldCache.DEFAULT;
    NumericDocValues doubles = cache.getNumerics(reader, "theDouble", FieldCache.DOUBLE_POINT_PARSER);
    for (int i = 0; i < NUM_DOCS; i++) {
        assertEquals(i, doubles.nextDoc());
        assertEquals(Double.doubleToLongBits(Double.MAX_VALUE - i), doubles.longValue());
    }

    NumericDocValues longs = cache.getNumerics(reader, "theLong", FieldCache.LONG_POINT_PARSER);
    for (int i = 0; i < NUM_DOCS; i++) {
        assertEquals(i, longs.nextDoc());
        assertEquals(Long.MAX_VALUE - i, longs.longValue());
    }

    NumericDocValues ints = cache.getNumerics(reader, "theInt", FieldCache.INT_POINT_PARSER);
    for (int i = 0; i < NUM_DOCS; i++) {
        assertEquals(i, ints.nextDoc());
        assertEquals(Integer.MAX_VALUE - i, ints.longValue());
    }

    NumericDocValues floats = cache.getNumerics(reader, "theFloat", FieldCache.FLOAT_POINT_PARSER);
    for (int i = 0; i < NUM_DOCS; i++) {
        assertEquals(i, floats.nextDoc());
        assertEquals(Float.floatToIntBits(Float.MAX_VALUE - i), floats.longValue());
    }

    Bits docsWithField = cache.getDocsWithField(reader, "theLong", FieldCache.LONG_POINT_PARSER);
    assertSame("Second request to cache return same array", docsWithField,
            cache.getDocsWithField(reader, "theLong", FieldCache.LONG_POINT_PARSER));
    assertTrue("docsWithField(theLong) must be class Bits.MatchAllBits",
            docsWithField instanceof Bits.MatchAllBits);
    assertTrue("docsWithField(theLong) Size: " + docsWithField.length() + " is not: " + NUM_DOCS,
            docsWithField.length() == NUM_DOCS);
    for (int i = 0; i < docsWithField.length(); i++) {
        assertTrue(docsWithField.get(i));
    }

    docsWithField = cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER);
    assertSame("Second request to cache return same array", docsWithField,
            cache.getDocsWithField(reader, "sparse", FieldCache.INT_POINT_PARSER));
    assertFalse("docsWithField(sparse) must not be class Bits.MatchAllBits",
            docsWithField instanceof Bits.MatchAllBits);
    assertTrue("docsWithField(sparse) Size: " + docsWithField.length() + " is not: " + NUM_DOCS,
            docsWithField.length() == NUM_DOCS);
    for (int i = 0; i < docsWithField.length(); i++) {
        assertEquals(i % 2 == 0, docsWithField.get(i));
    }

    // getTermsIndex
    SortedDocValues termsIndex = cache.getTermsIndex(reader, "theRandomUnicodeString");
    for (int i = 0; i < NUM_DOCS; i++) {
        final String s;
        if (i > termsIndex.docID()) {
            termsIndex.advance(i);
        }
        if (i == termsIndex.docID()) {
            s = termsIndex.binaryValue().utf8ToString();
        } else {
            s = null;
        }
        assertTrue("for doc " + i + ": " + s + " does not equal: " + unicodeStrings[i],
                unicodeStrings[i] == null || unicodeStrings[i].equals(s));
    }

    int nTerms = termsIndex.getValueCount();

    TermsEnum tenum = termsIndex.termsEnum();
    for (int i = 0; i < nTerms; i++) {
        BytesRef val1 = BytesRef.deepCopyOf(tenum.next());
        final BytesRef val = termsIndex.lookupOrd(i);
        // System.out.println("i="+i);
        assertEquals(val, val1);
    }

    // seek the enum around (note this isn't a great test here)
    int num = atLeast(100);
    for (int i = 0; i < num; i++) {
        int k = random().nextInt(nTerms);
        final BytesRef val = BytesRef.deepCopyOf(termsIndex.lookupOrd(k));
        assertEquals(TermsEnum.SeekStatus.FOUND, tenum.seekCeil(val));
        assertEquals(val, tenum.term());
    }

    for (int i = 0; i < nTerms; i++) {
        final BytesRef val = BytesRef.deepCopyOf(termsIndex.lookupOrd(i));
        assertEquals(TermsEnum.SeekStatus.FOUND, tenum.seekCeil(val));
        assertEquals(val, tenum.term());
    }

    // test bad field
    termsIndex = cache.getTermsIndex(reader, "bogusfield");

    // getTerms
    BinaryDocValues terms = cache.getTerms(reader, "theRandomUnicodeString");
    for (int i = 0; i < NUM_DOCS; i++) {
        if (terms.docID() < i) {
            terms.nextDoc();
        }
        if (terms.docID() == i) {
            assertEquals(unicodeStrings[i], terms.binaryValue().utf8ToString());
        } else {
            assertNull(unicodeStrings[i]);
        }
    }

    // test bad field
    terms = cache.getTerms(reader, "bogusfield");

    // getDocTermOrds
    SortedSetDocValues termOrds = cache.getDocTermOrds(reader, "theRandomUnicodeMultiValuedField", null);
    int numEntries = cache.getCacheEntries().length;
    // ask for it again, and check that we didnt create any additional entries:
    termOrds = cache.getDocTermOrds(reader, "theRandomUnicodeMultiValuedField", null);
    assertEquals(numEntries, cache.getCacheEntries().length);

    for (int i = 0; i < NUM_DOCS; i++) {
        // This will remove identical terms. A DocTermOrds doesn't return duplicate ords for a docId
        List<BytesRef> values = new ArrayList<>(new LinkedHashSet<>(Arrays.asList(multiValued[i])));
        for (BytesRef v : values) {
            if (v == null) {
                // why does this test use null values... instead of an empty list: confusing
                break;
            }
            if (i > termOrds.docID()) {
                assertEquals(i, termOrds.nextDoc());
            }
            long ord = termOrds.nextOrd();
            assert ord != SortedSetDocValues.NO_MORE_ORDS;
            BytesRef scratch = termOrds.lookupOrd(ord);
            assertEquals(v, scratch);
        }
        if (i == termOrds.docID()) {
            assertEquals(SortedSetDocValues.NO_MORE_ORDS, termOrds.nextOrd());
        }
    }

    // test bad field
    termOrds = cache.getDocTermOrds(reader, "bogusfield", null);
    assertTrue(termOrds.getValueCount() == 0);

    FieldCache.DEFAULT.purgeByCacheKey(reader.getCoreCacheKey());
}

From source file:org.apache.solr.uninverting.TestFieldCacheVsDocValues.java

License:Apache License

private void assertEquals(int maxDoc, SortedDocValues expected, SortedDocValues actual) throws Exception {
    // can be null for the segment if no docs actually had any SortedDocValues
    // in this case FC.getDocTermsOrds returns EMPTY
    if (actual == null) {
        assertEquals(expected.getValueCount(), 0);
        return;
    }
    assertEquals(expected.getValueCount(), actual.getValueCount());

    // compare ord lists
    while (true) {
        int docID = expected.nextDoc();
        if (docID == NO_MORE_DOCS) {
            assertEquals(NO_MORE_DOCS, actual.nextDoc());
            break;
        }
        assertEquals(docID, actual.nextDoc());
        assertEquals(expected.ordValue(), actual.ordValue());
        assertEquals(expected.binaryValue(), actual.binaryValue());
    }

    // compare ord dictionary
    for (long i = 0; i < expected.getValueCount(); i++) {
        final BytesRef expectedBytes = BytesRef.deepCopyOf(expected.lookupOrd((int) i));
        final BytesRef actualBytes = actual.lookupOrd((int) i);
        assertEquals(expectedBytes, actualBytes);
    }

    // compare termsenum
    assertEquals(expected.getValueCount(), expected.termsEnum(), actual.termsEnum());
}

From source file:org.codelibs.elasticsearch.search.MultiValueMode.java

License:Apache License

/**
 * Return a {@link SortedDocValues} instance that can be used to sort root documents
 * with this mode, the provided values and filters for root/inner documents.
 *
 * For every root document, the values of its inner documents will be aggregated.
 *
 * Allowed Modes: MIN, MAX
 *
 * NOTE: Calling the returned instance on docs that are not root docs is illegal.
 *       The returned instance can only be used to evaluate the current and upcoming docs.
 */
public SortedDocValues select(final RandomAccessOrds values, final BitSet rootDocs,
        final DocIdSetIterator innerDocs) throws IOException {
    if (rootDocs == null || innerDocs == null) {
        return select(DocValues.emptySortedSet());
    }
    final SortedDocValues selectedValues = select(values);

    return new SortedDocValues() {

        int lastSeenRootDoc = 0;
        int lastEmittedOrd = -1;

        @Override
        public BytesRef lookupOrd(int ord) {
            return selectedValues.lookupOrd(ord);
        }

        @Override
        public int getValueCount() {
            return selectedValues.getValueCount();
        }

        @Override
        public int getOrd(int rootDoc) {
            assert rootDocs.get(rootDoc) : "can only sort root documents";
            assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs";
            if (rootDoc == lastSeenRootDoc) {
                return lastEmittedOrd;
            }

            try {
                final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1);
                final int firstNestedDoc;
                if (innerDocs.docID() > prevRootDoc) {
                    firstNestedDoc = innerDocs.docID();
                } else {
                    firstNestedDoc = innerDocs.advance(prevRootDoc + 1);
                }

                lastSeenRootDoc = rootDoc;
                return lastEmittedOrd = pick(selectedValues, innerDocs, firstNestedDoc, rootDoc);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    };
}

From source file:org.elasticsearch.index.fielddata.fieldcomparator.ReplaceMissingTests.java

License:Apache License

public void test() throws Exception {
    Directory dir = newDirectory();
    IndexWriterConfig iwc = newIndexWriterConfig(null);
    iwc.setMergePolicy(newLogMergePolicy());
    IndexWriter iw = new IndexWriter(dir, iwc);

    Document doc = new Document();
    doc.add(new SortedDocValuesField("field", new BytesRef("cat")));
    iw.addDocument(doc);

    doc = new Document();
    iw.addDocument(doc);

    doc = new Document();
    doc.add(new SortedDocValuesField("field", new BytesRef("dog")));
    iw.addDocument(doc);
    iw.forceMerge(1);
    iw.close();

    DirectoryReader reader = DirectoryReader.open(dir);
    LeafReader ar = getOnlySegmentReader(reader);
    SortedDocValues raw = ar.getSortedDocValues("field");
    assertEquals(2, raw.getValueCount());

    // existing values
    SortedDocValues dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("cat"));
    assertEquals(2, dv.getValueCount());
    assertEquals("cat", dv.lookupOrd(0).utf8ToString());
    assertEquals("dog", dv.lookupOrd(1).utf8ToString());

    assertEquals(0, dv.getOrd(0));
    assertEquals(0, dv.getOrd(1));
    assertEquals(1, dv.getOrd(2));

    dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("dog"));
    assertEquals(2, dv.getValueCount());
    assertEquals("cat", dv.lookupOrd(0).utf8ToString());
    assertEquals("dog", dv.lookupOrd(1).utf8ToString());

    assertEquals(0, dv.getOrd(0));
    assertEquals(1, dv.getOrd(1));
    assertEquals(1, dv.getOrd(2));

    // non-existing values
    dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("apple"));
    assertEquals(3, dv.getValueCount());
    assertEquals("apple", dv.lookupOrd(0).utf8ToString());
    assertEquals("cat", dv.lookupOrd(1).utf8ToString());
    assertEquals("dog", dv.lookupOrd(2).utf8ToString());

    assertEquals(1, dv.getOrd(0));
    assertEquals(0, dv.getOrd(1));
    assertEquals(2, dv.getOrd(2));

    dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("company"));
    assertEquals(3, dv.getValueCount());
    assertEquals("cat", dv.lookupOrd(0).utf8ToString());
    assertEquals("company", dv.lookupOrd(1).utf8ToString());
    assertEquals("dog", dv.lookupOrd(2).utf8ToString());

    assertEquals(0, dv.getOrd(0));
    assertEquals(1, dv.getOrd(1));
    assertEquals(2, dv.getOrd(2));

    dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("ebay"));
    assertEquals(3, dv.getValueCount());
    assertEquals("cat", dv.lookupOrd(0).utf8ToString());
    assertEquals("dog", dv.lookupOrd(1).utf8ToString());
    assertEquals("ebay", dv.lookupOrd(2).utf8ToString());

    assertEquals(0, dv.getOrd(0));
    assertEquals(2, dv.getOrd(1));
    assertEquals(1, dv.getOrd(2));

    reader.close();
    dir.close();
}

From source file:org.elasticsearch.index.fielddata.plain.AbstractAtomicParentChildFieldData.java

License:Apache License

@Override
public final SortedBinaryDocValues getBytesValues() {
    return new SortedBinaryDocValues() {

        private final BytesRef[] terms = new BytesRef[2];
        private int count;

        @Override
        public void setDocument(int docId) {
            count = 0;
            for (String type : types()) {
                final SortedDocValues values = getOrdinalsValues(type);
                final int ord = values.getOrd(docId);
                if (ord >= 0) {
                    terms[count++] = values.lookupOrd(ord);
                }
            }
            assert count <= 2 : "A single doc can potentially be both parent and child, so the maximum allowed values is 2";
            if (count > 1) {
                int cmp = terms[0].compareTo(terms[1]);
                if (cmp > 0) {
                    ArrayUtil.swap(terms, 0, 1);
                } else if (cmp == 0) {
                    // If the id is the same between types, only keep one. For example: a doc has parent#1 in the _uid field and grand_parent#1 in the _parent field.
                    count = 1;
                }
            }
        }

        @Override
        public int count() {
            return count;
        }

        @Override
        public BytesRef valueAt(int index) {
            return terms[index];
        }
    };
}

From source file:org.elasticsearch.join.fetch.ParentJoinFieldSubFetchPhase.java

License:Apache License

private String getSortedDocValue(String field, LeafReader reader, int docId) {
    try {
        SortedDocValues docValues = reader.getSortedDocValues(field);
        if (docValues == null || docValues.advanceExact(docId) == false) {
            return null;
        }
        int ord = docValues.ordValue();
        BytesRef joinName = docValues.lookupOrd(ord);
        return joinName.utf8ToString();
    } catch (IOException e) {
        throw ExceptionsHelper.convertToElastic(e);
    }
}