Example usage for org.apache.lucene.document LongPoint setLongValue

List of usage examples for org.apache.lucene.document LongPoint setLongValue

Introduction

On this page you can find an example usage of org.apache.lucene.document LongPoint setLongValue.

Prototype

@Override
    public void setLongValue(long value) 

Source Link

Usage

From source file:com.vmware.xenon.services.common.LuceneIndexDocumentHelper.java

License:Open Source License

/**
 * Adds the index fields needed to represent a numeric (long) property on the
 * current document: an optional stored field, a LongPoint for search, a
 * NumericDocValuesField for grouping, and an optional sorted field for groupBy.
 *
 * @param propertyName     name of the property being indexed
 * @param propertyValue    the long value to index
 * @param isStored         whether to also persist the raw value on the document
 * @param isCollectionItem whether the property is an element of a collection
 *                         (collection items never reuse cached field instances)
 * @param sorted           whether to add the extra "_group"-suffixed sorted field
 */
void addNumericField(String propertyName, long propertyValue, boolean isStored, boolean isCollectionItem,
        boolean sorted) {
    // Optionally persist the raw value so it can be returned with results.
    if (isStored) {
        Field stored = isCollectionItem
                ? new StoredField(propertyName, propertyValue)
                : getAndSetStoredField(propertyName, propertyValue);
        this.doc.add(stored);
    }

    // A LongPoint index field enables efficient exact-match and range queries.
    // Collection items always get a fresh instance; scalar properties reuse a
    // cached field object and simply update its value.
    if (isCollectionItem) {
        this.doc.add(new LongPoint(propertyName, propertyValue));
    } else {
        LongPoint cached = this.longPointFields.computeIfAbsent(propertyName,
                k -> new LongPoint(k, propertyValue));
        cached.setLongValue(propertyValue);
        this.doc.add(cached);
    }

    // NumericDocValues allow for efficient group operations on the property.
    this.doc.add(getAndSetNumericField(propertyName, propertyValue, isCollectionItem));

    if (sorted) {
        // Special handling for groupBy queries: the same docValues field cannot
        // be added twice, so the property name is suffixed with "_group" and a
        // SortedDocValuesField is added under that derived name.
        Field grouped = getAndSetSortedStoredField(propertyName + GROUP_BY_PROPERTY_NAME_SUFFIX,
                Long.toString(propertyValue));
        this.doc.add(grouped);
    }
}

From source file:org.apache.solr.uninverting.TestFieldCache.java

License:Apache License

/**
 * Indexes a random sequence of long values (deliberately leaving the field off
 * some documents) and verifies the field cache reads back exactly the values
 * that were written, in order, skipping the missing docs.
 */
public void testLongFieldCache() throws IOException {
    Directory dir = newDirectory();
    IndexWriterConfig cfg = newIndexWriterConfig(new MockAnalyzer(random()));
    cfg.setMergePolicy(newLogMergePolicy());
    RandomIndexWriter iw = new RandomIndexWriter(random(), dir, cfg);

    // One reusable document whose field instances are mutated per iteration.
    Document doc = new Document();
    LongPoint indexed = new LongPoint("f", 0L);
    StoredField stored = new StoredField("f", 0L);
    doc.add(indexed);
    doc.add(stored);

    final long[] values = new long[TestUtil.nextInt(random(), 1, 10)];
    Set<Integer> missing = new HashSet<>();
    for (int i = 0; i < values.length; ++i) {
        // Bias toward small values but occasionally hit the extremes and zero.
        final long v;
        int pick = random().nextInt(10);
        if (pick == 0) {
            v = Long.MIN_VALUE;
        } else if (pick == 1) {
            v = 0;
        } else if (pick == 2) {
            v = Long.MAX_VALUE;
        } else {
            v = TestUtil.nextLong(random(), -10, 10);
        }
        values[i] = v;
        if (v == 0 && random().nextBoolean()) {
            // Randomly index a document without the field at all.
            iw.addDocument(new Document());
            missing.add(i);
        } else {
            indexed.setLongValue(v);
            stored.setLongValue(v);
            iw.addDocument(doc);
        }
    }

    iw.forceMerge(1);
    final DirectoryReader reader = iw.getReader();
    final NumericDocValues longs = FieldCache.DEFAULT.getNumerics(getOnlyLeafReader(reader), "f",
            FieldCache.LONG_POINT_PARSER);
    for (int i = 0; i < values.length; ++i) {
        if (!missing.contains(i)) {
            assertEquals(i, longs.nextDoc());
            assertEquals(values[i], longs.longValue());
        }
    }
    assertEquals(NO_MORE_DOCS, longs.nextDoc());

    reader.close();
    iw.close();
    dir.close();
}

From source file:org.elasticsearch.index.mapper.core.DateFieldTypeTests.java

License:Apache License

/**
 * Indexes two documents holding two distinct date instants under "my_date" and
 * exercises isFieldWithinQuery across format/timezone combinations; also checks
 * that a field with no indexed values reports DISJOINT.
 */
public void testIsFieldWithinQuery() throws IOException {
    long firstInstant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12")
            .getMillis();
    long secondInstant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2016-04-03")
            .getMillis();

    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));

    // Reuse one document/field pair, mutating the value between adds.
    Document doc = new Document();
    LongPoint dateField = new LongPoint("my_date", firstInstant);
    doc.add(dateField);
    w.addDocument(doc);
    dateField.setLongValue(secondInstant);
    w.addDocument(doc);

    DirectoryReader reader = DirectoryReader.open(w);
    DateFieldType ft = new DateFieldType();
    ft.setName("my_date");
    DateMathParser alternateFormat = new DateMathParser(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER);

    // All combinations of default/alternate parser and null/UTC timezone.
    doTestIsFieldWithinQuery(ft, reader, null, null);
    doTestIsFieldWithinQuery(ft, reader, null, alternateFormat);
    doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null);
    doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat);

    // A field with no value indexed must be disjoint from any range.
    DateFieldType emptyFieldType = new DateFieldType();
    emptyFieldType.setName("my_date2");
    assertEquals(Relation.DISJOINT,
            emptyFieldType.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null));

    IOUtils.close(reader, w, dir);
}

From source file:org.elasticsearch.index.mapper.core.ScaledFloatFieldTypeTests.java

License:Apache License

/**
 * Verifies ScaledFloatFieldType.stats(): null for an empty index, correctly
 * scaled min/max and maxDoc after indexing two values, and null again after
 * all documents are deleted.
 */
public void testStats() throws IOException {
    ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType();
    ft.setName("scaled_float");
    ft.setScalingFactor(0.1 + randomDouble() * 100);

    Directory dir = newDirectory();
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));

    // Empty index: no stats available.
    try (DirectoryReader reader = DirectoryReader.open(w)) {
        assertNull(ft.stats(reader));
    }

    // Index two raw long values (-1 and 10) via a reused field instance.
    Document doc = new Document();
    LongPoint point = new LongPoint("scaled_float", -1);
    doc.add(point);
    w.addDocument(doc);
    point.setLongValue(10);
    w.addDocument(doc);

    try (DirectoryReader reader = DirectoryReader.open(w)) {
        FieldStats<?> stats = ft.stats(reader);
        // Stats report the decoded (unscaled) float values.
        assertEquals(-1 / ft.getScalingFactor(), stats.getMinValue());
        assertEquals(10 / ft.getScalingFactor(), stats.getMaxValue());
        assertEquals(2, stats.getMaxDoc());
    }

    // After deleting everything, stats become unavailable again.
    w.deleteAll();
    try (DirectoryReader reader = DirectoryReader.open(w)) {
        assertNull(ft.stats(reader));
    }

    IOUtils.close(w, dir);
}