Example usage for org.apache.lucene.document NumericDocValuesField setLongValue

List of usage examples for org.apache.lucene.document NumericDocValuesField setLongValue

Introduction

This page shows example usages of org.apache.lucene.document NumericDocValuesField.setLongValue.

Prototype

public void setLongValue(long value) 

Source Link

Document

Expert: change the value of this field.

Usage

From source file:com.vmware.xenon.services.common.LuceneIndexDocumentHelper.java

License:Open Source License

/**
 * Returns a {@link NumericDocValuesField} for the given property name, set to the
 * given value.
 *
 * <p>For non-collection properties the field instance is cached per property name in
 * {@code this.numericFields} and reused across documents; collection items get a
 * fresh instance each time because several values for the same name can coexist in
 * one document.
 *
 * @param propertyName     the Lucene field name
 * @param propertyValue    the numeric value to store
 * @param isCollectionItem whether this value is one element of a collection property
 * @return a field carrying {@code propertyValue}, cached unless {@code isCollectionItem}
 */
private NumericDocValuesField getAndSetNumericField(String propertyName, long propertyValue,
        boolean isCollectionItem) {
    if (isCollectionItem) {
        // Cached singletons cannot be shared when the same name repeats in a document.
        return new NumericDocValuesField(propertyName, propertyValue);
    }
    NumericDocValuesField cachedField = this.numericFields.computeIfAbsent(propertyName,
            k -> new NumericDocValuesField(k, propertyValue));
    // Always refresh: a cached instance still carries the value from its previous use.
    cachedField.setLongValue(propertyValue);
    return cachedField;
}

From source file:org.elasticsearch.common.lucene.uid.VersionsTests.java

License:Apache License

/**
 * Verifies version resolution for a nested-document block: the version stored on the
 * root document is returned for the shared uid, updates via {@code setLongValue} plus
 * {@code updateDocuments} are visible after reopening the reader, and a delete yields
 * {@code Versions.NOT_FOUND} / {@code null}.
 *
 * @throws IOException on index I/O failure
 */
@Test
public void testNestedDocuments() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

    List<Document> docs = new ArrayList<Document>();
    for (int i = 0; i < 4; ++i) {
        // Nested children share the parent's uid but use the nested field type.
        Document doc = new Document();
        doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
        docs.add(doc);
    }
    // Root document, added after the children; it carries the version doc value.
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE));
    NumericDocValuesField version = new NumericDocValuesField(VersionFieldMapper.NAME, 5L);
    doc.add(version);
    docs.add(doc);

    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    DirectoryReader directoryReader = DirectoryReader.open(writer, true);
    // Use an uppercase 'L' suffix for long literals — lowercase 'l' reads as the digit 1.
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5L));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version,
            equalTo(5L));

    // Mutating the shared field instance and re-updating reuses the same Document objects.
    version.setLongValue(6L);
    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    version.setLongValue(7L);
    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    directoryReader = reopen(directoryReader);
    // Only the last write (7) should be visible.
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(7L));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version,
            equalTo(7L));

    writer.deleteDocuments(new Term(UidFieldMapper.NAME, "1"));
    directoryReader = reopen(directoryReader);
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")),
            equalTo(Versions.NOT_FOUND));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), nullValue());
    directoryReader.close();
    writer.close();
    dir.close();
}

From source file:org.elasticsearch.test.unit.common.lucene.uid.VersionsTests.java

License:Apache License

/**
 * Verifies version resolution for a nested-document block: the version stored on the
 * root document is returned for the shared uid, updates via {@code setLongValue} plus
 * {@code updateDocuments} are visible after reopening the reader, and a delete yields
 * {@code Versions.NOT_FOUND} / {@code null}.
 *
 * @throws IOException on index I/O failure
 */
@Test
public void testNestedDocuments() throws IOException {
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));

    List<Document> docs = new ArrayList<Document>();
    for (int i = 0; i < 4; ++i) {
        // Nested children share the parent's uid but use the nested field type.
        Document doc = new Document();
        doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
        docs.add(doc);
    }
    // Root document, added after the children; it carries the version doc value.
    Document doc = new Document();
    doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE));
    NumericDocValuesField version = new NumericDocValuesField(UidFieldMapper.VERSION, 5L);
    doc.add(version);
    docs.add(doc);

    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    DirectoryReader directoryReader = DirectoryReader.open(writer, true);
    // Use an uppercase 'L' suffix for long literals — lowercase 'l' reads as the digit 1.
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5L));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version,
            equalTo(5L));

    // Mutating the shared field instance and re-updating reuses the same Document objects.
    version.setLongValue(6L);
    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    version.setLongValue(7L);
    writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
    directoryReader = reopen(directoryReader);
    // Only the last write (7) should be visible.
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(7L));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version,
            equalTo(7L));

    writer.deleteDocuments(new Term(UidFieldMapper.NAME, "1"));
    directoryReader = reopen(directoryReader);
    assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")),
            equalTo(Versions.NOT_FOUND));
    assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), nullValue());
    directoryReader.close();
    writer.close();
    dir.close();
}

From source file:org.exist.indexing.lucene.LuceneIndexWorker.java

License:Open Source License

/**
 * Flushes all pending nodes in {@code nodesToWrite} to the Lucene index as one
 * document per node, then clears the pending list.
 *
 * <p>Each document gets: the owning document id (as a doc value and as an indexed
 * int field), the serialized node id (as a binary doc value and as an indexed binary
 * token), the analyzed text content, any metadata fields streamed from the index
 * controller, and facet category paths derived from that metadata.
 *
 * <p>Field instances ({@code fDocId}, {@code fNodeId}, {@code fDocIdIdx}) are created
 * once and reused across loop iterations by mutating their values — a standard Lucene
 * optimization; each {@code addDocument} call consumes the current values.
 */
private void write() {
    if (nodesToWrite == null || nodesToWrite.isEmpty())
        return; // nothing queued for indexing
    IndexWriter writer = null;
    try {
        writer = index.getWriter();
        // docId and nodeId are stored as doc values; instances are reused per document.
        NumericDocValuesField fDocId = new NumericDocValuesField(FIELD_DOC_ID, 0);
        BinaryDocValuesField fNodeId = new BinaryDocValuesField(LuceneUtil.FIELD_NODE_ID, new BytesRef(8));
        // docId also needs to be indexed (searchable), not just stored as a doc value
        IntField fDocIdIdx = new IntField(FIELD_DOC_ID, 0, IntField.TYPE_NOT_STORED);

        // Metadata fields and facet paths shared by every document written below.
        final List<Field> metas = new ArrayList<>();
        final List<CategoryPath> paths = new ArrayList<>();

        // Collect string-valued metadata entries as analyzed fields and facet paths.
        broker.getIndexController().streamMetas(new MetaStreamListener() {
            @Override
            public void metadata(QName key, Object value) {
                if (value instanceof String) {
                    String name = key.getLocalName();//LuceneUtil.encodeQName(key, index.getBrokerPool().getSymbols());
                    Field fld = new Field(name, value.toString(), Field.Store.NO, Field.Index.ANALYZED,
                            Field.TermVector.YES);
                    metas.add(fld);

                    paths.add(new CategoryPath(name, value.toString()));
                }
            }
        });

        TaxonomyWriter taxoWriter = index.getTaxonomyWriter();
        FacetFields facetFields = new FacetFields(taxoWriter);

        for (PendingDoc pending : nodesToWrite) {
            final Document doc = new Document();
            // NOTE(review): uses currentDoc (not pending) for the doc id — assumes every
            // pending node belongs to currentDoc at flush time; confirm with callers.
            fDocId.setLongValue(currentDoc.getDocId());
            doc.add(fDocId);

            // Serialize the node id: 2-byte unit count prefix followed by the id bytes.
            int nodeIdLen = pending.nodeId.size();
            byte[] data = new byte[nodeIdLen + 2];
            ByteConversion.shortToByte((short) pending.nodeId.units(), data, 0);
            pending.nodeId.serialize(data, 2);
            fNodeId.setBytesValue(data);
            doc.add(fNodeId);

            // Also index the node id as a single binary token so it can be looked up.
            BinaryTokenStream bts = new BinaryTokenStream(new BytesRef(data));
            Field fNodeIdIdx = new Field(LuceneUtil.FIELD_NODE_ID, bts, TYPE_NODE_ID);
            doc.add(fNodeIdIdx);

            String contentField;
            // the text content is indexed in a field using either
            // the qname of the element or attribute or the field
            // name defined in the configuration
            if (pending.idxConf.isNamed())
                contentField = pending.idxConf.getName();
            else
                contentField = LuceneUtil.encodeQName(pending.qname, index.getBrokerPool().getSymbols());

            Field fld = new Field(contentField, pending.text.toString(), Field.Store.NO, Field.Index.ANALYZED,
                    Field.TermVector.YES);
            // Per-index boost takes precedence over the global configuration boost.
            if (pending.idxConf.getBoost() > 0)
                fld.setBoost(pending.idxConf.getBoost());

            else if (config.getBoost() > 0)
                fld.setBoost(config.getBoost());

            doc.add(fld);

            fDocIdIdx.setIntValue(currentDoc.getDocId());
            doc.add(fDocIdIdx);

            for (Field meta : metas) {
                doc.add(meta);
            }
            if (!paths.isEmpty()) {
                facetFields.addFields(doc, paths);
            }

            // Use the per-index analyzer when configured, otherwise the writer's default.
            if (pending.idxConf.getAnalyzer() == null)
                writer.addDocument(doc);
            else {
                writer.addDocument(doc, pending.idxConf.getAnalyzer());
            }
        }
    } catch (IOException e) {
        // Best-effort: log and continue; the pending list is still cleared below.
        LOG.warn("An exception was caught while indexing document: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
        nodesToWrite = new ArrayList<>();
        cachedNodesSize = 0;
    }
}