Example usage for org.apache.lucene.index IndexWriter updateDocument


Introduction

On this page you can find usage examples for org.apache.lucene.index IndexWriter updateDocument.

Prototype

private long updateDocument(final DocumentsWriterDeleteQueue.Node<?> delNode,
        Iterable<? extends IndexableField> doc) throws IOException

Note that this is the internal private overload from the Lucene sources; the examples below all call the public overload, updateDocument(Term delTerm, Iterable<? extends IndexableField> doc).
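The public overload atomically deletes any documents containing the given term and then adds the new document, which makes it the standard way to "upsert" a document keyed by an identifier field. The snippet below is a minimal sketch of that call against a recent Lucene release (5.x or later, where StandardAnalyzer and IndexWriterConfig no longer take a Version argument); the UpdateDocumentSketch class, the index path, and the field names are illustrative and not part of the examples that follow.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class UpdateDocumentSketch {

    // Adds the document when no document matches the id term, otherwise replaces it.
    public static void upsert(String indexPath, String docId, String body) throws IOException {
        try (Directory dir = FSDirectory.open(Paths.get(indexPath));
                IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {

            Document doc = new Document();
            doc.add(new StringField("id", docId, Field.Store.YES)); // exact-match key field
            doc.add(new TextField("body", body, Field.Store.YES));  // analyzed content field

            // updateDocument is equivalent to deleteDocuments(term) followed by addDocument(doc),
            // applied atomically as seen by readers of the index.
            writer.updateDocument(new Term("id", docId), doc);
            writer.commit();
        }
    }
}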

Usage

From source file:org.dspace.search.DSIndexer.java

License:BSD License

private static void executeIndexingTask(IndexWriter writer, IndexingTask action) throws IOException {
    if (action != null) {
        if (action.isDelete()) {
            if (action.getDocument() != null) {
                writer.updateDocument(action.getTerm(), action.getDocument());
            } else {
                writer.deleteDocuments(action.getTerm());
            }
        } else {
            writer.updateDocument(action.getTerm(), action.getDocument());
        }
    }
}

From source file:org.dspace.search.LuceneIndex.java

License:BSD License

private void commit(String documentKey, Document doc, boolean update) throws IOException {
    IndexWriter writer = null;
    Term term = new Term(DOCUMENT_KEY, documentKey);
    try {
        writer = openIndex(false);
        if (update) {
            writer.updateDocument(term, doc);
        } else {
            writer.deleteDocuments(term);
        }
    } finally {
        if (doc != null) {
            closeAllReaders(doc);
        }
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException e) {
                log.error("Unable to close IndexWriter", e);
            }
        }
    }
}

From source file:org.eclipse.dirigible.repository.ext.indexing.LuceneMemoryIndexer.java

License:Open Source License

@Override
public void updateDocument(Object document) throws EIndexingException {
    try {
        synchronized (directory) {

            logger.debug("entering: indexDocument(Document document) : " + indexName); //$NON-NLS-1$

            Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_35);
            IndexWriterConfig config = null;
            IndexWriter iwriter = null;
            try {
                config = new IndexWriterConfig(Version.LUCENE_35, analyzer);
                iwriter = new IndexWriter(directory, config);
                Term term = new Term(FIELD_ID, ((Document) document).get(FIELD_ID));
                iwriter.updateDocument(term, (Document) document);
                iwriter.commit();

                lastIndexed = new Date();

            } finally {
                if (iwriter != null) {
                    iwriter.close();
                }
            }
            logger.debug("exiting: indexRepository(IRepository repository) : " + indexName); //$NON-NLS-1$
        }
    } catch (Exception e) {
        throw new EIndexingException(e);
    }
}

From source file:org.eclipse.dirigible.repository.ext.indexing.RepositoryMemoryIndexer.java

License:Open Source License

private static void indexResource(IndexWriter iwriter, IResource resource)
        throws CorruptIndexException, IOException {

    logger.debug("entering: indexResource(IndexWriter iwriter, IResource resource)"); //$NON-NLS-1$

    if (!resource.isBinary()) {
        if (!indexedResources.contains(resource.getPath())) {
            logger.debug("Indexing resource: " + resource.getPath()); //$NON-NLS-1$
            Document doc = createDocument(resource);
            iwriter.addDocument(doc);
            iwriter.commit();
            logger.debug("Resource: " + resource.getPath() + " indexed successfully"); //$NON-NLS-1$ //$NON-NLS-2$
            indexedResources.add(resource.getPath());
        } else {
            if (lastIndexed.before(resource.getInformation().getModifiedAt())) {
                logger.debug("Updating index for resource: " + resource.getPath()); //$NON-NLS-1$
                Document doc = createDocument(resource);
                Term term = new Term(FIELD_PATH, resource.getPath());
                iwriter.updateDocument(term, doc);
                iwriter.commit();
                logger.debug("For resource: " + resource.getPath() + " index updated successfully"); //$NON-NLS-1$ //$NON-NLS-2$
            } else {
                logger.debug("Skip indexing for unmodified resource: " + resource.getPath()); //$NON-NLS-1$
            }
        }
    } else {
        logger.debug("Skip indexing for binary resource: " + resource.getPath()); //$NON-NLS-1$
    }

    logger.debug("exiting: indexResource(IndexWriter iwriter, IResource resource)"); //$NON-NLS-1$
}

From source file:org.elasticsearch.action.termvector.AbstractTermVectorTests.java

License:Apache License

protected DirectoryReader indexDocsWithLucene(TestDoc[] testDocs) throws IOException {

    Map<String, Analyzer> mapping = new HashMap<String, Analyzer>();
    for (TestFieldSetting field : testDocs[0].fieldSettings) {
        if (field.storedPayloads) {
            mapping.put(field.name, new Analyzer() {
                @Override
                protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
                    Tokenizer tokenizer = new StandardTokenizer(Version.CURRENT.luceneVersion, reader);
                    TokenFilter filter = new LowerCaseFilter(Version.CURRENT.luceneVersion, tokenizer);
                    filter = new TypeAsPayloadTokenFilter(filter);
                    return new TokenStreamComponents(tokenizer, filter);
                }

            });
        }
    }
    PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(
            new StandardAnalyzer(Version.CURRENT.luceneVersion, CharArraySet.EMPTY_SET), mapping);

    Directory dir = new RAMDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(Version.CURRENT.luceneVersion, wrapper);

    conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);

    for (TestDoc doc : testDocs) {
        Document d = new Document();
        d.add(new Field("id", doc.id, StringField.TYPE_STORED));
        for (int i = 0; i < doc.fieldContent.length; i++) {
            FieldType type = new FieldType(TextField.TYPE_STORED);
            TestFieldSetting fieldSetting = doc.fieldSettings[i];

            type.setStoreTermVectorOffsets(fieldSetting.storedOffset);
            type.setStoreTermVectorPayloads(fieldSetting.storedPayloads);
            type.setStoreTermVectorPositions(
                    fieldSetting.storedPositions || fieldSetting.storedPayloads || fieldSetting.storedOffset);
            type.setStoreTermVectors(true);
            type.freeze();
            d.add(new Field(fieldSetting.name, doc.fieldContent[i], type));
        }
        writer.updateDocument(new Term("id", doc.id), d);
        writer.commit();
    }
    writer.close();

    return DirectoryReader.open(dir);
}

From source file:org.elasticsearch.action.termvector.TermVectorUnitTests.java

License:Apache License

private void writeEmptyTermVector(TermVectorResponse outResponse) throws IOException {

    Directory dir = newDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
            new StandardAnalyzer(TEST_VERSION_CURRENT));
    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);
    FieldType type = new FieldType(TextField.TYPE_STORED);
    type.setStoreTermVectorOffsets(true);
    type.setStoreTermVectorPayloads(false);
    type.setStoreTermVectorPositions(true);
    type.setStoreTermVectors(true);
    type.freeze();
    Document d = new Document();
    d.add(new Field("id", "abc", StringField.TYPE_STORED));

    writer.updateDocument(new Term("id", "abc"), d);
    writer.commit();
    writer.close();
    DirectoryReader dr = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(dr);
    TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    int doc = scoreDocs[0].doc;
    Fields fields = dr.getTermVectors(doc);
    EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
    outResponse.setFields(fields, null, flags, fields);
    outResponse.setExists(true);
    dr.close();
    dir.close();

}

From source file:org.elasticsearch.action.termvector.TermVectorUnitTests.java

License:Apache License

private void writeStandardTermVector(TermVectorResponse outResponse) throws IOException {

    Directory dir = newDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
            new StandardAnalyzer(TEST_VERSION_CURRENT));

    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);
    FieldType type = new FieldType(TextField.TYPE_STORED);
    type.setStoreTermVectorOffsets(true);
    type.setStoreTermVectorPayloads(false);
    type.setStoreTermVectorPositions(true);
    type.setStoreTermVectors(true);
    type.freeze();
    Document d = new Document();
    d.add(new Field("id", "abc", StringField.TYPE_STORED));
    d.add(new Field("title", "the1 quick brown fox jumps over  the1 lazy dog", type));
    d.add(new Field("desc", "the1 quick brown fox jumps over  the1 lazy dog", type));

    writer.updateDocument(new Term("id", "abc"), d);
    writer.commit();
    writer.close();
    DirectoryReader dr = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(dr);
    TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    int doc = scoreDocs[0].doc;
    Fields termVectors = dr.getTermVectors(doc);
    EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
    outResponse.setFields(termVectors, null, flags, termVectors);
    dr.close();
    dir.close();

}

From source file:org.elasticsearch.action.termvectors.AbstractTermVectorsTestCase.java

License:Apache License

protected DirectoryReader indexDocsWithLucene(TestDoc[] testDocs) throws IOException {
    Map<String, Analyzer> mapping = new HashMap<>();
    for (TestFieldSetting field : testDocs[0].fieldSettings) {
        if (field.storedPayloads) {
            mapping.put(field.name, new Analyzer() {
                @Override
                protected TokenStreamComponents createComponents(String fieldName) {
                    Tokenizer tokenizer = new StandardTokenizer();
                    TokenFilter filter = new LowerCaseFilter(tokenizer);
                    filter = new TypeAsPayloadTokenFilter(filter);
                    return new TokenStreamComponents(tokenizer, filter);
                }

            });
        }
    }
    PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(new StandardAnalyzer(CharArraySet.EMPTY_SET),
            mapping);

    Directory dir = new RAMDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(wrapper);

    conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);

    for (TestDoc doc : testDocs) {
        Document d = new Document();
        d.add(new Field("id", doc.id, StringField.TYPE_STORED));
        for (int i = 0; i < doc.fieldContent.length; i++) {
            FieldType type = new FieldType(TextField.TYPE_STORED);
            TestFieldSetting fieldSetting = doc.fieldSettings[i];

            type.setStoreTermVectorOffsets(fieldSetting.storedOffset);
            type.setStoreTermVectorPayloads(fieldSetting.storedPayloads);
            type.setStoreTermVectorPositions(
                    fieldSetting.storedPositions || fieldSetting.storedPayloads || fieldSetting.storedOffset);
            type.setStoreTermVectors(true);
            type.freeze();
            d.add(new Field(fieldSetting.name, doc.fieldContent[i], type));
        }
        writer.updateDocument(new Term("id", doc.id), d);
        writer.commit();
    }
    writer.close();

    return DirectoryReader.open(dir);
}

From source file:org.elasticsearch.action.termvectors.TermVectorsUnitTests.java

License:Apache License

private void writeEmptyTermVector(TermVectorsResponse outResponse) throws IOException {

    Directory dir = newDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());
    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);
    FieldType type = new FieldType(TextField.TYPE_STORED);
    type.setStoreTermVectorOffsets(true);
    type.setStoreTermVectorPayloads(false);
    type.setStoreTermVectorPositions(true);
    type.setStoreTermVectors(true);
    type.freeze();
    Document d = new Document();
    d.add(new Field("id", "abc", StringField.TYPE_STORED));

    writer.updateDocument(new Term("id", "abc"), d);
    writer.commit();
    writer.close();
    DirectoryReader dr = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(dr);
    TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    int doc = scoreDocs[0].doc;
    Fields fields = dr.getTermVectors(doc);
    EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
    outResponse.setFields(fields, null, flags, fields);
    outResponse.setExists(true);
    dr.close();
    dir.close();

}

From source file:org.elasticsearch.action.termvectors.TermVectorsUnitTests.java

License:Apache License

private void writeStandardTermVector(TermVectorsResponse outResponse) throws IOException {

    Directory dir = newDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());

    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);
    FieldType type = new FieldType(TextField.TYPE_STORED);
    type.setStoreTermVectorOffsets(true);
    type.setStoreTermVectorPayloads(false);
    type.setStoreTermVectorPositions(true);
    type.setStoreTermVectors(true);
    type.freeze();
    Document d = new Document();
    d.add(new Field("id", "abc", StringField.TYPE_STORED));
    d.add(new Field("title", "the1 quick brown fox jumps over  the1 lazy dog", type));
    d.add(new Field("desc", "the1 quick brown fox jumps over  the1 lazy dog", type));

    writer.updateDocument(new Term("id", "abc"), d);
    writer.commit();
    writer.close();
    DirectoryReader dr = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(dr);
    TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    int doc = scoreDocs[0].doc;
    Fields termVectors = dr.getTermVectors(doc);
    EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
    outResponse.setFields(termVectors, null, flags, termVectors);
    dr.close();
    dir.close();

}