Example usage for org.apache.lucene.index IndexWriter deleteDocuments

List of usage examples for org.apache.lucene.index IndexWriter deleteDocuments

Introduction

On this page you can find usage examples for org.apache.lucene.index IndexWriter deleteDocuments.

Prototype

public long deleteDocuments(Query... queries) throws IOException 

Document

Deletes the document(s) matching any of the provided queries.
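
Before the examples taken from real projects, here is a minimal, self-contained sketch of the call. It is only an illustration, assuming a Lucene release matching the prototype above (5.x+ style setup); the index path "index-dir", the StandardAnalyzer, and the "id" terms are placeholders and do not come from the examples below. Note that IndexWriter also provides a deleteDocuments(Term... terms) overload, which most of the examples on this page use.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class DeleteDocumentsSketch {
    public static void main(String[] args) throws IOException {
        // "index-dir" and the "id" terms below are placeholders.
        try (Directory dir = FSDirectory.open(Paths.get("index-dir"));
                IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {

            // Query overload: delete every document matching the query.
            writer.deleteDocuments(new TermQuery(new Term("id", "42")));

            // Term overload: equivalent for a simple exact-match delete.
            writer.deleteDocuments(new Term("id", "43"));

            // Deletions are buffered; commit() (or close()) makes them durable.
            writer.commit();
        }
    }
}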

Usage

From source file:org.eu.bitzone.Leia.java

License:Apache License

public void deleteTermDoc(final Object fText) {
    final Term t = (Term) getProperty(fText, "term");
    if (t == null) {
        return;
    }
    if (ir == null) {
        showStatus(MSG_NOINDEX);
        return;
    }
    if (readOnly) {
        showStatus(MSG_READONLY);
        return;
    }
    try {
        final IndexWriter iw = createIndexWriter();
        iw.deleteDocuments(t);
        iw.close();
        refreshAfterWrite();
        infoMsg("Deleted docs for query '" + t
                + "'. Term dictionary and statistics will be incorrect until next merge or expunge deletes.");
    } catch (final Exception e) {
        e.printStackTrace();
        errorMsg("Error deleting doc: " + e.toString());
    }
}

From source file:org.eu.bitzone.Leia.java

License:Apache License

public void deleteDocs(final Object sTable) {
    if (ir == null) {
        showStatus(MSG_NOINDEX);
        return;
    }
    if (readOnly) {
        showStatus(MSG_READONLY);
        return;
    }
    final Query q = (Query) getProperty(sTable, "query");
    if (q == null) {
        errorMsg("Empty query.");
        return;
    }
    removeAll(sTable);
    try {
        final IndexWriter iw = createIndexWriter();
        iw.deleteDocuments(q);
        iw.close();
        refreshAfterWrite();
        infoMsg("Deleted docs for query '" + q
                + "'. Term dictionary and statistics will be incorrect until next merge or expunge deletes.");
    } catch (final Throwable e) {
        e.printStackTrace();
        errorMsg("Error deleting documents: " + e.toString());
    }
}

From source file:org.exist.indexing.lucene.LuceneIndexWorker.java

License:Open Source License

protected void removeDocument(int docId) {
    IndexWriter writer = null;
    try {
        writer = index.getWriter();
        BytesRef bytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
        NumericUtils.intToPrefixCoded(docId, 0, bytes);
        Term dt = new Term(FIELD_DOC_ID, bytes);
        writer.deleteDocuments(dt);
    } catch (IOException e) {
        LOG.warn("Error while removing lucene index: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
        mode = StreamListener.STORE;
    }
}

From source file:org.exist.indexing.lucene.LuceneIndexWorker.java

License:Open Source License

protected void removePlainTextIndexes() {
    IndexWriter writer = null;
    try {
        writer = index.getWriter();
        String uri = currentDoc.getURI().toString();
        Term dt = new Term(FIELD_DOC_URI, uri);
        writer.deleteDocuments(dt);
    } catch (IOException e) {
        LOG.warn("Error while removing lucene index: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
        mode = StreamListener.STORE;
    }
}

From source file:org.exist.indexing.lucene.LuceneIndexWorker.java

License:Open Source License

public void removeCollection(Collection collection, DBBroker broker, boolean reindex) {
    if (LOG.isDebugEnabled())
        LOG.debug("Removing collection " + collection.getURI());
    IndexWriter writer = null;
    try {
        writer = index.getWriter();
        for (Iterator<DocumentImpl> i = collection.iterator(broker); i.hasNext();) {
            DocumentImpl doc = i.next();
            BytesRef bytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
            NumericUtils.intToPrefixCoded(doc.getDocId(), 0, bytes);
            Term dt = new Term(FIELD_DOC_ID, bytes);
            writer.deleteDocuments(dt);
        }
    } catch (IOException | PermissionDeniedException e) {
        LOG.error("Error while removing lucene index: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
        if (reindex) {
            try {
                index.sync();
            } catch (DBException e) {
                LOG.warn("Exception during reindex: " + e.getMessage(), e);
            }
        }
        mode = StreamListener.STORE;
    }
    if (LOG.isDebugEnabled())
        LOG.debug("Collection removed.");
}

From source file:org.exist.indexing.lucene.LuceneIndexWorker.java

License:Open Source License

/**
 * Remove specific nodes from the index. This method is used for node updates
 * and called from flush() if the worker is in {@link StreamListener#REMOVE_SOME_NODES}
 * mode.
 */
protected void removeNodes() {
    if (nodesToRemove == null)
        return;
    IndexWriter writer = null;
    try {
        writer = index.getWriter();

        BytesRef bytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
        NumericUtils.intToPrefixCoded(currentDoc.getDocId(), 0, bytes);
        Term dt = new Term(FIELD_DOC_ID, bytes);
        TermQuery tq = new TermQuery(dt);
        for (NodeId nodeId : nodesToRemove) {
            // store the node id
            int nodeIdLen = nodeId.size();
            byte[] data = new byte[nodeIdLen + 2];
            ByteConversion.shortToByte((short) nodeId.units(), data, 0);
            nodeId.serialize(data, 2);

            Term it = new Term(LuceneUtil.FIELD_NODE_ID, new BytesRef(data));
            TermQuery iq = new TermQuery(it);
            BooleanQuery q = new BooleanQuery();
            q.add(tq, BooleanClause.Occur.MUST);
            q.add(iq, BooleanClause.Occur.MUST);
            writer.deleteDocuments(q);
        }
    } catch (IOException e) {
        LOG.warn("Error while deleting lucene index entries: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
        nodesToRemove = null;
    }
}

From source file:org.exist.indexing.lucene.PlugToLucene.java

License:Open Source License

private void removeMetas(DocumentImpl doc, Metas metas) {

    //update lucene record

    IndexWriter writer = null;
    try {
        writer = index.getWriter();
        String uri = metas.getURI();
        Term dt = new Term(FIELD_META_DOC_URI, uri);
        writer.deleteDocuments(dt);
    } catch (IOException e) {
        //LOG.warn("Error while removing lucene index: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
    }
}

From source file:org.exist.indexing.range.RangeIndexWorker.java

License:Open Source License

@Override
public void removeCollection(Collection collection, DBBroker broker, boolean reindex)
        throws PermissionDeniedException {
    if (LOG.isDebugEnabled())
        LOG.debug("Removing collection " + collection.getURI());
    IndexWriter writer = null;
    try {
        writer = index.getWriter();
        for (Iterator<DocumentImpl> i = collection.iterator(broker); i.hasNext();) {
            DocumentImpl doc = i.next();
            BytesRef bytes = new BytesRef(NumericUtils.BUF_SIZE_INT);
            NumericUtils.intToPrefixCoded(doc.getDocId(), 0, bytes);
            Term dt = new Term(FIELD_DOC_ID, bytes);
            writer.deleteDocuments(dt);
        }
    } catch (IOException e) {
        LOG.error("Error while removing lucene index: " + e.getMessage(), e);
    } catch (PermissionDeniedException e) {
        LOG.error("Error while removing lucene index: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
        if (reindex) {
            try {
                index.sync();
            } catch (DBException e) {
                LOG.warn("Exception during reindex: " + e.getMessage(), e);
            }
        }
        mode = StreamListener.STORE;
    }
    if (LOG.isDebugEnabled())
        LOG.debug("Collection removed.");
}

From source file:org.exist.indexing.range.RangeIndexWorker.java

License:Open Source License

/**
 * Remove specific nodes from the index. This method is used for node updates
 * and called from flush() if the worker is in {@link StreamListener#REMOVE_SOME_NODES}
 * mode.
 */
protected void removeNodes() {
    if (nodesToRemove == null)
        return;
    IndexWriter writer = null;
    try {
        writer = index.getWriter();

        for (NodeId nodeId : nodesToRemove) {
            // build id from nodeId and docId
            int nodeIdLen = nodeId.size();
            byte[] data = new byte[nodeIdLen + 4];
            ByteConversion.intToByteH(currentDoc.getDocId(), data, 0);
            nodeId.serialize(data, 4);

            Term it = new Term(FIELD_ID, new BytesRef(data));
            TermQuery iq = new TermQuery(it);
            writer.deleteDocuments(iq);
        }
    } catch (IOException e) {
        LOG.warn("Error while deleting lucene index entries: " + e.getMessage(), e);
    } finally {
        nodesToRemove = null;
        index.releaseWriter(writer);
    }
}

From source file:org.fracturedatlas.athena.apa.indexing.IndexingApaAdapter.java

License:Open Source License

public void deleteFromIndex(Object oid) {
    if (oid == null || indexingDisabled) {
        return;
    }
    String id = IdAdapter.toString(oid);

    IndexWriter indexWriter = null;
    try {
        indexWriter = getWriter();
        indexWriter.deleteDocuments(new Term("_id", id));
        indexWriter.optimize();
    } catch (NullPointerException npe) {
        logger.error("Null pointer exception coming.  Did you call initializeIndex() ?");
        npe.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }

}