List of usage examples for org.apache.lucene.index IndexWriter deleteDocuments
public long deleteDocuments(Term... terms) throws IOException
public long deleteDocuments(Query... queries) throws IOException
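Most of the examples below delete by Term; as a quick orientation, here is a minimal, self-contained sketch of the Query-based overload in Lucene 4.x style (the directory, analyzer, field name and value are illustrative assumptions, not taken from the sources below):

import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class DeleteByQuerySketch {
    public static void main(String[] args) throws IOException {
        Directory dir = new RAMDirectory();
        Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_45);
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_45, analyzer);
        IndexWriter writer = new IndexWriter(dir, config);
        // ... add documents here ...
        // Delete every document whose "status" field holds the term "obsolete".
        Query query = new TermQuery(new Term("status", "obsolete"));
        writer.deleteDocuments(query);
        // Deletions become visible to newly opened readers only after commit() or close().
        writer.commit();
        writer.close();
    }
}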
From source file:action.indexing.IndexingTest.java
License:Apache License
public void testDeleteBeforeOptimize() throws IOException {
    IndexWriter writer = getWriter();
    assertEquals(2, writer.numDocs());              //A
    writer.deleteDocuments(new Term("id", "1"));    //B
    writer.commit();
    assertTrue(writer.hasDeletions());              //1
    assertEquals(2, writer.maxDoc());               //2
    assertEquals(1, writer.numDocs());              //2
    writer.close();
}
From source file:action.indexing.IndexingTest.java
License:Apache License
public void testDeleteAfterOptimize() throws IOException {
    IndexWriter writer = getWriter();
    assertEquals(2, writer.numDocs());
    writer.deleteDocuments(new Term("id", "1"));
    writer.optimize();                              //3
    writer.commit();
    assertFalse(writer.hasDeletions());
    assertEquals(1, writer.maxDoc());               //C
    assertEquals(1, writer.numDocs());              //C
    writer.close();
}
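Note that optimize() above is pre-4.0 API; it was removed in Lucene 4.0 in favour of forceMerge(int) and forceMergeDeletes(). A minimal sketch of the same delete-then-merge step against the newer API (the writer and the "id" field are assumed to be set up elsewhere):

import java.io.IOException;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;

public class DeleteThenMergeSketch {
    // Deletes by term, then merges away the deleted documents (Lucene 4.x+ API).
    static void deleteAndMerge(IndexWriter writer, String id) throws IOException {
        writer.deleteDocuments(new Term("id", id));
        writer.forceMergeDeletes(); // successor of optimize()/expungeDeletes() from 3.x
        writer.commit();            // make the deletion and the merge visible to new readers
    }
}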
From source file:aos.lucene.search.msc.NearRealTimeTest.java
License:Apache License
public void testNearRealTime() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(Version.LUCENE_46),
            IndexWriter.MaxFieldLength.UNLIMITED);
    for (int i = 0; i < 10; i++) {
        Document doc = new Document();
        doc.add(new Field("id", "" + i, Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
        doc.add(new Field("text", "aaa", Field.Store.NO, Field.Index.ANALYZED));
        writer.addDocument(doc);
    }
    IndexReader reader = writer.getReader();
    IndexSearcher searcher = new IndexSearcher(reader);

    Query query = new TermQuery(new Term("text", "aaa"));
    TopDocs docs = searcher.search(query, 1);
    assertEquals(10, docs.totalHits);

    writer.deleteDocuments(new Term("id", "7"));

    Document doc = new Document();
    doc.add(new Field("id", "11", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
    doc.add(new Field("text", "bbb", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);

    IndexReader newReader = reader.reopen();
    assertFalse(reader == newReader);
    reader.close();
    searcher = new IndexSearcher(newReader);

    TopDocs hits = searcher.search(query, 10);
    assertEquals(9, hits.totalHits);

    query = new TermQuery(new Term("text", "bbb"));
    hits = searcher.search(query, 1);
    assertEquals(1, hits.totalHits);

    newReader.close();
    writer.close();
}
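The writer.getReader() and reader.reopen() calls above are also pre-4.0 API; from Lucene 4.x on, the near-real-time path goes through DirectoryReader.open and DirectoryReader.openIfChanged. A minimal sketch of that refresh step, assuming a Lucene 4.x IndexWriter (names are illustrative):

import java.io.IOException;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;

public class NrtRefreshSketch {
    // Returns a reader that reflects the writer's latest changes (Lucene 4.x API).
    static DirectoryReader refresh(IndexWriter writer, DirectoryReader current) throws IOException {
        // openIfChanged returns null when nothing changed since "current" was opened.
        DirectoryReader newer = DirectoryReader.openIfChanged(current, writer, true);
        if (newer == null) {
            return current;   // still up to date
        }
        current.close();      // release the stale reader
        return newer;         // wrap in a new IndexSearcher for searching
    }
}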
From source file:Application.mediaIndexer.java
public static void indexDocs(final IndexWriter writer, Path path, TextArea results, boolean removeFiles)
        throws IOException, SAXException, TikaException {
    boolean delFiles = removeFiles;
    if (Files.isDirectory(path)) {
        Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                try {
                    if (delFiles) {
                        writer.deleteDocuments(new Term("path", file.toString()));
                    } else {
                        indexDoc(writer, file, results, attrs.lastModifiedTime().toMillis());
                    }
                } catch (IOException ignore) {
                } catch (SAXException e) {
                    e.printStackTrace();
                } catch (TikaException e) {
                    e.printStackTrace();
                }
                return FileVisitResult.CONTINUE;
            }
        });
    } else if (delFiles) {
        writer.deleteDocuments(new Term("path", path.toString()));
    } else {
        indexDoc(writer, path, results, Files.getLastModifiedTime(path).toMillis());
    }
}
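The delete-by-path call above can only match if the path was indexed as a single untokenized term. A minimal sketch of indexing the path that way (the field name "path" is carried over from the example; everything else is assumed):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;

public class PathFieldSketch {
    static Document documentForFile(String path) {
        Document doc = new Document();
        // StringField indexes the whole path as one untokenized term, so a later
        // writer.deleteDocuments(new Term("path", path)) matches it exactly.
        doc.add(new StringField("path", path, Field.Store.YES));
        return doc;
    }
}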
From source file:arena.lucene.LuceneIndexUpdater.java
License:Open Source License
public int updateIndex(boolean deleteAllFromIndexFirst, Iterable<T> valueobjects) {
    IndexWriter writer = null;
    try {
        writer = new IndexWriter(directoryBean.getDirectory(), analyzer, deleteAllFromIndexFirst,
                MaxFieldLength.LIMITED);
        int docCount = 0;
        for (T vo : valueobjects) {
            Term pkTerm = this.contentMarshall.getPKTerm(vo);
            writer.deleteDocuments(pkTerm);
            Document doc = this.contentMarshall.serialize(vo);
            if (doc != null) {
                writer.addDocument(doc);
                docCount++;
            }
        }
        if (this.searchersToReset != null) {
            for (LuceneIndexSearcher<?> searcher : this.searchersToReset) {
                searcher.reset();
            }
        }
        return docCount;
    } catch (IOException err) {
        throw new RuntimeException("Error deleting documents from lucene index", err);
    } finally {
        if (writer != null) {
            try {
                writer.close();
            } catch (IOException err) {
            }
        }
    }
}
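The deleteDocuments-then-addDocument pair inside the loop is the usual upsert pattern; IndexWriter also provides updateDocument(Term, document), which performs the same delete-plus-add as a single atomic operation. A minimal sketch (the primary-key term and document are assumed to come from a marshaller like the one above):

import java.io.IOException;

import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;

public class UpsertSketch {
    // Atomically replaces any existing document carrying the same primary-key term.
    static void upsert(IndexWriter writer, Term pkTerm, Document doc) throws IOException {
        writer.updateDocument(pkTerm, doc);
    }
}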
From source file:ca.pgon.freenetknowledge.search.impl.LuceneIndexerThread.java
License:Apache License
public void removing(UrlEntity refererURL) {
    try {
        semaphore.acquire();
        Term term = new Term(LuceneSearchEngine.INDEX_REFERER_URL, String.valueOf(refererURL.getId()));
        IndexWriter indexWriter = genIndexWriter();
        indexWriter.deleteDocuments(term);
        indexWriter.close();
    } catch (NoSuchDirectoryException e) {
        // The index is empty, so not an issue
    } catch (Exception e) {
        logger.log(Level.SEVERE, "Error while removing referer", e);
    } finally {
        semaphore.release();
    }
}
From source file:calliope.search.AeseSearch.java
License:Open Source License
/**
 * Update the index for just ONE docID
 * @param docID the document to regenerate
 * @param langCode the language code of the analyzer
 * @throws AeseException
 */
public static void updateIndex(String docID, String langCode) throws AeseException {
    try {
        Analyzer analyzer = createAnalyzer(langCode);
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_45, analyzer);
        if (index == null)
            throw new AeseException("Index must be initialised before update");
        IndexWriter w = new IndexWriter(index, config);
        Term t = new Term(LuceneFields.DOCID, docID);
        w.deleteDocuments(t);
        addCorTextoIndex(docID, w);
        w.close();
    } catch (Exception e) {
        throw new AeseException(e);
    }
}
From source file:cn.hbu.cs.esearch.index.BaseSearchIndex.java
License:Apache License
private void deleteDocs(LongSet delDocs) throws IOException {
    if (delDocs == null || delDocs.size() == 0) {
        return;
    }
    EsearchMultiReader<R> reader = openIndexReader();
    if (reader == null) {
        return;
    }
    UIDFilter uidFilter = new UIDFilter(delDocs.toLongArray(), reader);
    IndexWriter writer = null;
    try {
        writer = openIndexWriter(null, null);
        writer.deleteDocuments(new ConstantScoreQuery(uidFilter));
        writer.commit();
    } finally {
        closeIndexWriter();
    }
}
From source file:cn.hbu.cs.esearch.index.LuceneIndexDataLoader.java
License:Apache License
private final void purgeDocuments() {
    if (_purgeFilter != null) {
        BaseSearchIndex<R> idx = getSearchIndex();
        IndexWriter writer = null;
        LOGGER.info("purging docs started...");
        int count = 0;
        long start = System.currentTimeMillis();
        try {
            writer = idx.openIndexWriter(null, null);
            ConstantScoreQuery q = new ConstantScoreQuery(_purgeFilter);
            writer.deleteDocuments(q);
            writer.commit();
        } catch (Throwable th) {
            LOGGER.error("problem creating purge filter: " + th.getMessage(), th);
        } finally {
            idx.closeIndexWriter();
        }
        long end = System.currentTimeMillis();
        LOGGER.info("purging docs completed in " + (end - start) + "ms");
        LOGGER.info("total docs purged: " + count);
    }
}
From source file:com.aperigeek.dropvault.web.service.IndexService.java
License:Open Source License
public void remove(String username, String password, String id) throws IndexException {
    try {
        IndexWriter writer = getIndexWriter(username, password);
        writer.deleteDocuments(new Term("id", id));
        writer.close();
    } catch (IOException ex) {
        throw new IndexException(ex);
    }
}