List of usage examples for org.apache.lucene.index IndexWriter deleteDocuments
public long deleteDocuments(Term... terms) throws IOException
public long deleteDocuments(Query... queries) throws IOException
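Most of the examples below delete by Term (an exact match on a single field), while the Query overload removes everything that matches an arbitrary query. The following is a minimal, self-contained sketch of both overloads against the Lucene 5+ API; the class name, index path, and field names are placeholders, and the project examples further down target various Lucene versions, so their IndexWriterConfig and FSDirectory constructors differ slightly.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class DeleteDocumentsSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder index location; point this at an existing index.
        Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
        IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());
        conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);

        try (IndexWriter writer = new IndexWriter(dir, conf)) {
            // Delete by term: removes every document whose "id" field equals "1".
            writer.deleteDocuments(new Term("id", "1"));

            // Delete by query: removes every document matching the query.
            Query obsolete = new TermQuery(new Term("category", "obsolete"));
            writer.deleteDocuments(obsolete);

            // Deletes stay buffered until a commit (or a near-real-time reader reopen) makes them visible.
            writer.commit();
        }
    }
}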
From source file:gal.udc.fic.muei.tfm.dap.flipper.service.util.cbir.LireBuilder.java
License:Open Source License
private static void deleteFromFeature(UUID pictureId, Term term, String prefix, IndexWriterConfig conf)
        throws IOException {
    File file = getPath(prefix);

    // Create a Lucene IndexWriter, building a default configuration if none was supplied
    log.debug("Lucene configuration provided: " + (conf != null));
    if (conf == null) {
        conf = new IndexWriterConfig(LuceneUtils.LUCENE_VERSION,
                new WhitespaceAnalyzer(LuceneUtils.LUCENE_VERSION));
        conf.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    }
    IndexWriter iw = new IndexWriter(FSDirectory.open(file), conf);
    iw.deleteDocuments(term);
    iw.close();
}
From source file:io.datalayer.lucene.delete.LuceneDeleteTest.java
License:Apache License
@Test
public void testDelete() throws IOException {
    IndexWriter writer = AosIndexUtil.newIndexWithDocuments();

    Term term = new Term(ID, "1");
    Query query = new TermQuery(term);

    IndexReader reader = DirectoryReader.open(writer, true);
    IndexSearcher indexSearcher = new IndexSearcher(DirectoryReader.open(writer, true));
    TopDocs topDocs = indexSearcher.search(query, 1);
    LOGGER.info("" + topDocs.scoreDocs[0].doc);
    assertNotNull(reader.document(topDocs.scoreDocs[0].doc));

    LOGGER.info("Deleting documents containing " + term);
    writer.deleteDocuments(term);
    // writer.deleteDocuments(query);
    writer.commit();

    indexSearcher = new IndexSearcher(DirectoryReader.open(writer, true));
    topDocs = indexSearcher.search(query, 1);
    assertEquals(0, topDocs.scoreDocs.length);

    reader.close();
    writer.close();
}
From source file:io.datalayer.lucene.index.LuceneLifecycleTest.java
License:Apache License
/**
 * #1 Run before every test
 * #2 Create IndexWriter
 * #3 Add documents
 * #4 Create new searcher
 * #5 Build simple single-term query
 * #6 Get number of hits
 * #7 Verify writer document count
 * #8 Verify reader document count
 */
@Test
public void testDeleteBeforeOptimize() throws IOException {
    IndexWriter writer = getWriter();
    assertEquals(2, writer.numDocs());
    writer.deleteDocuments(new Term("id", "1"));
    writer.commit();
    assertTrue(writer.hasDeletions());
    assertEquals(2, writer.maxDoc());
    assertEquals(1, writer.numDocs());
    writer.close();
}
From source file:io.datalayer.lucene.index.LuceneLifecycleTest.java
License:Apache License
/**
 * #1 Index contains deletions
 * #2 1 indexed document, 1 deleted document
 * #3 Optimize compacts deletes
 */
@Test
public void testDeleteAfterOptimize() throws IOException {
    IndexWriter writer = getWriter();
    assertEquals(2, writer.numDocs());
    writer.deleteDocuments(new Term("id", "1"));
    writer.forceMerge(1);
    writer.commit();
    assertFalse(writer.hasDeletions());
    assertEquals(1, writer.maxDoc());
    assertEquals(1, writer.numDocs());
    writer.close();
}
From source file:io.jpress.module.article.searcher.LuceneSearcher.java
License:LGPL
@Override
public void deleteArticle(Object id) {
    IndexWriter writer = null;
    try {
        writer = createIndexWriter();
        writer.deleteDocuments(new Term("aid", id.toString()));
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        CommonsUtils.quietlyClose(writer);
    }
}
From source file:io.jpress.searcher.LuceneSearcher.java
License:LGPL
@Override
public void deleteBean(String beanId) {
    try {
        IndexWriter indexWriter = createIndexWriter();
        Term term = new Term("sid", beanId);
        indexWriter.deleteDocuments(term);
        indexWriter.commit();
        indexWriter.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:it.drwolf.ridire.index.sketch.AsyncSketchCreator.java
License:Apache License
@Asynchronous
public QuartzTriggerHandle updateSketches(IndexWriter indexWriter, List<String> itemsToBeProcessed,
        String cqpExecutable, String cqpRegistry, String cqpCorpusName, String goodFor1, String goodFor2) {
    int toBeProcessed = itemsToBeProcessed.size();
    int count = 0;
    this.cqpExecutable = cqpExecutable;
    this.cqpRegistry = cqpRegistry;
    this.cqpCorpusName = cqpCorpusName;
    QuartzTriggerHandle handle = new QuartzTriggerHandle("RIDIRE sketchcreator");
    Set<String> functionalMetadata = this.localResourcesManager.getAllFunctionalMetadataMap().keySet();
    Set<String> semanticMetadata = this.localResourcesManager.getAllSemanticMetadataMap().keySet();
    try {
        System.out.println("Sketchcreator update running " + this.toString());
        for (String lemma : itemsToBeProcessed) {
            ++count;
            for (Sketch s : SketchList.getSketchesToUpdate()) {
                BooleanQuery bq = new BooleanQuery();
                TermQuery tqLemma = new TermQuery(new Term("lemma", lemma));
                bq.add(tqLemma, Occur.MUST);
                if (s.getName().startsWith("pp_")) {
                    PrefixQuery prefixQuery = new PrefixQuery(new Term("sketch", "pp_"));
                    bq.add(prefixQuery, Occur.MUST);
                } else {
                    TermQuery sq = new TermQuery(new Term("sketch", s.getName()));
                    bq.add(sq, Occur.MUST);
                }
                // remove wrongly assigned sketches
                indexWriter.deleteDocuments(bq);
            }
            indexWriter.commit();
            for (Sketch s : SketchList.getSketchesToUpdate()) {
                // recreate sketches
                for (String functionalMetadatum : functionalMetadata) {
                    HashMap<String, SketchResult> sr = this.extractSingleLemmaSketches(lemma,
                            functionalMetadatum, null, s, indexWriter);
                    if (!s.isTrinary()) {
                        this.addDocument(sr, lemma, indexWriter, s.getName(), "WORD_SKETCH",
                                functionalMetadatum, null, s.getGoodFor());
                    }
                }
                for (String semanticMetadatum : semanticMetadata) {
                    HashMap<String, SketchResult> sr = this.extractSingleLemmaSketches(lemma, null,
                            semanticMetadatum, s, indexWriter);
                    if (!s.isTrinary()) {
                        this.addDocument(sr, lemma, indexWriter, s.getName(), "WORD_SKETCH", null,
                                semanticMetadatum, s.getGoodFor());
                    }
                }
                HashMap<String, SketchResult> sr = this.extractSingleLemmaSketches(lemma, null, null, s,
                        indexWriter);
                if (!s.isTrinary()) {
                    this.addDocument(sr, lemma, indexWriter, s.getName(), "WORD_SKETCH", null, null,
                            s.getGoodFor());
                }
            }
            // Italian progress log: "processed: <count> of <total>"
            System.out.println(goodFor1 + "-" + goodFor2 + " elaborati: " + count + " su " + toBeProcessed);
        }
        System.out.println("Sketchcreator update done " + this.toString());
    } catch (CorruptIndexException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    System.out.println("Sketchcreator done");
    return handle;
}
From source file:it.pronetics.madstore.repository.index.impl.LuceneIndexer.java
License:Apache License
public void delete(String collectionKey, String entryKey) {
    try {
        if (collectionKey == null || entryKey == null) {
            throw new AtomIndexingException("Parameters collectionKey and entryKey cannot be null.");
        }
        Term primaryKeyTerm = new Term(LuceneIndexManager.INDEX_PRIMARY_KEY, collectionKey + entryKey);
        IndexWriter indexWriter = newIndexWriter(directory);
        indexWriter.deleteDocuments(primaryKeyTerm);
        indexWriter.close();
    } catch (Exception e) {
        throw new AtomIndexingException(e.getMessage(), e);
    }
}
From source file:lia.chapter2.IndexingTest.java
License:Apache License
public void testDeleteBeforeOptimize() throws IOException {
    IndexWriter writer = getWriter();
    assertEquals(2, writer.numDocs());              // A
    writer.deleteDocuments(new Term("id", "1"));    // B
    writer.commit();
    assertTrue(writer.hasDeletions());              // 1
    assertEquals(2, writer.maxDoc());               // 2
    assertEquals(1, writer.numDocs());              // 2
    writer.close();
}
From source file:lia.chapter2.IndexingTest.java
License:Apache License
public void testDeleteAfterOptimize() throws IOException {
    IndexWriter writer = getWriter();
    assertEquals(2, writer.numDocs());
    writer.deleteDocuments(new Term("id", "1"));
    writer.forceMerge(1); // merge away the deleted document so the assertions below hold
    writer.commit();
    assertFalse(writer.hasDeletions());
    assertEquals(1, writer.maxDoc());   // C
    assertEquals(1, writer.numDocs());  // C
    writer.close();
}