List of usage examples for org.apache.lucene.index IndexWriter commit
@Override public final long commit() throws IOException
Commits all pending changes (added and deleted documents, segment merges, added indexes, etc.) to the index, and syncs all referenced index files, such that a reader will see the changes and the index updates will survive an OS or machine crash or power loss.
From source file: io.datalayer.lucene.index.LuceneLifecycleTest.java
License: Apache License
/**
 * Deleting a document and committing, without forcing a merge, leaves a
 * tombstone: the writer reports deletions, maxDoc() still counts the
 * deleted document, and numDocs() does not.
 */
@Test
public void testDeleteBeforeOptimize() throws IOException {
    IndexWriter w = getWriter();
    // Fixture starts with two live documents.
    assertEquals(2, w.numDocs());

    w.deleteDocuments(new Term("id", "1"));
    w.commit();

    // Delete is recorded but segments are not compacted yet.
    assertTrue(w.hasDeletions());
    assertEquals(2, w.maxDoc());
    assertEquals(1, w.numDocs());

    w.close();
}
From source file: io.datalayer.lucene.index.LuceneLifecycleTest.java
License: Apache License
/**
 * Forcing a merge down to one segment before committing compacts away
 * deleted documents entirely: no deletions remain and maxDoc() equals
 * numDocs().
 */
@Test
public void testDeleteAfterOptimize() throws IOException {
    IndexWriter w = getWriter();
    // Fixture starts with two live documents.
    assertEquals(2, w.numDocs());

    w.deleteDocuments(new Term("id", "1"));
    w.forceMerge(1); // merge compacts deletes out of the segments
    w.commit();

    assertFalse(w.hasDeletions());
    assertEquals(1, w.maxDoc());
    assertEquals(1, w.numDocs());

    w.close();
}
From source file: io.datalayer.lucene.index.LuceneSimple.java
License: Apache License
private static void index(String datafolder, String indexfolder) throws CorruptIndexException, LockObtainFailedException, IOException { Analyzer a = new StandardAnalyzer(Version.LUCENE_46); Directory d = FSDirectory.open(new File(indexfolder)); IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_46, new StandardAnalyzer(Version.LUCENE_46)); IndexWriter indexWriter = new IndexWriter(d, config); Document doc = new Document(); // Fieldable contentfield=new Field("content", new // FileReader(datafolder)); // doc.add(contentfield); // Fieldable namefield=new Field("filename",datafolder, Store.YES, // Index.NOT_ANALYZED); // doc.add(namefield); indexWriter.addDocument(doc);/* www .java 2 s . c o m*/ indexWriter.commit(); }
From source file: io.jpress.searcher.LuceneSearcher.java
License: LGPL
@Override public void addBean(SearcherBean bean) { try {//from w w w . ja va2 s.c om IndexWriter indexWriter = createIndexWriter(); indexWriter.addDocument(createDocument(bean)); indexWriter.commit(); indexWriter.close(); } catch (IOException e) { e.printStackTrace(); } }
From source file: io.jpress.searcher.LuceneSearcher.java
License: LGPL
/**
 * Deletes every document whose "sid" term equals {@code beanId} and commits.
 * I/O failures are logged and swallowed (best-effort), matching the
 * original contract.
 */
@Override
public void deleteBean(String beanId) {
    // try-with-resources fixes a leak in the original: if deleteDocuments or
    // commit threw, the writer (and the index write lock) was never closed.
    try (IndexWriter indexWriter = createIndexWriter()) {
        indexWriter.deleteDocuments(new Term("sid", beanId));
        indexWriter.commit();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file: io.jpress.searcher.LuceneSearcher.java
License: LGPL
/**
 * Atomically replaces the document matching the bean's "sid" term with a
 * freshly built document, then commits. I/O failures are logged and
 * swallowed (best-effort), matching the original contract.
 */
@Override
public void updateBean(SearcherBean bean) {
    // try-with-resources fixes a leak in the original: if updateDocument or
    // commit threw, the writer (and the index write lock) was never closed.
    try (IndexWriter indexWriter = createIndexWriter()) {
        Term term = new Term("sid", bean.getSid());
        indexWriter.updateDocument(term, createDocument(bean));
        indexWriter.commit();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file: io.yucca.lucene.FieldRemover.java
License: Apache License
/**
 * Remove fields from an index by copying every leaf of the source index
 * through a field-filtering reader into the destination writer. All readers
 * and the writer are closed on completion or on an exception.
 *
 * @param reader
 *            IndexReader over the source index
 * @param writer
 *            IndexWriter writing the destination index
 * @param fields
 *            String[] names of the fields to remove
 */
public void removeFields(IndexReader reader, IndexWriter writer, String[] fields) {
    Set<String> removals = toTrimmedSet(fields);
    List<AtomicReaderContext> leaves = reader.leaves();
    // Wrap each leaf so the unwanted fields are filtered out on read
    // (third arg true = exclude the listed fields).
    AtomicReader wrappedLeaves[] = new AtomicReader[leaves.size()];
    for (int i = 0; i < leaves.size(); i++) {
        wrappedLeaves[i] = new FieldFilterAtomicReader(leaves.get(i).reader(), removals, true);
    }
    try {
        MultiReader mr = new MultiReader(wrappedLeaves, true);
        // Copying the filtered reader into the writer materializes the
        // index without the removed fields.
        writer.addIndexes(mr);
        writer.commit();
        writer.close();
        mr.close();
    } catch (IOException e) {
        log.error("Writing new index failed.", e);
    } finally {
        // closeWhileHandlingException tolerates already-closed resources,
        // so the happy-path writer.close() above is safe to repeat here.
        IOUtils.closeWhileHandlingException(reader);
        IOUtils.closeWhileHandlingException(writer);
        IOUtils.closeWhileHandlingException(writer.getDirectory());
    }
}
From source file: it.drwolf.ridire.index.sketch.AsyncSketchCreator.java
License: Apache License
/**
 * Builds one Lucene document summarizing a word-sketch result set for
 * {@code lemma} and adds it to the index, committing immediately.
 *
 * The collocation rows are serialized into a single tab-separated "tabella"
 * field; "functional"/"semantic" metadata fields are only added when
 * present, and a document restricted to neither is tagged "allcorpora".
 */
private void addDocument(Map<String, SketchResult> sr, String lemma, IndexWriter indexWriter, String sketch,
        String type, String functional, String semantic, String goodFor)
        throws CorruptIndexException, IOException {
    Document d = new Document();
    List<SketchResult> orderedSketchResults = this.getOrderedSketchResults(sr.values());
    StringBuffer sb = new StringBuffer();
    long fA = 0L;
    for (SketchResult r : orderedSketchResults) {
        // fA ends up holding the value from the last row; it is stored
        // below as the overall frequency.
        fA = r.getfA();
        // One tab-separated row per collocate: collocate, fAB, score, fA, fB.
        sb.append(r.getCollocata() + "\t" + r.getfAB() + "\t" + r.getScore() + "\t" + fA + "\t" + r.getfB()
                + "\t" + "\n");
    }
    // Stored-only payload fields (not searchable).
    d.add(new Field("overallfrequency", fA + "", Store.YES, Index.NO));
    d.add(new Field("tabella", sb.toString(), Store.YES, Index.NO));
    // Exact-match lookup keys (not analyzed, no norms).
    d.add(new Field("lemma", lemma, Store.NO, Index.NOT_ANALYZED_NO_NORMS));
    d.add(new Field("sketch", sketch, Field.Store.YES, Index.NOT_ANALYZED_NO_NORMS));
    d.add(new Field("goodFor", goodFor, Field.Store.YES, Index.NOT_ANALYZED_NO_NORMS));
    d.add(new Field("type", type, Field.Store.NO, Index.NOT_ANALYZED_NO_NORMS));
    if (functional != null) {
        d.add(new Field("functional", functional, Field.Store.NO, Index.NOT_ANALYZED_NO_NORMS));
    }
    if (semantic != null) {
        d.add(new Field("semantic", semantic, Field.Store.NO, Index.NOT_ANALYZED_NO_NORMS));
    }
    if (functional == null && semantic == null) {
        // No metadata restriction: this sketch applies to all corpora.
        d.add(new Field("allcorpora", "yes", Field.Store.NO, Index.NOT_ANALYZED_NO_NORMS));
    }
    indexWriter.addDocument(d);
    // NOTE(review): committing per document is expensive; presumably chosen
    // for durability of each sketch — confirm before batching.
    indexWriter.commit();
}
From source file: it.drwolf.ridire.index.sketch.AsyncSketchCreator.java
License: Apache License
/**
 * Asynchronously rebuilds the word sketches for each lemma in
 * {@code itemsToBeProcessed}: for every sketch type it first deletes the
 * previously indexed (possibly wrongly assigned) sketch documents for the
 * lemma, commits, then re-extracts and re-indexes the sketches per
 * functional metadatum, per semantic metadatum, and once unrestricted.
 *
 * Progress is reported to stdout; index errors are printed and abort the
 * remaining work for this batch.
 *
 * @return a QuartzTriggerHandle identifying this asynchronous job
 */
@Asynchronous
public QuartzTriggerHandle updateSketches(IndexWriter indexWriter, List<String> itemsToBeProcessed,
        String cqpExecutable, String cqpRegistry, String cqpCorpusName, String goodFor1, String goodFor2) {
    int toBeProcessed = itemsToBeProcessed.size();
    int count = 0;
    // CQP configuration is stashed on the instance for use by
    // extractSingleLemmaSketches.
    this.cqpExecutable = cqpExecutable;
    this.cqpRegistry = cqpRegistry;
    this.cqpCorpusName = cqpCorpusName;
    QuartzTriggerHandle handle = new QuartzTriggerHandle("RIDIRE sketchcreator");
    Set<String> functionalMetadata = this.localResourcesManager.getAllFunctionalMetadataMap().keySet();
    Set<String> semanticMetadata = this.localResourcesManager.getAllSemanticMetadataMap().keySet();
    try {
        System.out.println("Sketchcreator update running " + this.toString());
        for (String lemma : itemsToBeProcessed) {
            ++count;
            // Phase 1: delete the lemma's existing documents for every
            // sketch to be updated.
            for (Sketch s : SketchList.getSketchesToUpdate()) {
                BooleanQuery bq = new BooleanQuery();
                TermQuery tqLemma = new TermQuery(new Term("lemma", lemma));
                bq.add(tqLemma, Occur.MUST);
                if (s.getName().startsWith("pp_")) {
                    // Prepositional sketches share the "pp_" prefix, so a
                    // prefix query catches all of their variants at once.
                    PrefixQuery prefixQuery = new PrefixQuery(new Term("sketch", "pp_"));
                    bq.add(prefixQuery, Occur.MUST);
                } else {
                    TermQuery sq = new TermQuery(new Term("sketch", s.getName()));
                    bq.add(sq, Occur.MUST);
                }
                // remove wrongly assigned sketches
                indexWriter.deleteDocuments(bq);
            }
            // Make the deletes visible before re-extraction.
            indexWriter.commit();
            // Phase 2: recreate the sketches. Trinary sketches are skipped
            // here (handled elsewhere, not indexed per call).
            for (Sketch s : SketchList.getSketchesToUpdate()) {
                // recreate sketches
                for (String functionalMetadatum : functionalMetadata) {
                    HashMap<String, SketchResult> sr = this.extractSingleLemmaSketches(lemma,
                            functionalMetadatum, null, s, indexWriter);
                    if (!s.isTrinary()) {
                        this.addDocument(sr, lemma, indexWriter, s.getName(), "WORD_SKETCH",
                                functionalMetadatum, null, s.getGoodFor());
                    }
                }
                for (String semanticMetadatum : semanticMetadata) {
                    HashMap<String, SketchResult> sr = this.extractSingleLemmaSketches(lemma, null,
                            semanticMetadatum, s, indexWriter);
                    if (!s.isTrinary()) {
                        this.addDocument(sr, lemma, indexWriter, s.getName(), "WORD_SKETCH", null,
                                semanticMetadatum, s.getGoodFor());
                    }
                }
                // Unrestricted run (no functional or semantic metadatum).
                HashMap<String, SketchResult> sr = this.extractSingleLemmaSketches(lemma, null, null, s,
                        indexWriter);
                if (!s.isTrinary()) {
                    this.addDocument(sr, lemma, indexWriter, s.getName(), "WORD_SKETCH", null, null,
                            s.getGoodFor());
                }
            }
            System.out.println(goodFor1 + "-" + goodFor2 + " elaborati: " + count + " su " + toBeProcessed);
        }
        System.out.println("Sketchcreator update done " + this.toString());
    } catch (CorruptIndexException e) {
        // Best-effort job: report and fall through to return the handle.
        e.printStackTrace();
    } catch (IOException e) {
        // Best-effort job: report and fall through to return the handle.
        e.printStackTrace();
    }
    System.out.println("Sketchcreator done");
    return handle;
}
From source file: it.unibz.instasearch.indexing.StorageIndexer.java
License: Open Source License
/**
 * Delete the whole index: empties it via the writer, then removes every
 * remaining file from the index directory. The work runs through the
 * retry machinery so transient failures (e.g. files still locked) are
 * retried.
 *
 * @throws Exception if the deletion ultimately fails after retries
 */
public void deleteIndex() throws Exception {
    RetryingRunnable runnable = new RetryingRunnable() {
        public void run() throws Exception {
            IndexWriter w = createIndexWriter(true); // open for writing and close (make empty)
            w.deleteAll();
            w.commit();
            w.close(true);

            // Remove any files the writer left behind.
            Directory dir = getIndexDir();
            for (String file : dir.listAll()) {
                if (dir.fileExists(file)) // still exists
                {
                    dir.sync(file);
                    dir.deleteFile(file);
                }
            }
            dir.close();
        }

        public boolean handleException(Throwable e) {
            // Always retry, whatever went wrong.
            return true;
        }
    };

    changeListener.onIndexReset(); // close searcher because index is deleted

    runRetryingRunnable(runnable); // delete index with retry
}