List of usage examples for org.apache.lucene.index.IndexWriter.commit()
@Override public final long commit() throws IOException
Commits all pending changes (added and deleted documents, segment merges, added indexes, etc.) to the index, and syncs all referenced index files, such that a reader will see the changes and the index updates will survive an OS or machine crash or power loss.
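Before the per-project examples below, a minimal self-contained sketch of the open/index/commit/read cycle. It assumes a recent Lucene release (8.x or later, where ByteBuffersDirectory is available and commit() returns a sequence number, matching the signature above); the class name and field values are illustrative only.

import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class CommitSketch {
    public static void main(String[] args) throws IOException {
        // In-memory directory for the sketch; a real application would use FSDirectory.open(path)
        Directory dir = new ByteBuffersDirectory();
        IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
        try (IndexWriter writer = new IndexWriter(dir, config)) {
            Document doc = new Document();
            doc.add(new TextField("title", "hello commit", Store.YES));
            writer.addDocument(doc);
            // Durably persists all pending changes and syncs the referenced
            // index files; the returned sequence number identifies this operation.
            long seqNo = writer.commit();
            // A reader opened after the commit sees the new document.
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                System.out.println("seqNo=" + seqNo + ", numDocs=" + reader.numDocs());
            }
        }
    }
}

Until commit() (or close()) is called, added and deleted documents are visible only to near-real-time readers obtained from the writer; a commit makes them durable and visible to newly opened readers.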
From source file:org.apache.nifi.provenance.lucene.DeleteIndexAction.java
License:Apache License
@Override
public File execute(final File expiredFile) throws IOException {
    // count the number of records and determine the max event id that we are deleting.
    final long numDeleted = 0;
    long maxEventId = -1L;
    try (final RecordReader reader = RecordReaders.newRecordReader(expiredFile,
            repository.getAllLogFiles(), Integer.MAX_VALUE)) {
        maxEventId = reader.getMaxEventId();
    } catch (final IOException ioe) {
        logger.warn("Failed to obtain max ID present in journal file {}", expiredFile.getAbsolutePath());
    }

    // remove the records from the index
    final List<File> indexDirs = indexConfiguration.getIndexDirectories(expiredFile);
    for (final File indexingDirectory : indexDirs) {
        final Term term = new Term(FieldNames.STORAGE_FILENAME,
                LuceneUtil.substringBefore(expiredFile.getName(), "."));

        boolean deleteDir = false;
        final EventIndexWriter writer = indexManager.borrowIndexWriter(indexingDirectory);
        try {
            final IndexWriter indexWriter = writer.getIndexWriter();
            indexWriter.deleteDocuments(term);
            indexWriter.commit();

            final int docsLeft = indexWriter.numDocs();
            deleteDir = docsLeft <= 0;
            logger.debug("After expiring {}, there are {} docs left for index {}",
                    expiredFile, docsLeft, indexingDirectory);
        } finally {
            indexManager.returnIndexWriter(writer);
        }

        // we've confirmed that all documents have been removed. Delete the index directory.
        if (deleteDir) {
            indexManager.removeIndex(indexingDirectory);
            indexConfiguration.removeIndexDirectory(indexingDirectory);

            deleteDirectory(indexingDirectory);
            logger.info("Removed empty index directory {}", indexingDirectory);
        }
    }

    // Update the minimum index to 1 more than the max Event ID in this file.
    if (maxEventId > -1L) {
        indexConfiguration.setMinIdIndexed(maxEventId + 1L);
    }

    logger.info("Deleted Indices for Expired Provenance File {} from {} index files; {} documents removed",
            expiredFile, indexDirs.size(), numDeleted);
    return expiredFile;
}
From source file:org.apache.ofbiz.content.search.DocumentIndexer.java
License:Apache License
@Override
public void run() {
    IndexWriter indexWriter = null;
    int uncommittedDocs = 0;
    while (true) {
        LuceneDocument ofbizDocument;
        try {
            // Execution will pause here until the queue receives a LuceneDocument for indexing
            ofbizDocument = documentIndexQueue.take();
        } catch (InterruptedException e) {
            Debug.logError(e, module);
            if (indexWriter != null) {
                try {
                    indexWriter.close();
                    indexWriter = null;
                } catch (IOException ioe) {
                    Debug.logError(ioe, module);
                }
            }
            break;
        }
        Term documentIdentifier = ofbizDocument.getDocumentIdentifier();
        Document document = ofbizDocument.prepareDocument(this.delegator);
        if (indexWriter == null) {
            try {
                StandardAnalyzer analyzer = new StandardAnalyzer();
                analyzer.setVersion(SearchWorker.getLuceneVersion());
                indexWriter = new IndexWriter(this.indexDirectory, new IndexWriterConfig(analyzer));
            } catch (CorruptIndexException e) {
                Debug.logError("Corrupted lucene index: " + e.getMessage(), module);
                break;
            } catch (LockObtainFailedException e) {
                Debug.logError("Could not obtain Lock on lucene index " + e.getMessage(), module);
                // TODO: put the thread to sleep waiting for the lock to be released
                break;
            } catch (IOException e) {
                Debug.logError(e.getMessage(), module);
                break;
            }
        }
        try {
            if (document == null) {
                indexWriter.deleteDocuments(documentIdentifier);
                if (Debug.infoOn()) {
                    Debug.logInfo(getName() + ": deleted Lucene document: " + ofbizDocument, module);
                }
            } else {
                indexWriter.updateDocument(documentIdentifier, document);
                if (Debug.infoOn()) {
                    Debug.logInfo(getName() + ": indexed Lucene document: " + ofbizDocument, module);
                }
            }
        } catch (Exception e) {
            Debug.logError(e, getName() + ": error processing Lucene document: " + ofbizDocument, module);
            if (documentIndexQueue.peek() == null) {
                try {
                    indexWriter.close();
                    indexWriter = null;
                } catch (IOException ioe) {
                    Debug.logError(ioe, module);
                }
            }
            continue;
        }
        uncommittedDocs++;
        if (uncommittedDocs == UNCOMMITTED_DOC_LIMIT || documentIndexQueue.peek() == null) {
            // limit reached or queue empty, time to commit
            try {
                indexWriter.commit();
            } catch (IOException e) {
                Debug.logError(e, module);
            }
            uncommittedDocs = 0;
        }
        if (documentIndexQueue.peek() == null) {
            try {
                indexWriter.close();
                indexWriter = null;
            } catch (IOException e) {
                Debug.logError(e, module);
            }
        }
    }
}
From source file:org.apache.roller.weblogger.business.search.IndexManagerImpl.java
License:Apache License
private IndexOperation getSaveIndexOperation() {
    return new WriteToIndexOperation(this) {
        public void doRun() {
            Directory dir = getIndexDirectory();
            Directory fsdir = getFSDirectory(true);

            IndexWriter writer = null;
            try {
                IndexWriterConfig config = new IndexWriterConfig(FieldConstants.LUCENE_VERSION,
                        new LimitTokenCountAnalyzer(IndexManagerImpl.getAnalyzer(),
                                IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL));
                writer = new IndexWriter(fsdir, config);
                writer.addIndexes(new Directory[] { dir });
                writer.commit();
                indexConsistencyMarker.delete();
            } catch (IOException e) {
                mLogger.error("Problem saving index to disk", e);
                // Delete the directory, since there was a problem saving the RAM contents
                getFSDirectory(true);
            } finally {
                try {
                    if (writer != null) {
                        writer.close();
                    }
                } catch (IOException e1) {
                    mLogger.warn("Unable to close IndexWriter.");
                }
            }
        }
    };
}
From source file:org.apache.servicemix.nmr.audit.lucene.LuceneIndexer.java
License:Apache License
/**
 * Drop objects from Lucene index
 */
protected void remove(String[] ids) throws IOException {
    synchronized (directory) {
        if (ids != null && ids.length > 0) {
            IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(),
                    IndexWriter.MaxFieldLength.LIMITED);
            try {
                for (int i = 0; i < ids.length; i++) {
                    writer.deleteDocuments(new Term(LuceneAuditor.FIELD_ID, ids[i]));
                }
                writer.commit();
            } finally {
                writer.close();
            }
        }
    }
}
From source file:org.apache.servicemix.nmr.audit.lucene.LuceneIndexer.java
License:Apache License
/**
 * Add object to Lucene index
 */
public void add(Document lucDoc, String id) throws IOException {
    synchronized (directory) {
        remove(id);
        IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(),
                IndexWriter.MaxFieldLength.LIMITED);
        try {
            writer.addDocument(lucDoc);
            writer.commit();
        } finally {
            writer.close();
        }
    }
}
From source file:org.apache.solr.codecs.test.testDeleteDocs.java
License:Apache License
public static void main(String[] args) {
    try {
        plaintextDir = assureDirectoryExists(new File(INDEX_ROOT_FOLDER));
        // ----------- index documents -------
        StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_4_10_0);
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_4_10_0, analyzer);
        // open the existing index, or create it if missing
        config.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
        //config.setCodec(new SimpleTextCodec());
        Properties props = new Properties();
        FileInputStream fstream = new FileInputStream(
                "C:\\work\\search_engine\\codec\\solr410\\solr_codectest\\collection1\\conf\\kvstore.properties");
        props.load(fstream);
        fstream.close();
        ONSQLKVstoreHandler.getInstance().setKVStore("omega", props);
        ONSQLCodec codec = new ONSQLCodec();
        config.setCodec(codec);
        config.setUseCompoundFile(false);
        Directory luceneDir = new ONSQLWrapperDirectory(new File(INDEX_ROOT_FOLDER));
        IndexWriter writer = new IndexWriter(luceneDir, config);
        QueryParser queryParser = new QueryParser(Version.LUCENE_4_10_0, "title", analyzer);
        String search_word = "fourth";
        Query query = queryParser.parse(search_word);
        writer.deleteDocuments(query);
        writer.commit();
        writer.close();
        searchIndex("title", search_word);
    } catch (Throwable te) {
        te.printStackTrace();
    }
}
From source file:org.apache.solr.codecs.test.testONSQLCodec.java
License:Apache License
public static void main(String[] args) {
    try {
        plaintextDir = assureDirectoryExists(new File(INDEX_ROOT_FOLDER));
        testUtil.initPropsONSQL();
        // ----------- index documents -------
        StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_4_10_1);
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_4_10_1, analyzer);
        // recreate the index on each execution
        config.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
        //config.setCodec(new SimpleTextCodec());
        ONSQLCodec codec = new ONSQLCodec();
        config.setCodec(codec);
        config.setUseCompoundFile(false);
        Directory luceneDir = FSDirectory.open(plaintextDir);
        IndexWriter writer = new IndexWriter(luceneDir, config);
        writer.addDocument(Arrays.asList(
                new TextField("title", "The title of my first document", Store.YES),
                new TextField("content", "The content of the first document", Store.YES),
                new IntField("intval", 111111, Store.YES),
                new LongField("longval", 1111111111L, Store.YES)));
        writer.addDocument(Arrays.asList(
                new TextField("title", "The tAtle of the second document", Store.YES),
                new TextField("content", "The content of the second document", Store.YES),
                new IntField("intval", 222222, Store.YES),
                new LongField("longval", 222222222L, Store.YES)));
        writer.addDocument(Arrays.asList(
                new TextField("title", "The title of the third document", Store.YES),
                new TextField("content", "The content of the third document", Store.YES),
                new IntField("intval", 333333, Store.YES),
                new LongField("longval", 3333333333L, Store.YES)));
        writer.commit();
        writer.close();
        IndexReader reader = DirectoryReader.open(luceneDir);
        // now test for docs
        if (reader.numDocs() < 3)
            throw new IOException("number of returned docs is less than indexed");
        else
            System.out.println("test passed");
        searchIndex("content", "third");
    } catch (Throwable te) {
        te.printStackTrace();
    }
}
From source file:org.apache.solr.codecs.test.testONSQLWrapperDirectory.java
License:Apache License
public static void deleteDocs(String searchField, String searchString) throws IOException, ParseException {
    System.out.println("deleting docs for '" + searchString + "'");
    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_4_10_1,
            new StandardAnalyzer(Version.LUCENE_4_10_1));
    config.setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    ONSQLCodec codec = new ONSQLCodec();
    config.setCodec(codec);
    config.setUseCompoundFile(false);
    Directory luceneDir = new ONSQLWrapperDirectory(new File(INDEX_ROOT_FOLDER));
    IndexWriter writer = new IndexWriter(luceneDir, config);
    QueryParser queryParser = new QueryParser(Version.LUCENE_4_10_1, searchField,
            new StandardAnalyzer(Version.LUCENE_4_10_1));
    Query query = queryParser.parse(searchString);
    writer.deleteDocuments(query);
    writer.commit();
    writer.close();
    luceneDir.close();
    System.out.println("docs were deleted");
}
From source file:org.apache.solr.codecs.test.testSimpleTextCodec.java
License:Apache License
public static void main(String[] args) {
    try {
        plaintextDir = assureDirectoryExists(new File(INDEX_ROOT_FOLDER, "plaintext"));
        // ----------- index documents -------
        StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_48);
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_48, analyzer);
        // recreate the index on each execution
        config.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
        config.setCodec(new SimpleTextCodec());
        config.setUseCompoundFile(false);
        Directory luceneDir = FSDirectory.open(plaintextDir);
        IndexWriter writer = new IndexWriter(luceneDir, config);
        writer.addDocument(Arrays.asList(
                new TextField("title", "The title of my first document", Store.YES),
                new TextField("content", "The content of the first document", Store.YES)));
        writer.addDocument(Arrays.asList(
                new TextField("title", "The tAtle of the second document", Store.YES),
                new TextField("content", "The content of the second document", Store.YES)));
        writer.addDocument(Arrays.asList(
                new TextField("title", "The title of the third document", Store.YES),
                new TextField("content", "The content of the third document", Store.YES)));
        writer.commit();
        writer.close();
        IndexReader reader = DirectoryReader.open(luceneDir);
        // now test for docs
        if (reader.numDocs() != 3)
            throw new IOException("number of returned docs does not match indexed count");
        else
            System.out.println("test passed");
        searchIndex("content", "third");
    } catch (Throwable te) {
        te.printStackTrace();
    }
}
From source file:org.apache.solr.core.TestArbitraryIndexDir.java
License:Apache License
@Test
public void testLoadNewIndexDir() throws IOException, ParserConfigurationException, SAXException {
    // add a doc in original index dir
    assertU(adoc("id", String.valueOf(1), "name", "name" + String.valueOf(1)));
    // create a new index dir and index.properties file
    File idxprops = new File(h.getCore().getDataDir() + SnapPuller.INDEX_PROPERTIES);
    Properties p = new Properties();
    File newDir = new File(h.getCore().getDataDir() + "index_temp");
    newDir.mkdirs();
    p.put("index", newDir.getName());
    Writer os = null;
    try {
        os = new OutputStreamWriter(new FileOutputStream(idxprops), IOUtils.CHARSET_UTF_8);
        p.store(os, "index properties");
    } catch (Exception e) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "Unable to write " + SnapPuller.INDEX_PROPERTIES, e);
    } finally {
        IOUtils.closeWhileHandlingException(os);
    }

    // add a doc in the new index dir
    Directory dir = newFSDirectory(newDir);
    IndexWriter iw = new IndexWriter(dir,
            new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)));
    Document doc = new Document();
    doc.add(new TextField("id", "2", Field.Store.YES));
    doc.add(new TextField("name", "name2", Field.Store.YES));
    iw.addDocument(doc);
    iw.commit();
    iw.close();

    // commit will cause searcher to open with the new index dir
    assertU(commit());

    // new index dir contains just 1 doc.
    assertQ("return doc with id 2", req("id:2"), "*[count(//doc)=1]");

    dir.close();
    newDir.delete();
}