List of usage examples for org.apache.lucene.index.IndexWriter.commit()
@Override public final long commit() throws IOException
Commits all pending changes (added and deleted documents, segment merges, added indexes, etc.) to the index, and syncs all referenced index files, such that a reader will see the changes and the index updates will survive an OS or machine crash or power loss.
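Before the project-specific examples below, here is a minimal, self-contained sketch of the call (not taken from any of the listed sources). It assumes a recent Lucene release (6.2 or later, where commit() returns a sequence number, matching the signature above) and a hypothetical index path; adjust the Directory and analyzer to your setup.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

import java.io.IOException;
import java.nio.file.Paths;

public class CommitExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical index location; replace with your own path.
        Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
        IndexWriterConfig cfg = new IndexWriterConfig(new StandardAnalyzer());
        try (IndexWriter writer = new IndexWriter(dir, cfg)) {
            Document doc = new Document();
            doc.add(new StringField("id", "doc-1", Field.Store.YES));
            writer.addDocument(doc);
            // Make the pending change durable and visible to newly opened readers.
            long sequenceNumber = writer.commit();
            System.out.println("commit() returned sequence number " + sequenceNumber);
        } // close() also commits remaining pending changes by default (commitOnClose)
    }
}

An explicit commit() is mainly useful while the writer stays open: it makes the changes durable and visible to readers opened afterwards without closing the writer, which is the pattern most of the examples below follow.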
From source file: org.eu.bitzone.Leia.java
License: Apache License
/**
 * Optimize the index.
 */
public void optimize(final Object dialog) {
    final Thread t = new Thread() {
        @Override
        public void run() {
            IndexWriter iw = null;
            final Object optimizeButton = find(dialog, "optimizeButton");
            setBoolean(optimizeButton, "enabled", false);
            final Object closeButton = find(dialog, "closeButton");
            setBoolean(closeButton, "enabled", false);
            final Object msg = find(dialog, "msg");
            final Object stat = find(dialog, "stat");
            setString(stat, "text", "Running ...");
            final PanelPrintWriter ppw = new PanelPrintWriter(Leia.this, msg);
            final boolean useCompound = getBoolean(find(dialog, "optCompound"), "selected");
            final boolean expunge = getBoolean(find(dialog, "optExpunge"), "selected");
            final boolean keep = getBoolean(find(dialog, "optKeepAll"), "selected");
            final boolean useLast = getBoolean(find(dialog, "optLastCommit"), "selected");
            final Object tiiSpin = find(dialog, "tii");
            final Object segnumSpin = find(dialog, "segnum");
            final int tii = Integer.parseInt(getString(tiiSpin, "text"));
            final int segnum = Integer.parseInt(getString(segnumSpin, "text"));
            try {
                if (is != null) {
                    is = null;
                }
                if (ir != null) {
                    ir.close();
                }
                if (ar != null) {
                    ar.close();
                }
                IndexDeletionPolicy policy;
                if (keep) {
                    policy = new KeepAllIndexDeletionPolicy();
                } else {
                    policy = new KeepLastIndexDeletionPolicy();
                }
                final IndexWriterConfig cfg = new IndexWriterConfig(LV, new WhitespaceAnalyzer(LV));
                if (!useLast) {
                    final IndexCommit ic = ((DirectoryReader) ir).getIndexCommit();
                    if (ic != null) {
                        cfg.setIndexCommit(ic);
                    }
                }
                cfg.setIndexDeletionPolicy(policy);
                cfg.setTermIndexInterval(tii);
                final MergePolicy p = cfg.getMergePolicy();
                cfg.setUseCompoundFile(useCompound);
                if (useCompound) {
                    p.setNoCFSRatio(1.0);
                }
                cfg.setInfoStream(ppw);
                iw = new IndexWriter(dir, cfg);
                final long startSize = Util.calcTotalFileSize(pName, dir);
                final long startTime = System.currentTimeMillis();
                if (expunge) {
                    iw.forceMergeDeletes();
                } else {
                    if (segnum > 1) {
                        iw.forceMerge(segnum, true);
                    } else {
                        iw.forceMerge(1, true);
                    }
                }
                iw.commit();
                final long endTime = System.currentTimeMillis();
                final long endSize = Util.calcTotalFileSize(pName, dir);
                final long deltaSize = startSize - endSize;
                final String sign = deltaSize < 0 ? " Increased " : " Reduced ";
                final String sizeMsg = sign + Util.normalizeSize(Math.abs(deltaSize))
                        + Util.normalizeUnit(Math.abs(deltaSize));
                final String timeMsg = String.valueOf(endTime - startTime) + " ms";
                showStatus(sizeMsg + " in " + timeMsg);
                iw.close();
                setString(stat, "text", "Finished OK.");
            } catch (final Exception e) {
                e.printStackTrace(ppw);
                setString(stat, "text", "ERROR - aborted.");
                errorMsg("ERROR optimizing: " + e.toString());
                if (iw != null) {
                    try {
                        iw.close();
                    } catch (final Exception e1) {
                    }
                }
            } finally {
                setBoolean(closeButton, "enabled", true);
            }
            try {
                actionReopen();
                is = new IndexSearcher(ir);
                // add dialog again
                add(dialog);
            } catch (final Exception e) {
                e.printStackTrace(ppw);
                errorMsg("ERROR reopening after optimize:\n" + e.getMessage());
            }
        }
    };
    t.start();
}
From source file: org.exist.indexing.lucene.LuceneIndexWorker.java
License: Open Source License
/**
 * Optimize the Lucene index by merging all segments into a single one. This
 * may take a while and write operations will be blocked during the optimize.
 */
public void optimize() {
    IndexWriter writer = null;
    try {
        writer = index.getWriter(true);
        writer.forceMerge(1, true);
        writer.commit();
    } catch (IOException e) {
        LOG.warn("An exception was caught while optimizing the lucene index: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
    }
}
From source file: org.exist.xquery.modules.mpeg7.net.semanticmetadata.lire.indexing.tools.ProximityHashingIndexor.java
License: Open Source License
public void run() {
    // do it ...
    try {
        IndexWriter indexWriter = LuceneUtils.createIndexWriter(indexPath, overwriteIndex,
                LuceneUtils.AnalyzerType.WhitespaceAnalyzer);
        for (Iterator<File> iterator = inputFiles.iterator(); iterator.hasNext();) {
            File inputFile = iterator.next();
            if (verbose) System.out.println("Processing " + inputFile.getPath() + ".");
            if (verbose) System.out.println("Counting images.");
            run = 0;
            readFile(indexWriter, inputFile);
            if (verbose) System.out.printf("%d images found in the data file.\n", docCount);
            int numReps = 1000; // TODO: clever selection.
            if (numReps > docCount / 10) numReps = docCount / 10;
            if (verbose) System.out.printf("Selecting %d representative images for hashing.\n", numReps);
            representativesID = new HashSet<Integer>(numReps);
            while (representativesID.size() < numReps) {
                representativesID.add((int) Math.floor(Math.random() * (docCount - 1)));
            }
            representatives = new ArrayList<LireFeature>(numReps);
            docCount = 0;
            run = 1;
            if (verbose) System.out.println("Now getting representatives from the data file.");
            readFile(indexWriter, inputFile);
            docCount = 0;
            run = 2;
            if (verbose) System.out.println("Finally we start the indexing process, please wait ...");
            readFile(indexWriter, inputFile);
            if (verbose) System.out.println("Indexing finished.");
        }
        indexWriter.commit();
        indexWriter.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file: org.fcrepo.migration.foxml.DirectoryScanningIDResolver.java
License: Apache License
/**
 * Directory scanning ID resolver.
 * @param cachedIndexDir the index directory. If it exists, the old cache will be used; if it doesn't, a new
 *                       cache will be built at that location. If it is null, a new cache will be built in
 *                       the temp file space and deleted upon application shutdown.
 * @param dsRoot the datastream root
 * @throws IOException IO exception
 */
public DirectoryScanningIDResolver(final File cachedIndexDir, final File dsRoot) throws IOException {
    final File indexDir;
    if (cachedIndexDir == null) {
        final File temp = File.createTempFile("tempfile", "basedir");
        temp.delete();
        temp.mkdir();
        indexDir = new File(temp, "index");
        LOGGER.info("No index directory specified. Creating temporary index at \""
                + indexDir.getAbsolutePath() + "\".");
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    LOGGER.info("Deleting index directory at \"" + indexDir.getAbsolutePath() + "\"...");
                    FileUtils.deleteDirectory(indexDir);
                } catch (IOException e) {
                    LOGGER.error("Unable to delete index directory at \"" + indexDir.getAbsolutePath() + "\"!", e);
                    e.printStackTrace();
                }
            }
        }));
    } else {
        indexDir = cachedIndexDir;
    }
    final Directory dir = FSDirectory.open(indexDir);
    if (indexDir.exists()) {
        LOGGER.warn("Index exists at \"" + indexDir.getPath() + "\" and will be used. "
                + "To clear index, simply delete this directory and re-run the application.");
    } else {
        final Analyzer analyzer = new StandardAnalyzer();
        final IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_4_10_3, analyzer);
        iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
        final IndexWriter writer = new IndexWriter(dir, iwc);
        LOGGER.info("Building an index of all the datastreams in \"" + dsRoot.getPath() + "\"...");
        indexDatastreams(writer, dsRoot);
        writer.commit();
        writer.close();
    }
    final IndexReader reader = DirectoryReader.open(FSDirectory.open(indexDir));
    searcher = new IndexSearcher(reader);
}
From source file: org.frontcache.cache.impl.LuceneIndexManager.java
License: Apache License
/**
 * Writes webResponse to index
 * @param response
 * @throws IOException
 */
void indexDoc(WebResponse response) throws IOException {
    IndexWriter iWriter = getIndexWriter();
    Document doc = new Document();
    String url = response.getUrl();
    if (null == url) {
        logger.error("URL can't be null during index time for " + response);
        return;
    }
    doc.add(new StringField(URL_FIELD, url, Field.Store.YES));
    doc.add(new StringField(DOMAIN_FIELD, response.getDomain(), Field.Store.YES));
    if (null != response.getContent())
        doc.add(new StoredField(BIN_FIELD, response.getContent()));
    // doc.add(new NumericDocValuesField(EXPIRE_DATE_FIELD, response.getExpireTimeMillis()));
    // TODO: store map ?
    doc.add(new StoredField(JSON_FIELD, gson.toJson(response), JSON_TYPE));
    for (String tag : response.getTags())
        doc.add(new StringField(TAGS_FIELD, tag, Field.Store.NO)); // tag is a StringField for exact match
    try {
        iWriter.updateDocument(new Term(URL_FIELD, url), doc);
    } catch (IOException e) {
        logger.error("Error while in Lucene index operation: {}", e.getMessage(), e);
    } finally {
        try {
            iWriter.commit();
        } catch (IOException ioEx) {
            logger.error("Error while committing changes to Lucene index: {}", ioEx.getMessage(), ioEx);
        }
    }
}
From source file: org.frontcache.cache.impl.LuceneIndexManager.java
License: Apache License
/**
 * Removes documents by url or tags
 * @param urlOrTag
 */
public void delete(String domain, String urlOrTag) {
    IndexWriter iWriter = null;
    try {
        iWriter = getIndexWriter();
        if (iWriter == null) {
            return;
        }
    } catch (Exception e1) {
        logger.debug("Error during getting indexWriter. " + e1.getMessage());
        return;
    }
    try {
        // Query domainQuery = new TermQuery(new Term(DOMAIN_FIELD, domain));
        Query urlQuery = new TermQuery(new Term(URL_FIELD, urlOrTag));
        Query tagsQuery = new TermQuery(new Term(TAGS_FIELD, urlOrTag));
        BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
        // booleanQuery.add(domainQuery, Occur.MUST);
        booleanQuery.add(urlQuery, Occur.SHOULD);
        booleanQuery.add(tagsQuery, Occur.SHOULD);
        long count = iWriter.deleteDocuments(booleanQuery.build());
        logger.debug("Removed {} documents for {}.", count, urlOrTag);
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    } finally {
        try {
            iWriter.commit();
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
    }
}
From source file: org.frontcache.cache.impl.LuceneIndexManager.java
License: Apache License
public void delete(String urlOrTag) {
    IndexWriter iWriter = null;
    try {
        iWriter = getIndexWriter();
        if (iWriter == null) {
            return;
        }
    } catch (Exception e1) {
        logger.debug("Error during getting indexWriter. " + e1.getMessage());
        return;
    }
    try {
        Query urlQuery = new TermQuery(new Term(URL_FIELD, urlOrTag));
        Query tagsQuery = new TermQuery(new Term(TAGS_FIELD, urlOrTag));
        BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
        booleanQuery.add(urlQuery, Occur.SHOULD);
        booleanQuery.add(tagsQuery, Occur.SHOULD);
        long count = iWriter.deleteDocuments(booleanQuery.build());
        logger.debug("Removed {} documents for {}.", count, urlOrTag);
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    } finally {
        try {
            iWriter.commit();
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
    }
}
From source file: org.frontcache.cache.impl.LuceneIndexManager.java
License: Apache License
/**
 * Removes all documents for the given domain
 * @param domain
 */
public void deleteAll(String domain) {
    if (null == domain) {
        logger.error("Can't delete all with null domain");
        return;
    }
    IndexWriter iWriter = null;
    try {
        iWriter = getIndexWriter();
        if (iWriter == null) {
            return;
        }
    } catch (Exception e1) {
        logger.debug("Error during getting indexWriter. " + e1.getMessage());
        return;
    }
    try {
        logger.debug("Removing all documents for {}.", domain);
        Query domainQuery = new TermQuery(new Term(DOMAIN_FIELD, domain));
        long count = iWriter.deleteDocuments(domainQuery);
        logger.debug("Removed {} documents for {}.", count, domain);
    } catch (IOException e) {
        logger.error(e.getMessage(), e);
    } finally {
        try {
            iWriter.commit();
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        }
    }
}
From source file: org.genemania.plugin.completion.CompletionPanel.java
License: Open Source License
private GeneCompletionProvider2 createEmptyProvider() {
    try {
        Directory directory = new RAMDirectory();
        Analyzer analyzer = LuceneGeneMediator.createDefaultAnalyzer();
        IndexWriter writer = new IndexWriter(directory, analyzer, MaxFieldLength.UNLIMITED);
        writer.commit();
        writer.close();
        IndexSearcher searcher = new IndexSearcher(directory, true);
        return new GeneCompletionProvider2(searcher, analyzer, new Organism());
    } catch (IOException e) {
        return null;
    }
}
From source file: org.genemania.plugin.data.lucene.LuceneDataSet.java
License: Open Source License
private Directory createEmptyIndex() {
    RAMDirectory directory = new RAMDirectory();
    try {
        IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(Version.LUCENE_29), true,
                MaxFieldLength.UNLIMITED);
        writer.commit();
        writer.close();
    } catch (CorruptIndexException e) {
    } catch (LockObtainFailedException e) {
    } catch (IOException e) {
    }
    return directory;
}