List of usage examples for org.apache.lucene.index.IndexWriter#getDirectory()
public Directory getDirectory()
From source file:aos.lucene.admin.SearcherManager.java
License:Apache License
/**
 * Builds a SearcherManager on top of the given writer: opens an initial
 * searcher over the writer's directory, warms it, and registers a merged
 * segment warmer so future merges are warmed before becoming searchable.
 *
 * @param writer the index writer whose directory is searched
 * @throws IOException if the initial reader cannot be opened
 */
public SearcherManager(IndexWriter writer) throws IOException {
    this.writer = writer;
    // Open the first searcher directly over the writer's directory and
    // warm it before it serves any queries.
    final IndexReader initialReader = DirectoryReader.open(writer.getDirectory());
    currentSearcher = new IndexSearcher(initialReader);
    warm(currentSearcher);
    // Warm newly merged segments as they appear, using the outer warm().
    writer.getConfig().setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
        @Override
        public void warm(AtomicReader segmentReader) throws IOException {
            SearcherManager.this.warm(new IndexSearcher(segmentReader));
        }
    });
}
From source file:axiom.db.utils.LuceneManipulator.java
License:Open Source License
/**
 * Rebuilds the Lucene index under {@code dbDir} into a compacted copy,
 * dropping deleted documents and stale versions, then swaps the new files
 * into place.
 * <p>
 * Cleanups versus the previous revision: the unused locals {@code olddbhome},
 * {@code lmgr} (never assigned, so its shutdown branch was dead code),
 * {@code listOfMaps}, and the unused layer parsing in the copy loop were
 * removed; no behavior changes.
 *
 * @param dbDir path of the database/index home directory
 * @throws Exception if the rebuild, the segment commit, or the final file
 *         swap fails
 */
public void compress(String dbDir) throws Exception {
    // Force the transactional FSDirectory implementation before any
    // directory is opened.
    System.setProperty("org.apache.lucene.FSDirectory.class", "org.apache.lucene.store.TransFSDirectory");
    File dbhome = new File(dbDir);
    String url = getUrl(dbhome);
    FSDirectory indexDir = FSDirectory.getDirectory(dbhome, false);
    if (indexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) indexDir;
        d.setDriverClass(DRIVER_CLASS);
        d.setUrl(url);
        d.setUser(null);
        d.setPassword(null);
    }
    // The compacted index is written to a sibling "<name>_tmp" directory.
    File ndbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_tmp");
    FSDirectory nindexDir = FSDirectory.getDirectory(ndbhome, true);
    if (nindexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) nindexDir;
        d.setDriverClass(DRIVER_CLASS);
        d.setUrl(url);
        d.setUser(null);
        d.setPassword(null);
    }
    IndexSearcher searcher = null;
    IndexWriter writer = null;
    try {
        searcher = new IndexSearcher(indexDir);
        PerFieldAnalyzerWrapper a = LuceneManager.buildAnalyzer();
        writer = IndexWriterManager.getWriter(nindexDir, a, true);
        final int numDocs = searcher.getIndexReader().numDocs();
        // Pass 1: collect ids flagged deleted and, for each live id, the doc
        // number of its most recently modified version.
        HashSet deldocs = new HashSet();
        HashMap infos = new HashMap();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)) {
                deldocs.add(id);
            } else {
                Object v;
                if ((v = infos.get(id)) == null) {
                    infos.put(id, new Integer(i));
                } else {
                    // Keep whichever version has the larger last-modified
                    // string (lexicographic compare, as before).
                    final String lmod = doc.get(LuceneManager.LASTMODIFIED);
                    final String lmod_prev = searcher.doc(((Integer) v).intValue()).get("_lastmodified");
                    if (lmod_prev == null || (lmod != null && lmod.compareTo(lmod_prev) > 0)) {
                        infos.put(id, new Integer(i));
                    }
                }
            }
        }
        // Pass 2: copy only the newest, non-deleted version of each document
        // into the new index.
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)) {
                continue;
            } else if (id != null && deldocs.contains(id)) {
                continue;
            }
            Integer idx = (Integer) infos.get(id);
            if (idx != null && i != idx.intValue()) {
                continue; // an older version; the newest copy wins
            }
            Document ndoc = convertDocument(doc);
            if (ndoc != null) {
                writer.addDocument(ndoc);
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    } finally {
        if (searcher != null) {
            try {
                searcher.close();
            } catch (Exception ignored) {
                // best-effort close; the directory is closed below regardless
            }
        }
        indexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(indexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(indexDir);
    }
    // Commit the new segments inside a single JDBC transaction.
    Connection conn = null;
    boolean exceptionOccured = false;
    try {
        if (writer != null) {
            conn = DriverManager.getConnection(url);
            conn.setAutoCommit(false);
            writer.close();
            writer.flushCache();
            LuceneManager.commitSegments(null, conn, dbhome, writer.getDirectory());
            writer.finalizeTrans();
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        exceptionOccured = true;
        throw new RuntimeException(ex);
    } finally {
        if (conn != null) {
            try {
                if (!conn.getAutoCommit()) {
                    if (!exceptionOccured) {
                        conn.commit();
                    } else {
                        conn.rollback();
                    }
                }
                conn.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            conn = null;
        }
        nindexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(nindexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(nindexDir);
    }
    // Swap: delete the old plain files, move the compacted files in, then
    // remove the temporary directory.
    File[] files = dbhome.listFiles();
    for (int i = 0; i < files.length; i++) {
        if (!files[i].isDirectory()) {
            files[i].delete();
        }
    }
    files = ndbhome.listFiles();
    for (int i = 0; i < files.length; i++) {
        if (!files[i].isDirectory()) {
            File nfile = new File(dbhome, files[i].getName());
            files[i].renameTo(nfile);
        }
    }
    if (!FileUtils.deleteDir(ndbhome)) {
        throw new Exception("Could not delete " + ndbhome);
    }
}
From source file:axiom.objectmodel.dom.convert.LuceneConvertor.java
License:Open Source License
/**
 * Converts the application's Lucene index in {@code dbhome} into a new
 * format: rebuilds it document-by-document into "<name>_tmp", commits the
 * new segments through the app's transaction source, then renames the old
 * home to "<name>_old", moves the new home into place, and deletes the old
 * copy. Optionally records every converted node's path when
 * {@code this.recordNodes} is set.
 *
 * @param app    the owning application (supplies the transaction source and
 *               error logging)
 * @param dbhome the index home directory to convert
 * @throws Exception if conversion, the SQL commit, or the final directory
 *         renames fail
 */
public void convert(Application app, File dbhome) throws Exception {
    // Open the existing index via the transactional FSDirectory, wired to
    // the app's JDBC source.
    FSDirectory indexDir = FSDirectory.getDirectory(dbhome, false);
    if (indexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) indexDir;
        TransSource source = app.getTransSource();
        d.setDriverClass(source.getDriverClass());
        d.setUrl(source.getUrl());
        d.setUser(source.getUser());
        d.setPassword(source.getPassword());
    }
    // The converted index is built in "<name>_tmp"; the original will be
    // parked at "<name>_old" during the final swap.
    File ndbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_tmp");
    File olddbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_old");
    FSDirectory nindexDir = FSDirectory.getDirectory(ndbhome, true);
    if (nindexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) nindexDir;
        TransSource source = app.getTransSource();
        d.setDriverClass(source.getDriverClass());
        d.setUrl(source.getUrl());
        d.setUser(source.getUser());
        d.setPassword(source.getPassword());
    }
    IndexSearcher searcher = null;
    IndexWriter writer = null;
    LuceneManager lmgr = null;
    try {
        searcher = new IndexSearcher(indexDir);
        PerFieldAnalyzerWrapper a = LuceneManager.buildAnalyzer();
        writer = IndexWriterManager.getWriter(nindexDir, a, true);
        final int numDocs = searcher.getIndexReader().numDocs();
        // Pass 1: collect ids flagged deleted, and for each live id the doc
        // number of its most recently modified version.
        HashSet deldocs = new HashSet();
        HashMap infos = new HashMap();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            // layer is parsed but not used in this pass (the layer check is
            // commented out below); kept as-is.
            String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE);
            int layer = -1;
            try {
                layer = Integer.parseInt(layerStr);
            } catch (Exception ex) {
                layer = -1;
            }
            // Composite key: id + separator + layer-of-save.
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)/* && layer == DbKey.LIVE_LAYER*/) {
                deldocs.add(id);
            } else {
                Object v;
                if ((v = infos.get(id)) == null) {
                    infos.put(id, new Integer(i));
                } else {
                    // Lexicographic compare of last-modified strings decides
                    // which version is newest.
                    final String lmod = doc.get(LuceneManager.LASTMODIFIED);
                    final String lmod_prev = searcher.doc(((Integer) v).intValue()).get("_lastmodified");
                    if (lmod_prev == null || (lmod != null && lmod.compareTo(lmod_prev) > 0)) {
                        infos.put(id, new Integer(i));
                    }
                }
            }
        }
        // Pass 2: convert and copy only the newest non-deleted version of
        // each document; optionally remember the raw document maps.
        ArrayList listOfMaps = new ArrayList();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE);
            int layer = -1;
            try {
                layer = Integer.parseInt(layerStr);
            } catch (Exception ex) {
                layer = -1;
            }
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)) {
                continue;
            } else if (id != null && deldocs.contains(id)/* && layer == DbKey.LIVE_LAYER*/) {
                continue;
            }
            Integer idx = (Integer) infos.get(id);
            if (idx != null && i != idx.intValue()) {
                continue;
            }
            Document ndoc = convertDocument(doc);
            if (this.recordNodes) {
                listOfMaps.add(LuceneManager.luceneDocumentToMap(doc));
            }
            if (ndoc != null) {
                writer.addDocument(ndoc);
            }
        }
        // Optionally rebuild the node-path map from the recorded documents.
        if (this.recordNodes) {
            lmgr = new LuceneManager(this.app, false, true);
            this.allNodes = new HashMap();
            final int size = listOfMaps.size();
            for (int i = 0; i < size; i++) {
                HashMap m = (HashMap) listOfMaps.get(i);
                INode n = lmgr.mapToNode(m);
                this.allNodes.put(n.getID(), getPath(n));
                n = null;
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    } finally {
        if (searcher != null) {
            try {
                searcher.close();
            } catch (Exception ex) {
                app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex);
            }
        }
        if (lmgr != null) {
            lmgr.shutdown();
            lmgr = null;
        }
        indexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(indexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(indexDir);
    }
    // Commit the new segments and schema changes in one JDBC transaction.
    Connection conn = null;
    boolean exceptionOccured = false;
    try {
        if (writer != null) {
            TransSource ts = app.getTransSource();
            conn = ts.getConnection();
            DatabaseMetaData dmd = conn.getMetaData();
            // Ensure the Lucene table has a "version" column; add it if the
            // schema predates versioning.
            ResultSet rs = dmd.getColumns(null, null, "Lucene", "version");
            if (!rs.next()) {
                final String alterTbl = "ALTER TABLE Lucene ADD version INT NOT NULL DEFAULT 1";
                PreparedStatement pstmt = null;
                try {
                    pstmt = conn.prepareStatement(alterTbl);
                    pstmt.execute();
                } catch (SQLException sqle) {
                    app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), sqle);
                } finally {
                    if (pstmt != null) {
                        pstmt.close();
                        pstmt = null;
                    }
                }
            }
            rs.close();
            rs = null;
            writer.close();
            writer.flushCache();//TODO:writer.writeSegmentsFile();
            LuceneManager.commitSegments(conn, app, writer.getDirectory());
            writer.finalizeTrans();
            this.updateSQL(conn);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        exceptionOccured = true;
        throw new RuntimeException(ex);
    } finally {
        if (conn != null) {
            try {
                if (!conn.getAutoCommit()) {
                    if (!exceptionOccured) {
                        conn.commit();
                    } else {
                        conn.rollback();
                    }
                }
                conn.close();
            } catch (Exception ex) {
                app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex);
            }
            conn = null;
        }
        nindexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(nindexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(nindexDir);
    }
    // Swap directories: old -> "_old", new -> live.
    if (!dbhome.renameTo(olddbhome)) {
        throw new Exception("Could not move the old version of the db into " + olddbhome);
    }
    if (!ndbhome.renameTo(dbhome)) {
        throw new Exception("Could not move the newer version of the db into " + dbhome);
    }
    // NOTE(review): this moves the old blob dir into a path under ndbhome,
    // which was just renamed to dbhome — the rename target looks suspicious
    // and its return value is ignored. Verify blobs survive conversion.
    File oldBlobDir = new File(olddbhome, "blob");
    File newBlobDir = new File(ndbhome, "blob");
    oldBlobDir.renameTo(newBlobDir);
    if (!FileUtils.deleteDir(olddbhome)) {
        throw new Exception("Could not delete the old version of the db at " + olddbhome);
    }
}
From source file:com.baidu.rigel.biplatform.tesseract.isservice.index.service.IndexWriterFactory.java
License:Open Source License
/**
 * Commits and closes the cached {@link IndexWriter} for {@code idxPath}
 * (if any) and removes it from the cache. If closing fails, the failure is
 * logged and the index lock is released so the path remains usable.
 *
 * @param idxPath index path whose writer should be destroyed
 * @throws IOException if the lock check/release itself fails
 */
public static void destoryWriters(String idxPath) throws IOException {
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "destoryWriters",
            "[idxPath:" + idxPath + "]"));
    // Single get() instead of containsKey()+get(): one lookup, and no gap
    // between the check and the read.
    IndexWriter indexWriter = INSTANCE.idxWriterMaps.get(idxPath);
    if (indexWriter != null) {
        try {
            indexWriter.commit();
            indexWriter.close();
        } catch (IOException e) {
            // Previously swallowed silently; record the failure before
            // attempting to free the write lock.
            LOGGER.error("failed to commit/close index writer for [" + idxPath + "]", e);
            if (IndexWriter.isLocked(indexWriter.getDirectory())) {
                IndexWriter.unlock(indexWriter.getDirectory());
            }
        }
        INSTANCE.idxWriterMaps.remove(idxPath);
    }
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "destoryWriters",
            "[idxPath:" + idxPath + "]"));
}
From source file:com.baidu.rigel.biplatform.tesseract.isservice.index.service.IndexWriterFactory.java
License:Open Source License
/**
 * Commits, closes, and evicts every cached {@link IndexWriter}. Failures on
 * close are logged and the corresponding index lock is released; the entry
 * is removed from the cache in all cases.
 *
 * @throws IOException if a lock check/release fails
 */
public static synchronized void destoryAllWriters() throws IOException {
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "destoryAllWriters", "[no param]"));
    // Snapshot the keys first: removing entries while iterating the live
    // keySet() throws ConcurrentModificationException on a plain HashMap.
    String[] paths = INSTANCE.idxWriterMaps.keySet().toArray(new String[0]);
    for (String key : paths) {
        IndexWriter writer = INSTANCE.idxWriterMaps.get(key);
        if (writer != null) {
            try {
                writer.commit();
                writer.close();
            } catch (IOException e) {
                // Previously swallowed silently; record the failure before
                // attempting to free the write lock.
                LOGGER.error("failed to commit/close index writer for [" + key + "]", e);
                if (IndexWriter.isLocked(writer.getDirectory())) {
                    IndexWriter.unlock(writer.getDirectory());
                }
            }
        }
        INSTANCE.idxWriterMaps.remove(key);
    }
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "destoryAllWriters", "[no param]"));
}
From source file:com.bdaum.zoom.lal.internal.LireActivator.java
License:Open Source License
public void releaseIndexWriter(File indexPath, boolean force) throws CorruptIndexException, IOException { synchronized (indexWriterMap) { IndexWriterEntry entry = indexWriterMap.get(indexPath); if (entry != null) { if (--entry.count <= 0 || force) { try { IndexWriter writer = entry.writer; @SuppressWarnings("resource") Directory directory = writer.getDirectory(); writer.close();//www .j av a 2 s. c o m directory.deleteFile("write.lock"); //$NON-NLS-1$ directory.close(); } finally { indexWriterMap.remove(indexPath); } } } } }
From source file:com.bugull.mongo.lucene.BuguIndex.java
License:Apache License
/**
 * Shuts the indexing subsystem down: stops the executor and scheduler,
 * invalidates cluster state, then commits and closes every cached writer,
 * releasing any leftover index locks.
 */
public void close() {
    // Stop background work first so no new writes arrive while flushing.
    ThreadUtil.safeClose(executor);
    ThreadUtil.safeClose(scheduler);
    if (clusterConfig != null) {
        clusterConfig.invalidate();
    }
    Map<String, IndexWriter> map = IndexWriterCache.getInstance().getAll();
    for (IndexWriter writer : map.values()) {
        if (writer == null) {
            continue;
        }
        Directory dir = writer.getDirectory();
        try {
            writer.commit();
            writer.close(true);
        } catch (IOException ex) {
            // CorruptIndexException extends IOException and both branches
            // logged the same message, so one handler covers both.
            logger.error("Can not commit and close the lucene index", ex);
        } finally {
            // Always try to free the lock, even when commit/close failed.
            try {
                if (dir != null && IndexWriter.isLocked(dir)) {
                    IndexWriter.unlock(dir);
                }
            } catch (IOException ex) {
                logger.error("Can not unlock the lucene index", ex);
            }
        }
    }
}
From source file:com.fuerve.villageelder.indexing.IndexerTest.java
License:Apache License
/**
 * Test method for {@link com.fuerve.villageelder.indexing.Indexer#Indexer(org.apache.lucene.store.Directory, org.apache.lucene.store.Directory)}.
 * <p>
 * Uses reflection to inspect the private state of the {@code IndexManager}
 * held by an {@code Indexer} across its lifecycle: construction,
 * {@code initializeIndex()}, and {@code dispose()}.
 */
@Test
public final void testIndexerDirectoryDirectory() throws Exception {
    // In-memory directories so the test touches no disk.
    RAMDirectory indexDirectory = new RAMDirectory();
    RAMDirectory taxonomyDirectory = new RAMDirectory();
    // Reflective handles to IndexManager's private fields (names must match
    // the declarations exactly) plus Indexer's private indexManager field.
    Field idField = IndexManager.class.getDeclaredField("indexDirectory");
    Field tdField = IndexManager.class.getDeclaredField("taxonomyDirectory");
    Field iwField = IndexManager.class.getDeclaredField("indexWriter");
    Field twField = IndexManager.class.getDeclaredField("taxonomyWriter");
    Field stField = IndexManager.class.getDeclaredField("stringDirectories");
    Field initField = IndexManager.class.getDeclaredField("initialized");
    Field imField = Indexer.class.getDeclaredField("indexManager");
    idField.setAccessible(true);
    tdField.setAccessible(true);
    iwField.setAccessible(true);
    twField.setAccessible(true);
    stField.setAccessible(true);
    initField.setAccessible(true);
    imField.setAccessible(true);
    Indexer target = new Indexer(indexDirectory, taxonomyDirectory);
    IndexManager testManager = (IndexManager) imField.get(target);
    // TEST 1: A newly constructed Indexer believes itself
    // to be uninitialized, as indicated by the 'initialized'
    // field.
    boolean initActual = initField.getBoolean(testManager);
    assertFalse(initActual);
    target.initializeIndex();
    // Snapshot the manager's state after initialization.
    Directory idActual = (Directory) idField.get(testManager);
    Directory tdActual = (Directory) tdField.get(testManager);
    IndexWriter iwActual = (IndexWriter) iwField.get(testManager);
    TaxonomyWriter twActual = (TaxonomyWriter) twField.get(testManager);
    boolean stActual = (Boolean) stField.get(testManager);
    initActual = initField.getBoolean(testManager);
    // TEST 2: The Indexer's index directory is what was passed in.
    assertEquals(indexDirectory, idActual);
    // TEST 3: The Indexer's taxonomy directory is what was passed in.
    assertEquals(taxonomyDirectory, tdActual);
    // TEST 4: The IndexWriter's directory is what was passed in.
    assertEquals(indexDirectory, iwActual.getDirectory());
    // TEST 5: The taxonomy index is initialized afresh with no categories
    // in it.
    assertEquals(1, twActual.getSize());
    // TEST 6: An Indexer constructed with Directories does not
    // believe that it needs to construct new Directories from string
    // pathnames.
    assertEquals(false, stActual);
    // TEST 7: The Indexer's initialized field is true after it
    // has been initialized.
    assertEquals(true, initActual);
    target.dispose();
    // TEST 8: The Indexer's index writer is null after it has
    // been disposed.
    iwActual = (IndexWriter) iwField.get(testManager);
    assertEquals(null, iwActual);
    // TEST 9: The Indexer's taxonomy writer is null after it
    // has been disposed.
    twActual = (TaxonomyWriter) twField.get(testManager);
    assertEquals(null, twActual);
    // TEST 10: The Indexer's initialized flag is false after
    // it has been disposed.
    initActual = initField.getBoolean(testManager);
    assertEquals(false, initActual);
}
From source file:com.fuerve.villageelder.indexing.IndexManagerTest.java
License:Apache License
/**
 * Test method for {@link com.fuerve.villageelder.indexing.IndexManager#IndexManager(org.apache.lucene.store.Directory, org.apache.lucene.store.Directory)}.
 * <p>
 * Uses reflection to inspect an {@code IndexManager}'s private state across
 * its lifecycle: construction, {@code initializeIndex()}, and
 * {@code dispose()}.
 *
 * @throws Exception on reflection or index failures
 */
@Test
public final void testIndexManagerDirectoryDirectory() throws Exception {
    // In-memory directories so the test touches no disk.
    RAMDirectory indexDirectory = new RAMDirectory();
    RAMDirectory taxonomyDirectory = new RAMDirectory();
    // Reflective handles to the private fields under test (names must match
    // the declarations exactly).
    Field idField = IndexManager.class.getDeclaredField("indexDirectory");
    Field tdField = IndexManager.class.getDeclaredField("taxonomyDirectory");
    Field iwField = IndexManager.class.getDeclaredField("indexWriter");
    Field twField = IndexManager.class.getDeclaredField("taxonomyWriter");
    Field stField = IndexManager.class.getDeclaredField("stringDirectories");
    Field initField = IndexManager.class.getDeclaredField("initialized");
    idField.setAccessible(true);
    tdField.setAccessible(true);
    iwField.setAccessible(true);
    twField.setAccessible(true);
    stField.setAccessible(true);
    initField.setAccessible(true);
    IndexManager target = new IndexManager(indexDirectory, taxonomyDirectory);
    // TEST 1: A newly constructed IndexManager believes itself
    // to be uninitialized, as indicated by the 'initialized'
    // field.
    boolean initActual = initField.getBoolean(target);
    assertFalse(initActual);
    target.initializeIndex();
    // Snapshot the manager's state after initialization.
    Directory idActual = (Directory) idField.get(target);
    Directory tdActual = (Directory) tdField.get(target);
    IndexWriter iwActual = (IndexWriter) iwField.get(target);
    TaxonomyWriter twActual = (TaxonomyWriter) twField.get(target);
    boolean stActual = (Boolean) stField.get(target);
    initActual = initField.getBoolean(target);
    // TEST 2: The IndexManager's index directory is what was passed in.
    assertEquals(indexDirectory, idActual);
    // TEST 3: The IndexManager's taxonomy directory is what was passed in.
    assertEquals(taxonomyDirectory, tdActual);
    // TEST 4: The IndexWriter's directory is what was passed in.
    assertEquals(indexDirectory, iwActual.getDirectory());
    // TEST 5: The taxonomy index is initialized afresh with no categories
    // in it.
    assertEquals(1, twActual.getSize());
    // TEST 6: An IndexManager constructed with Directories does not
    // believe that it needs to construct new Directories from string
    // pathnames.
    assertEquals(false, stActual);
    // TEST 7: The IndexManager's initialized field is true after it
    // has been initialized.
    assertEquals(true, initActual);
    target.dispose();
    // TEST 8: The IndexManager's index writer is null after it has
    // been disposed.
    iwActual = (IndexWriter) iwField.get(target);
    assertEquals(null, iwActual);
    // TEST 9: The IndexManager's taxonomy writer is null after it
    // has been disposed.
    twActual = (TaxonomyWriter) twField.get(target);
    assertEquals(null, twActual);
    // TEST 10: The IndexManager's initialized flag is false after
    // it has been disposed.
    initActual = initField.getBoolean(target);
    assertEquals(false, initActual);
}
From source file:com.gemstone.gemfire.cache.lucene.internal.directory.DumpDirectoryFiles.java
License:Apache License
/**
 * Function entry point: exports the files of every Lucene index repository
 * for the given region into per-bucket subdirectories under the requested
 * export location.
 * <p>
 * Expects two String arguments: the export location and the index name.
 */
@Override
public void execute(FunctionContext context) {
    RegionFunctionContext ctx = (RegionFunctionContext) context;
    // Validate the argument array before touching anything else.
    if (!(context.getArguments() instanceof String[])) {
        throw new IllegalArgumentException("Arguments should be a string array");
    }
    String[] args = (String[]) context.getArguments();
    if (args.length != 2) {
        throw new IllegalArgumentException("Expected 2 arguments: exportLocation, indexName");
    }
    final String exportLocation = args[0];
    final String indexName = args[1];
    final Region<Object, Object> region = ctx.getDataSet();
    LuceneService service = LuceneServiceProvider.get(ctx.getDataSet().getCache());
    InternalLuceneIndex index = (InternalLuceneIndex) service.getIndex(indexName, region.getFullPath());
    if (index == null) {
        throw new IllegalStateException("Index not found for region " + region + " index " + indexName);
    }
    final RepositoryManager repoManager = index.getRepositoryManager();
    try {
        // One export directory per bucket repository, named
        // "<indexName>_<regionPath>" with slashes flattened to underscores.
        for (IndexRepository repo : repoManager.getRepositories(ctx)) {
            final IndexWriter writer = repo.getWriter();
            RegionDirectory directory = (RegionDirectory) writer.getDirectory();
            FileSystem fs = directory.getFileSystem();
            String bucketName = (index.getName() + "_" + repo.getRegion().getFullPath()).replace("/", "_");
            File bucketDirectory = new File(exportLocation, bucketName);
            bucketDirectory.mkdirs();
            fs.export(bucketDirectory);
        }
        context.getResultSender().lastResult(null);
    } catch (BucketNotFoundException e) {
        throw new FunctionException(e);
    }
}