Usage examples for org.apache.lucene.index.IndexWriter.getNextMerge()
public synchronized MergePolicy.OneMerge getNextMerge()
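Before the project examples below, here is a minimal, self-contained sketch of the usual pattern around getNextMerge(): let the merge policy register merges, then pull them off the writer's pending-merge queue one at a time until the method returns null. The class name, field values, and the use of NoMergeScheduler and setMaxBufferedDocs are illustrative assumptions, not taken from the source files listed on this page; the sketch targets the Lucene 4.6 API that those sources reference.

import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NoMergeScheduler;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class GetNextMergeSketch {

    public static void main(String[] args) throws Exception {
        Directory directory = new RAMDirectory();
        IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_46,
                new WhitespaceAnalyzer(Version.LUCENE_46));
        // Keep merges from running in the background so the merges requested
        // by the merge policy stay pending for getNextMerge() to hand out.
        conf.setMergeScheduler(NoMergeScheduler.INSTANCE);
        // Flush tiny segments so the merge policy actually requests merges.
        conf.setMaxBufferedDocs(2);

        IndexWriter writer = new IndexWriter(directory, conf);
        for (int i = 0; i < 100; i++) {
            Document doc = new Document();
            doc.add(new TextField("contents", "document " + i, Field.Store.YES));
            writer.addDocument(doc);
        }
        writer.maybeMerge(); // ask the merge policy to register any necessary merges

        // Drain the pending-merge queue; getNextMerge() returns null when it is empty.
        MergePolicy.OneMerge merge;
        while ((merge = writer.getNextMerge()) != null) {
            writer.merge(merge); // run the merge on the calling thread
        }

        writer.close();
        directory.close();
    }
}

The drain loop is the same pattern the SharedMergeScheduler example at the bottom of this page uses; the AOS examples instead call writer.merge(writer.getNextMerge()) once, running only the next pending merge.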
From source file: aos.lucene.search.msc.ScoreTest.java
License: Apache License
private void indexSingleFieldDocs(Field[] fields) throws Exception {
    IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(Version.LUCENE_46),
            IndexWriter.MaxFieldLength.UNLIMITED);
    for (Field f : fields) {
        Document doc = new Document();
        doc.add(f);
        writer.addDocument(doc);
    }
    writer.merge(writer.getNextMerge());
    writer.close();
}
From source file: aos.lucene.tools.BerkeleyDbIndexer.java
License: Apache License
public static void main(String[] args) throws IOException, DatabaseException { if (args.length != 1) { System.err.println("Usage: BerkeleyDbIndexer <index dir>"); System.exit(-1);/*from w ww. j ava 2s .c o m*/ } File indexFile = new File(args[0]); if (indexFile.exists()) { File[] files = indexFile.listFiles(); for (int i = 0; i < files.length; i++) if (files[i].getName().startsWith("__")) files[i].delete(); indexFile.delete(); } indexFile.mkdir(); EnvironmentConfig envConfig = new EnvironmentConfig(); DatabaseConfig dbConfig = new DatabaseConfig(); envConfig.setTransactional(true); envConfig.setInitializeCache(true); envConfig.setInitializeLocking(true); envConfig.setInitializeLogging(true); envConfig.setAllowCreate(true); envConfig.setThreaded(true); dbConfig.setAllowCreate(true); dbConfig.setType(DatabaseType.BTREE); Environment env = new Environment(indexFile, envConfig); Transaction txn = env.beginTransaction(null, null); Database index = env.openDatabase(txn, "__index__", null, dbConfig); Database blocks = env.openDatabase(txn, "__blocks__", null, dbConfig); txn.commit(); txn = env.beginTransaction(null, null); DbDirectory directory = new DbDirectory(txn, index, blocks); IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(Version.LUCENE_46), true, IndexWriter.MaxFieldLength.UNLIMITED); Document doc = new Document(); doc.add(new Field("contents", "The quick brown fox...", Field.Store.YES, Field.Index.ANALYZED)); writer.addDocument(doc); writer.merge(writer.getNextMerge()); writer.close(); directory.close(); txn.commit(); index.close(); blocks.close(); env.close(); LOGGER.info("Indexing Complete"); }
From source file: aos.lucene.tools.BerkeleyDbJEIndexer.java
License: Apache License
public static void main(String[] args) throws IOException, DatabaseException { if (args.length != 1) { System.err.println("Usage: BerkeleyDbIndexer <index dir>"); System.exit(-1);/*w w w .j ava2 s .c om*/ } File indexFile = new File(args[0]); if (indexFile.exists()) { File[] files = indexFile.listFiles(); for (int i = 0; i < files.length; i++) if (files[i].getName().startsWith("__")) files[i].delete(); indexFile.delete(); } indexFile.mkdir(); EnvironmentConfig envConfig = new EnvironmentConfig(); DatabaseConfig dbConfig = new DatabaseConfig(); envConfig.setTransactional(true); envConfig.setAllowCreate(true); dbConfig.setTransactional(true); dbConfig.setAllowCreate(true); Environment env = new Environment(indexFile, envConfig); Transaction txn = env.beginTransaction(null, null); Database index = env.openDatabase(txn, "__index__", dbConfig); Database blocks = env.openDatabase(txn, "__blocks__", dbConfig); txn.commit(); txn = env.beginTransaction(null, null); JEDirectory directory = new JEDirectory(txn, index, blocks); IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(Version.LUCENE_46), true, IndexWriter.MaxFieldLength.UNLIMITED); Document doc = new Document(); doc.add(new Field("contents", "The quick brown fox...", Field.Store.YES, Field.Index.ANALYZED)); writer.addDocument(doc); writer.merge(writer.getNextMerge()); writer.close(); directory.close(); txn.commit(); index.close(); blocks.close(); env.close(); LOGGER.info("Indexing Complete"); }
From source file: org.apache.blur.manager.writer.SharedMergeScheduler.java
License: Apache License
protected void addMerges(String id, IndexWriter writer) throws IOException {
    OneMerge merge;
    while ((merge = writer.getNextMerge()) != null) {
        addMerge(id, writer, merge);
    }
}