List of usage examples for org.apache.lucene.index.IndexWriterConfig#getIndexDeletionPolicy()
@Override
public IndexDeletionPolicy getIndexDeletionPolicy()
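All three examples below follow the same pattern: fetch the policy currently held by the config with getIndexDeletionPolicy() (the default KeepOnlyLastCommitDeletionPolicy unless another was set), wrap it in a SnapshotDeletionPolicy, and install the wrapper before the IndexWriter is constructed. A minimal, self-contained sketch of that pattern, assuming a local FSDirectory path and a plain StandardAnalyzer (both placeholders, not taken from the examples):

import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.SnapshotDeletionPolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class GetIndexDeletionPolicyExample {
    public static void main(String[] args) throws Exception {
        // Placeholder index location; adjust for your environment.
        Directory directory = FSDirectory.open(Paths.get("/tmp/example-index"));

        IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
        // getIndexDeletionPolicy() returns the policy currently configured
        // (KeepOnlyLastCommitDeletionPolicy by default); wrap it so commits
        // can be pinned while the writer is open.
        SnapshotDeletionPolicy snapshotPolicy =
                new SnapshotDeletionPolicy(config.getIndexDeletionPolicy());
        config.setIndexDeletionPolicy(snapshotPolicy);

        try (IndexWriter writer = new IndexWriter(directory, config)) {
            writer.commit(); // ensure there is at least one commit to snapshot
            IndexCommit commit = snapshotPolicy.snapshot();
            try {
                // Files of this commit are protected from deletion here,
                // e.g. while a backup or replication pass reads them.
                System.out.println("Snapshotted commit files: " + commit.getFileNames());
            } finally {
                snapshotPolicy.release(commit); // unpin the commit
            }
        }
    }
}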
From source file:com.b2international.index.compat.SingleDirectoryIndexImpl.java
License:Apache License
protected void initLucene(final File indexDirectory, final boolean clean) {
    try {
        this.directory = Directories.openFile(indexDirectory.toPath());
        final Analyzer analyzer = new ComponentTermAnalyzer();
        final IndexWriterConfig config = new IndexWriterConfig(analyzer);
        config.setOpenMode(clean ? OpenMode.CREATE : OpenMode.CREATE_OR_APPEND);
        config.setIndexDeletionPolicy(new SnapshotDeletionPolicy(config.getIndexDeletionPolicy()));
        this.writer = new IndexWriter(directory, config);
        this.writer.commit(); // Create index if it didn't exist
        this.manager = new SearcherManager(directory, new SearchWarmerFactory());
    } catch (final IOException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}
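Note how the existing policy is fetched with getIndexDeletionPolicy() and wrapped rather than replaced: SnapshotDeletionPolicy delegates ordinary commit handling to the wrapped policy (here the default KeepOnlyLastCommitDeletionPolicy) and only keeps snapshotted commits from being deleted, which is what makes backups of a live writer possible.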
From source file:com.qwazr.search.bench.LuceneCommonIndex.java
License:Apache License
LuceneCommonIndex(final Path rootDirectory, final String schemaName, final String indexName,
        final double ramBufferSize, final boolean useCompoundFile) throws IOException {
    final Path schemaDirectory = Files.createDirectory(rootDirectory.resolve(schemaName));
    this.indexDirectory = Files.createDirectory(schemaDirectory.resolve(indexName));
    this.luceneDirectory = indexDirectory.resolve("data");
    this.dataDirectory = FSDirectory.open(luceneDirectory);
    final IndexWriterConfig indexWriterConfig = new IndexWriterConfig(
            new PerFieldAnalyzerWrapper(new StandardAnalyzer()));
    indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
    indexWriterConfig.setRAMBufferSizeMB(ramBufferSize);
    final ConcurrentMergeScheduler mergeScheduler = new ConcurrentMergeScheduler();
    mergeScheduler.setMaxMergesAndThreads(MAX_SSD_MERGE_THREADS, MAX_SSD_MERGE_THREADS);
    indexWriterConfig.setMergeScheduler(mergeScheduler);
    indexWriterConfig.setUseCompoundFile(useCompoundFile);
    final TieredMergePolicy mergePolicy = new TieredMergePolicy();
    indexWriterConfig.setMergePolicy(mergePolicy);
    // We use a snapshot deletion policy
    final SnapshotDeletionPolicy snapshotDeletionPolicy = new SnapshotDeletionPolicy(
            indexWriterConfig.getIndexDeletionPolicy());
    indexWriterConfig.setIndexDeletionPolicy(snapshotDeletionPolicy);
    this.indexWriter = new IndexWriter(this.dataDirectory, indexWriterConfig);
    this.localReplicator = new LocalReplicator();
}
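The constructor above installs the snapshot policy but the snippet ends before it is used. A hypothetical helper, assuming fields with the same names as above and an externally chosen backupDir (none of this is part of LuceneCommonIndex), showing how such a policy is typically consumed:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.SnapshotDeletionPolicy;

final class IndexBackups {
    // Hypothetical helper (not from the source file): copies the files of a
    // snapshotted commit to backupDir while the SnapshotDeletionPolicy pins them.
    static void backup(IndexWriter indexWriter, SnapshotDeletionPolicy snapshotDeletionPolicy,
            Path luceneDirectory, Path backupDir) throws IOException {
        indexWriter.commit();                                   // guarantee a commit to snapshot
        IndexCommit commit = snapshotDeletionPolicy.snapshot(); // pin the commit's files
        try {
            for (String fileName : commit.getFileNames()) {
                Files.copy(luceneDirectory.resolve(fileName), backupDir.resolve(fileName),
                        StandardCopyOption.REPLACE_EXISTING);
            }
        } finally {
            snapshotDeletionPolicy.release(commit);             // allow normal deletion again
        }
    }
}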
From source file:com.qwazr.search.index.IndexInstance.java
License:Apache License
/**
 * @param schema
 * @param indexDirectory
 * @return
 */
final static IndexInstance newInstance(SchemaInstance schema, File indexDirectory,
        IndexSettingsDefinition settings)
        throws ServerException, IOException, ReflectiveOperationException, InterruptedException {
    UpdatableAnalyzer indexAnalyzer = null;
    UpdatableAnalyzer queryAnalyzer = null;
    IndexWriter indexWriter = null;
    Directory dataDirectory = null;
    try {
        if (!indexDirectory.exists())
            indexDirectory.mkdir();
        if (!indexDirectory.isDirectory())
            throw new IOException(
                    "This name is not valid. No directory exists for this location: " + indexDirectory);
        FileSet fileSet = new FileSet(indexDirectory);

        // Loading the settings
        if (settings == null) {
            settings = fileSet.settingsFile.exists()
                    ? JsonMapper.MAPPER.readValue(fileSet.settingsFile, IndexSettingsDefinition.class)
                    : IndexSettingsDefinition.EMPTY;
        } else {
            JsonMapper.MAPPER.writeValue(fileSet.settingsFile, settings);
        }

        // Loading the fields
        File fieldMapFile = new File(indexDirectory, FIELDS_FILE);
        LinkedHashMap<String, FieldDefinition> fieldMap = fieldMapFile.exists()
                ? JsonMapper.MAPPER.readValue(fieldMapFile, FieldDefinition.MapStringFieldTypeRef)
                : new LinkedHashMap<>();

        // Loading the analyzers
        File analyzerMapFile = new File(indexDirectory, ANALYZERS_FILE);
        LinkedHashMap<String, AnalyzerDefinition> analyzerMap = analyzerMapFile.exists()
                ? JsonMapper.MAPPER.readValue(analyzerMapFile, AnalyzerDefinition.MapStringAnalyzerTypeRef)
                : new LinkedHashMap<>();

        AnalyzerContext context = new AnalyzerContext(analyzerMap, fieldMap);
        indexAnalyzer = new UpdatableAnalyzer(context, context.indexAnalyzerMap);
        queryAnalyzer = new UpdatableAnalyzer(context, context.queryAnalyzerMap);

        // Open and lock the data directory
        dataDirectory = FSDirectory.open(fileSet.dataDirectory.toPath());

        // Set up the index writer configuration
        IndexWriterConfig indexWriterConfig = new IndexWriterConfig(indexAnalyzer);
        if (settings != null && settings.similarity_class != null)
            indexWriterConfig.setSimilarity(IndexUtils.findSimilarity(settings.similarity_class));
        indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
        SnapshotDeletionPolicy snapshotDeletionPolicy = new SnapshotDeletionPolicy(
                indexWriterConfig.getIndexDeletionPolicy());
        indexWriterConfig.setIndexDeletionPolicy(snapshotDeletionPolicy);
        indexWriter = new IndexWriter(dataDirectory, indexWriterConfig);
        if (indexWriter.hasUncommittedChanges())
            indexWriter.commit();

        // Finally we build the SearcherManager
        SearcherManager searcherManager = new SearcherManager(indexWriter, true, null);

        return new IndexInstance(schema, dataDirectory, settings, analyzerMap, fieldMap, fileSet, indexWriter,
                searcherManager, queryAnalyzer);
    } catch (IOException | ServerException | ReflectiveOperationException | InterruptedException e) {
        // We failed to open the index. Close everything we can.
        if (queryAnalyzer != null)
            IOUtils.closeQuietly(queryAnalyzer);
        if (indexAnalyzer != null)
            IOUtils.closeQuietly(indexAnalyzer);
        if (indexWriter != null)
            IOUtils.closeQuietly(indexWriter);
        if (dataDirectory != null)
            IOUtils.closeQuietly(dataDirectory);
        throw e;
    }
}
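As in the other examples, the snapshot policy is built from whatever getIndexDeletionPolicy() currently returns and set back on the config before the IndexWriter is created; the writer captures its deletion policy at construction time, so the wrapper must be installed on the IndexWriterConfig first for snapshots taken later (for example by a backup or replication service) to protect commit files.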