List of usage examples for org.apache.lucene.index.IndexWriter.commit()
@Override public final long commit() throws IOException
Commits all pending changes (added and deleted documents, segment merges, added indexes, etc.) to the index, and syncs all referenced index files, such that a reader will see the changes and the index updates will survive an OS or machine crash or power loss.
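Before the project-specific examples below, here is a minimal, self-contained sketch of the basic commit pattern. This is an illustrative sketch, not taken from any of the projects below; it assumes Lucene 5.x or later (where IndexWriterConfig no longer takes a Version argument), and the index path and field name are made up for the example.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class CommitExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical index location, used only for this sketch
        Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
        IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());
        try (IndexWriter writer = new IndexWriter(dir, conf)) {
            Document doc = new Document();
            doc.add(new TextField("body", "hello commit", Field.Store.YES));
            writer.addDocument(doc);
            // Durably persist the pending change; a reader opened on the
            // directory after this call will see the document.
            writer.commit();
        }
        dir.close();
    }
}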
From source file: org.apache.blur.lucene.security.IndexSearcherTest.java
License: Apache License
private void runTest(int expected, Collection<String> readAuthorizations,
        Collection<String> discoverAuthorizations, Collection<String> discoverableFields)
        throws IOException, ParseException {
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_43, new StandardAnalyzer(Version.LUCENE_43));
    Directory dir = new RAMDirectory();
    {
        IndexWriter writer = new IndexWriter(dir, conf);
        writer.addDocument(getEmpty());
        writer.commit();
        writer.addDocument(getDoc(0, "(a&b)|d", null, "f1", "f2"));
        writer.addDocument(getDoc(1, "a&b&c", null, "f1", "f2"));
        writer.addDocument(getDoc(2, "a&b&c&e", "a&b&c", "f1", "f2"));
        writer.addDocument(getDoc(3, null, null, "f1", "f2")); // can't find
        writer.close(false);
    }
    DirectoryReader reader = DirectoryReader.open(dir);
    validate(expected, 2, readAuthorizations, discoverAuthorizations, discoverableFields, dir, reader);
    {
        IndexWriter writer = new IndexWriter(dir, conf);
        writer.deleteDocuments(new Term("id", "0"));
        writer.addDocument(getDoc(0, "(a&b)|d", null, "f1", "f2"));
        writer.close(false);
    }
    reader = DirectoryReader.openIfChanged(reader);
    validate(expected, 3, readAuthorizations, discoverAuthorizations, discoverableFields, dir, reader);
    {
        IndexWriter writer = new IndexWriter(dir, conf);
        writer.deleteDocuments(new Term("id", "1"));
        writer.addDocument(getDoc(1, "a&b&c", null, "f1", "f2"));
        writer.close(false);
    }
    reader = DirectoryReader.openIfChanged(reader);
    validate(expected, 4, readAuthorizations, discoverAuthorizations, discoverableFields, dir, reader);
}
From source file: org.apache.blur.manager.writer.MutatableActionTest.java
License: Apache License
private DirectoryReader commitAndReopen(DirectoryReader reader, IndexWriter writer) throws IOException {
    writer.commit();
    DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
    if (newReader == null) {
        throw new IOException("Should have new data.");
    }
    reader.close();
    return newReader;
}
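A common variant of this helper (a sketch, not from the Blur source): DirectoryReader.openIfChanged returns null when nothing has changed since the reader was opened, so callers that cannot guarantee new data typically keep the old reader instead of throwing.

private static DirectoryReader commitAndRefresh(DirectoryReader reader, IndexWriter writer) throws IOException {
    writer.commit();
    DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
    if (newReader == null) {
        return reader; // index unchanged; keep the current reader
    }
    reader.close();
    return newReader;
}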
From source file: org.apache.blur.store.hdfs_v2.FastHdfsKeyValueDirectoryTest.java
License: Apache License
@Test
public void testMulipleCommitsAndReopens() throws IOException {
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_43, new KeywordAnalyzer());
    conf.setMergeScheduler(new SerialMergeScheduler());
    TieredMergePolicy mergePolicy = (TieredMergePolicy) conf.getMergePolicy();
    mergePolicy.setUseCompoundFile(false);
    Set<String> fileSet = new TreeSet<String>();
    long seed = new Random().nextLong();
    System.out.println("Seed:" + seed);
    Random random = new Random(seed);
    int docCount = 0;
    int passes = 10;
    byte[] segmentsGenContents = null;
    for (int run = 0; run < passes; run++) {
        final FastHdfsKeyValueDirectory directory = new FastHdfsKeyValueDirectory(false, _timer, _configuration,
                new Path(_path, "test_multiple_commits_reopens"));
        if (segmentsGenContents != null) {
            byte[] segmentsGenContentsCurrent = readSegmentsGen(directory);
            assertTrue(Arrays.equals(segmentsGenContents, segmentsGenContentsCurrent));
        }
        assertFiles(fileSet, run, -1, directory);
        assertEquals(docCount, getDocumentCount(directory));
        IndexWriter writer = new IndexWriter(directory, conf.clone());
        int numberOfCommits = random.nextInt(100);
        for (int i = 0; i < numberOfCommits; i++) {
            assertFiles(fileSet, run, i, directory);
            addDocuments(writer, random.nextInt(100));
            // Before Commit
            writer.commit();
            // After Commit
            // Set files after commit
            {
                fileSet.clear();
                List<IndexCommit> listCommits = DirectoryReader.listCommits(directory);
                assertEquals(1, listCommits.size());
                IndexCommit indexCommit = listCommits.get(0);
                fileSet.addAll(indexCommit.getFileNames());
            }
            segmentsGenContents = readSegmentsGen(directory);
        }
        docCount = getDocumentCount(directory);
    }
}
From source file: org.apache.cxf.systest.jaxrs.extraction.BookCatalog.java
License: Apache License
@POST
@Consumes("multipart/form-data")
public Response addBook(final MultipartBody body) throws Exception {
    for (final Attachment attachment : body.getAllAttachments()) {
        final DataHandler handler = attachment.getDataHandler();
        if (handler != null) {
            final String source = handler.getName();
            final LuceneDocumentMetadata metadata = new LuceneDocumentMetadata().withSource(source)
                    .withField("modified", Date.class);
            final Document document = extractor.extract(handler.getInputStream(), metadata);
            if (document != null) {
                final IndexWriter writer = getIndexWriter();
                try {
                    writer.addDocument(document);
                    writer.commit();
                } finally {
                    writer.close();
                }
            }
        }
    }
    return Response.ok().build();
}
From source file: org.apache.cxf.systest.jaxrs.extraction.BookCatalog.java
License: Apache License
@DELETE
public Response delete() throws IOException {
    final IndexWriter writer = getIndexWriter();
    try {
        writer.deleteAll();
        writer.commit();
    } finally {
        writer.close();
    }
    return Response.ok().build();
}
From source file: org.apache.geode_examples.luceneSpatial.SpatialHelperTest.java
License: Apache License
@Test
public void queryFindsADocumentThatWasAdded() throws IOException {
    // Create an in-memory Lucene index to add a document to
    RAMDirectory directory = new RAMDirectory();
    IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig());

    // Add a document to the Lucene index
    Document document = new Document();
    document.add(new TextField("name", "name", Field.Store.YES));
    Field[] fields = SpatialHelper.getIndexableFields(-122.8515139, 45.5099231);
    for (Field field : fields) {
        document.add(field);
    }
    writer.addDocument(document);
    writer.commit();

    // Make sure a findWithin query locates the document
    Query query = SpatialHelper.findWithin(-122.8515239, 45.5099331, 1);
    SearcherManager searcherManager = new SearcherManager(writer, null);
    IndexSearcher searcher = searcherManager.acquire();
    TopDocs results = searcher.search(query, 100);
    assertEquals(1, results.totalHits);
}
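Because the SearcherManager above is constructed from the IndexWriter, it supports near-real-time search: after further updates, maybeRefresh() makes even uncommitted changes visible to newly acquired searchers, so commit() is needed only for durability. A small sketch of that refresh-and-search pattern (the helper method name is illustrative):

private static TopDocs refreshAndSearch(SearcherManager searcherManager, Query query) throws IOException {
    searcherManager.maybeRefresh(); // make recent (even uncommitted) changes searchable
    IndexSearcher searcher = searcherManager.acquire();
    try {
        return searcher.search(query, 100);
    } finally {
        searcherManager.release(searcher);
    }
}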
From source file: org.apache.mahout.text.AbstractLuceneStorageTest.java
License: Apache License
protected void commitDocuments(Directory directory, Iterable<TestDocument> theDocs) throws IOException {
    IndexWriter indexWriter = new IndexWriter(directory,
            new IndexWriterConfig(Version.LUCENE_46, new StandardAnalyzer(Version.LUCENE_46)));
    for (TestDocument singleFieldDocument : theDocs) {
        indexWriter.addDocument(singleFieldDocument.asLuceneDocument());
    }
    // Explicit commit; in this Lucene version close() below would also
    // commit any pending changes.
    indexWriter.commit();
    indexWriter.close();
}
From source file: org.apache.maven.index.context.DefaultIndexingContext.java
License: Apache License
private void storeDescriptor() throws IOException {
    Document hdr = new Document();
    hdr.add(new Field(FLD_DESCRIPTOR, FLD_DESCRIPTOR_CONTENTS, Field.Store.YES, Field.Index.NOT_ANALYZED));
    hdr.add(new Field(FLD_IDXINFO, VERSION + ArtifactInfo.FS + getRepositoryId(), Field.Store.YES,
            Field.Index.NO));
    IndexWriter w = getIndexWriter();
    w.updateDocument(DESCRIPTOR_TERM, hdr);
    w.commit();
}
From source file: org.apache.maven.index.updater.DefaultIndexUpdater.java
License: Apache License
private static void filterDirectory(final Directory directory, final DocumentFilter filter) throws IOException {
    IndexReader r = null;
    IndexWriter w = null;
    try {
        r = DirectoryReader.open(directory);
        w = new NexusIndexWriter(directory, new NexusAnalyzer(), false);
        Bits liveDocs = MultiFields.getLiveDocs(r);
        int numDocs = r.maxDoc();
        for (int i = 0; i < numDocs; i++) {
            if (liveDocs != null && !liveDocs.get(i)) {
                continue;
            }
            Document d = r.document(i);
            if (!filter.accept(d)) {
                boolean success = w.tryDeleteDocument(r, i);
                // FIXME handle deletion failure
            }
        }
        w.commit();
    } finally {
        IndexUtils.close(r);
        IndexUtils.close(w);
    }
    w = null;
    try {
        // analyzer is unimportant, since we are not adding/searching to/on index, only reading/deleting
        w = new NexusIndexWriter(directory, new NexusAnalyzer(), false);
        w.commit();
    } finally {
        IndexUtils.close(w);
    }
}
From source file: org.apache.maven.index.updater.IndexDataReader.java
License: Apache License
public IndexDataReadResult readIndex(IndexWriter w, IndexingContext context) throws IOException {
    long timestamp = readHeader();

    Date date = null;
    if (timestamp != -1) {
        date = new Date(timestamp);
        IndexUtils.updateTimestamp(w.getDirectory(), date);
    }

    int n = 0;
    Document doc;
    Set<String> rootGroups = new LinkedHashSet<>();
    Set<String> allGroups = new LinkedHashSet<>();
    while ((doc = readDocument()) != null) {
        ArtifactInfo ai = IndexUtils.constructArtifactInfo(doc, context);
        if (ai != null) {
            w.addDocument(IndexUtils.updateDocument(doc, context, false, ai));
            rootGroups.add(ai.getRootGroup());
            allGroups.add(ai.getGroupId());
        } else {
            w.addDocument(doc);
        }
        n++;
    }
    w.commit();

    IndexDataReadResult result = new IndexDataReadResult();
    result.setDocumentCount(n);
    result.setTimestamp(date);
    result.setRootGroups(rootGroups);
    result.setAllGroups(allGroups);
    return result;
}