Example usage for org.apache.lucene.index IndexWriter commit

List of usage examples for org.apache.lucene.index IndexWriter commit

Introduction

On this page you can find an example usage for org.apache.lucene.index IndexWriter commit.

Prototype

@Override
public final long commit() throws IOException 

Source Link

Document

Commits all pending changes (added and deleted documents, segment merges, added indexes, etc.) to the index, and syncs all referenced index files, such that a reader will see the changes and the index updates will survive an OS or machine crash or power loss.

Usage

From source file:org.elasticsearch.test.unit.termvectors.TermVectorUnitTests.java

License:Apache License

/**
 * Indexes a single document whose "title" and "desc" fields store full term
 * vectors (positions + offsets, no payloads), then reads the term vectors back
 * and copies them into the given response.
 *
 * @param outResponse response that receives the term vectors of the indexed document
 * @throws IOException if the index cannot be written or read
 */
private void writeStandardTermVector(TermVectorResponse outResponse) throws IOException {

    Directory dir = FSDirectory.open(new File("/tmp/foo"));
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
            new StandardAnalyzer(TEST_VERSION_CURRENT));
    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);

    // Stored text field with term vectors enabled for everything except payloads.
    FieldType type = new FieldType(TextField.TYPE_STORED);
    type.setStoreTermVectorOffsets(true);
    type.setStoreTermVectorPayloads(false);
    type.setStoreTermVectorPositions(true);
    type.setStoreTermVectors(true);
    type.freeze();

    Document d = new Document();
    d.add(new Field("id", "abc", StringField.TYPE_STORED));
    d.add(new Field("title", "the1 quick brown fox jumps over  the1 lazy dog", type));
    d.add(new Field("desc", "the1 quick brown fox jumps over  the1 lazy dog", type));

    writer.updateDocument(new Term("id", "abc"), d);
    writer.commit();
    writer.close();

    DirectoryReader dr = DirectoryReader.open(dir);
    try {
        IndexSearcher s = new IndexSearcher(dr);
        TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
        int doc = search.scoreDocs[0].doc;
        Fields termVectors = dr.getTermVectors(doc);
        EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
        // NOTE(review): assumes setFields consumes the term vectors eagerly —
        // the reader is closed right after this call; confirm against
        // TermVectorResponse before relying on lazy access.
        outResponse.setFields(termVectors, null, flags, termVectors);
    } finally {
        // Original leaked both the reader and the directory.
        dr.close();
        dir.close();
    }
}

From source file:org.elasticsearch.util.lucene.search.MoreLikeThisQueryTests.java

License:Apache License

@Test
public void testSimple() throws Exception {
    // Fresh in-memory index, created empty and committed immediately.
    Directory dir = new RAMDirectory();
    IndexWriter writer = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true,
            IndexWriter.MaxFieldLength.UNLIMITED);
    writer.commit();
    assertThat("Index is empty after creation and commit", estimateRamSize(writer), equalTo(0l));

    // Two documents that both contain the term "lucene".
    writer.addDocument(doc().add(field("_id", "1")).add(field("text", "lucene")).build());
    writer.addDocument(doc().add(field("_id", "2")).add(field("text", "lucene release")).build());

    IndexReader reader = writer.getReader();
    IndexSearcher searcher = new IndexSearcher(reader);

    // A more-like-this query seeded with "lucene" should match both documents.
    MoreLikeThisQuery query = new MoreLikeThisQuery("lucene", new String[] { "text" },
            Lucene.STANDARD_ANALYZER);
    query.setLikeText("lucene");
    query.setMinTermFrequency(1);
    query.setMinDocFreq(1);
    long count = Lucene.count(searcher, query, -1);
    assertThat(count, equalTo(2l));

    reader.close();
    writer.close();
}

From source file:org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapperIntegrationTests.java

License:Open Source License

/**
 * Verifies document-level security: the wrapper's match_all role query is applied,
 * so a wrapped reader/searcher must see exactly the non-deleted documents.
 */
public void testDLS() throws Exception {
    ShardId shardId = new ShardId("_index", "_na_", 0);
    MapperService mapperService = mock(MapperService.class);
    ScriptService scriptService = mock(ScriptService.class);
    when(mapperService.documentMapper()).thenReturn(null);
    when(mapperService.simpleMatchToFullName(anyString()))
            .then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0]));

    ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
    // DLS only (no FLS): every field is visible, documents restricted by the role query.
    IndicesAccessControl.IndexAccessControl indexAccessControl = new IndicesAccessControl.IndexAccessControl(
            true, new FieldPermissions(), singleton(new BytesArray("{\"match_all\" : {}}")));
    IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(shardId.getIndex(), Settings.EMPTY);
    Client client = mock(Client.class);
    when(client.settings()).thenReturn(Settings.EMPTY);
    final long nowInMillis = randomNonNegativeLong();
    QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), indexSettings, null, null,
            mapperService, null, null, xContentRegistry(), writableRegistry(), client, null, () -> nowInMillis,
            null);
    QueryShardContext queryShardContext = spy(realQueryShardContext);
    IndexSettings settings = IndexSettingsModule.newIndexSettings("_index", Settings.EMPTY);
    BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(settings, new BitsetFilterCache.Listener() {
        @Override
        public void onCache(ShardId shardId, Accountable accountable) {
        }

        @Override
        public void onRemoval(ShardId shardId, Accountable accountable) {

        }
    });
    XPackLicenseState licenseState = mock(XPackLicenseState.class);
    when(licenseState.isDocumentAndFieldLevelSecurityAllowed()).thenReturn(true);
    when(licenseState.isSecurityEnabled()).thenReturn(true);
    SecurityIndexSearcherWrapper wrapper = new SecurityIndexSearcherWrapper(indexSettings,
            s -> queryShardContext, bitsetFilterCache, threadContext, licenseState, scriptService) {

        @Override
        protected IndicesAccessControl getIndicesAccessControl() {
            return new IndicesAccessControl(true, singletonMap("_index", indexAccessControl));
        }
    };

    Directory directory = newDirectory();
    IndexWriter iw = new IndexWriter(directory,
            new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE));

    int numValues = scaledRandomIntBetween(2, 16);
    String[] values = new String[numValues];
    for (int i = 0; i < numValues; i++) {
        values[i] = "value" + i;
    }
    int[] valuesHitCount = new int[numValues];

    int numDocs = scaledRandomIntBetween(32, 128);
    int commitAfter = scaledRandomIntBetween(1, numDocs);
    logger.info(
            "Going to index [{}] documents with [{}] unique values and commit after [{}] documents have been indexed",
            numDocs, numValues, commitAfter);

    for (int doc = 1; doc <= numDocs; doc++) {
        // NOTE(review): spreads values over docs deterministically; looks like a
        // stand-in for a random pick in [0, numValues) — confirm intent.
        int valueIndex = (numValues - 1) % doc;

        Document document = new Document();
        String id = String.valueOf(doc);
        document.add(new StringField("id", id, Field.Store.NO));
        String value = values[valueIndex];
        document.add(new StringField("field", value, Field.Store.NO));
        iw.addDocument(document);
        if (doc % 11 == 0) {
            // Every 11th doc is deleted again and not counted as a hit.
            iw.deleteDocuments(new Term("id", id));
        } else {
            // BUGFIX: was `commitAfter % commitAfter == 0`, which is always true and
            // therefore committed after every document, contradicting the log message.
            if (doc % commitAfter == 0) {
                iw.commit();
            }
            valuesHitCount[valueIndex]++;
        }
    }
    iw.close();
    StringBuilder valueToHitCountOutput = new StringBuilder();
    for (int i = 0; i < numValues; i++) {
        valueToHitCountOutput.append(values[i]).append('\t').append(valuesHitCount[i]).append('\n');
    }
    logger.info("Value count matrix:\n{}", valueToHitCountOutput);

    DirectoryReader directoryReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory),
            shardId);
    for (int i = 0; i < numValues; i++) {
        ParsedQuery parsedQuery = new ParsedQuery(new TermQuery(new Term("field", values[i])));
        doReturn(new TermQueryBuilder("field", values[i])).when(queryShardContext)
                .parseInnerQueryBuilder(any(XContentParser.class));
        when(queryShardContext.toFilter(new TermsQueryBuilder("field", values[i]))).thenReturn(parsedQuery);
        DirectoryReader wrappedDirectoryReader = wrapper.wrap(directoryReader);
        IndexSearcher indexSearcher = wrapper.wrap(new IndexSearcher(wrappedDirectoryReader));

        int expectedHitCount = valuesHitCount[i];
        logger.info("Going to verify hit count with query [{}] with expected total hits [{}]",
                parsedQuery.query(), expectedHitCount);
        TotalHitCountCollector countCollector = new TotalHitCountCollector();
        indexSearcher.search(new MatchAllDocsQuery(), countCollector);
        assertThat(countCollector.getTotalHits(), equalTo(expectedHitCount));
        assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount));
    }

    bitsetFilterCache.close();
    directoryReader.close();
    directory.close();
}

From source file:org.elasticsearch.xpack.core.security.authz.accesscontrol.SecurityIndexSearcherWrapperUnitTests.java

License:Open Source License

/**
 * Verifies intersectScorerAndRoleBits: the intersection of a field2=value1 scorer
 * with per-value role bit sets must collect exactly the matching live doc, and
 * must skip a document that was deleted after commit.
 */
public void testIntersectScorerAndRoleBits() throws Exception {
    securityIndexSearcherWrapper = new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext,
            licenseState, scriptService);
    final Directory directory = newDirectory();
    IndexWriter iw = new IndexWriter(directory,
            new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE));

    // Four docs sharing field2=value1, each with a distinct field1 value (doc ids 0..3).
    for (String value : new String[] { "value1", "value2", "value3", "value4" }) {
        Document document = new Document();
        document.add(new StringField("field1", value, Field.Store.NO));
        document.add(new StringField("field2", "value1", Field.Store.NO));
        iw.addDocument(document);
    }

    iw.commit();
    // Delete doc 2 after the commit so it stays in the segment but is marked dead.
    iw.deleteDocuments(new Term("field1", "value3"));
    iw.close();
    DirectoryReader directoryReader = DirectoryReader.open(directory);
    IndexSearcher searcher = new IndexSearcher(directoryReader);
    Weight weight = searcher.createNormalizedWeight(new TermQuery(new Term("field2", "value1")), false);

    LeafReaderContext leaf = directoryReader.leaves().get(0);

    assertIntersection(weight, leaf, "value1", 0);
    assertIntersection(weight, leaf, "value2", 1);
    assertIntersection(weight, leaf, "value3", null); // deleted — must never be collected
    assertIntersection(weight, leaf, "value4", 3);

    directoryReader.close();
    directory.close();
}

/**
 * Intersects the weight's scorer with the role bits for field1=value and asserts the
 * collected doc id; a null expectedDoc means the document is deleted and any
 * collection is a failure.
 */
private void assertIntersection(Weight weight, LeafReaderContext leaf, String value, final Integer expectedDoc)
        throws IOException {
    SparseFixedBitSet roleBits = query(leaf, "field1", value);
    LeafCollector leafCollector = new LeafBucketCollector() {
        @Override
        public void collect(int doc, long bucket) throws IOException {
            if (expectedDoc == null) {
                fail("docId [" + doc + "] should have been deleted");
            } else {
                assertThat(doc, equalTo(expectedDoc.intValue()));
            }
        }
    };
    intersectScorerAndRoleBits(weight.scorer(leaf), roleBits, leafCollector, leaf.reader().getLiveDocs());
}

From source file:org.elasticsearch.xpack.security.authz.accesscontrol.FieldDataCacheWithFieldSubsetReaderTests.java

License:Open Source License

@Before
public void setup() throws Exception {
    // Field-data implementations under test, both backed by one accounting cache.
    IndexSettings settings = createIndexSettings();
    CircuitBreakerService breakerService = new NoneCircuitBreakerService();
    String fieldName = "_field";
    indexFieldDataCache = new DummyAccountingFieldDataCache();
    sortedSetDVOrdinalsIndexFieldData = new SortedSetDVOrdinalsIndexFieldData(settings,
            indexFieldDataCache, fieldName, breakerService,
            AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION);
    pagedBytesIndexFieldData = new PagedBytesIndexFieldData(settings, fieldName, indexFieldDataCache,
            breakerService, TextFieldMapper.Defaults.FIELDDATA_MIN_FREQUENCY,
            TextFieldMapper.Defaults.FIELDDATA_MAX_FREQUENCY,
            TextFieldMapper.Defaults.FIELDDATA_MIN_SEGMENT_SIZE);

    // Build a multi-segment index: NoMergePolicy plus a commit every 24 docs
    // keeps the segments from being merged away.
    dir = newDirectory();
    IndexWriterConfig writerConfig = new IndexWriterConfig(null);
    writerConfig.setMergePolicy(NoMergePolicy.INSTANCE);
    IndexWriter writer = new IndexWriter(dir, writerConfig);
    numDocs = scaledRandomIntBetween(32, 128);

    for (int docId = 1; docId <= numDocs; docId++) {
        Document document = new Document();
        document.add(new StringField("_field", String.valueOf(docId), Field.Store.NO));
        document.add(new SortedSetDocValuesField("_field", new BytesRef(String.valueOf(docId))));
        writer.addDocument(document);
        if (docId % 24 == 0) {
            writer.commit();
        }
    }
    writer.close();
    ir = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(dir), new ShardId(settings.getIndex(), 0));
}

From source file:org.elbe.relations.indexer.lucene.LuceneIndexer.java

License:Open Source License

/**
 * Writes every document held by the given indexer helper into the Lucene index
 * directory using a language-specific analyzer, then commits and optimizes.
 * <p>
 * NOTE(review): an IOException raised during indexing is logged and swallowed even
 * though the signature declares {@code throws IOException} — callers never observe
 * the failure. Confirm this best-effort behavior is intended.
 */
@Override
public void processIndexer(final IndexerHelper inIndexer, final File inIndexDir, final String inLanguage,
        final boolean inCreate) throws IOException {
    IndexWriter lWriter = null;
    try {
        // inCreate=true recreates the index from scratch; false appends.
        lWriter = new IndexWriter(inIndexDir, getAnalyzer(inLanguage), inCreate,
                IndexWriter.MaxFieldLength.UNLIMITED);
        for (final IndexerDocument lDoc : inIndexer.getDocuments()) {
            final Document lDocument = transformDoc(lDoc);
            lWriter.addDocument(lDocument);
        }
        lWriter.commit();
        // NOTE(review): only optimize() is synchronized — the rest of the write path
        // is not guarded; unclear what this lock protects, verify the intent.
        synchronized (this) {
            lWriter.optimize();
        }
    } catch (final IOException exc) {
        LOG.error("Error with Lucene index encountered!", exc);
    } finally {
        // Always release the writer (and its lock on the index directory).
        if (lWriter != null) {
            lWriter.close();
        }
    }
}

From source file:org.elbe.relations.indexer.lucene.LuceneIndexer.java

License:Open Source License

/**
 * (Re)creates an empty Lucene index in the given directory: opening the writer
 * with {@code create=true} discards any existing index, and the immediate commit
 * persists the empty state before the writer is closed.
 */
@Override
public void initializeIndex(final File inIndexDir) throws IOException {
    IndexWriter lWriter = null;
    try {
        lWriter = new IndexWriter(inIndexDir, getAnalyzer(""), true, IndexWriter.MaxFieldLength.UNLIMITED); //$NON-NLS-1$
        lWriter.commit();
    } finally {
        // Always release the writer so the index directory lock is freed.
        if (lWriter != null)
            lWriter.close();
    }
}

From source file:org.eobjects.datacleaner.lucene.AbstractSearchIndex.java

License:Open Source License

/**
 * Opens a writer on this index's directory, hands it to the given action, commits
 * on success and always closes the writer. RuntimeExceptions thrown by the action
 * propagate unchanged; any other failure is wrapped in IllegalStateException.
 */
@Override
public void write(Action<IndexWriter> writerAction) {
    try {
        final IndexWriterConfig config = new IndexWriterConfig(Constants.VERSION,
                new SimpleAnalyzer(Constants.VERSION));
        final IndexWriter writer = new IndexWriter(getDirectory(), config);
        try {
            writerAction.run(writer);
            writer.commit();
        } catch (Throwable t) {
            if (t instanceof RuntimeException) {
                throw (RuntimeException) t;
            }
            throw new IllegalStateException("Write action threw exception", t);
        } finally {
            // Close regardless of outcome so the directory lock is released.
            writer.close();
        }
    } catch (IOException e) {
        throw new IllegalStateException("Could not write to directory", e);
    }
}

From source file:org.eobjects.datacleaner.lucene.SearchIndexMatcherTransformerTest.java

License:Open Source License

@Override
protected void setUp() throws Exception {
    // Populate each search index with its word list; every write commits at the end.
    companyWords.write(addAllAndCommit("Corporation", "Corp", "Co", "Co.", "Ltd", "Limited", "Aps",
            "B.V.", "BV", "A/S", "GmbH"));

    givenNames.write(addAllAndCommit("Kasper", "Ankit", "Manuel", "Hans", "Winfried", "Andre", "Vincent"));

    familyNames.write(addAllAndCommit("Srensen", "Kumar", "van den Berg", "Drexler", "van Holland",
            "Velthoen", "van Hunnik"));
}

/** Builds an action that adds one simple document per value and then commits the writer. */
private static Action<IndexWriter> addAllAndCommit(final String... values) {
    return new Action<IndexWriter>() {
        @Override
        public void run(IndexWriter w) throws Exception {
            for (String value : values) {
                w.addDocument(SearchHelper.createSimpleDoc(value));
            }
            w.commit();
        }
    };
}

From source file:org.eu.bitzone.Leia.java

License:Apache License

/**
 * Reads the user-data map out of the dialog, dismisses the dialog, and commits the
 * map as commit user data on the index. Only possible when the open reader is a
 * DirectoryReader; any failure is reported through the error dialog.
 */
public void commitUserData(final Object dialog) {
    final Map<String, String> userData = (Map<String, String>) getProperty(dialog, "userData");
    remove(dialog);
    // Guard: commit user data requires a directory-backed reader.
    if (!(ir instanceof DirectoryReader)) {
        errorMsg("Not possible with " + ir.getClass().getSimpleName());
        return;
    }
    try {
        final IndexWriter writer = createIndexWriter();
        writer.setCommitData(userData);
        writer.commit();
        writer.close();
        refreshAfterWrite();
    } catch (final Exception e) {
        errorMsg("Error: " + e.toString());
    }
}