Example usage for org.apache.lucene.index IndexWriter commit

Introduction

This page collects usage examples of org.apache.lucene.index.IndexWriter.commit().

Prototype

@Override
public final long commit() throws IOException;

Document

Commits all pending changes (added and deleted documents, segment merges, added indexes, etc.) to the index, and syncs all referenced index files, such that a reader will see the changes and the index updates will survive an OS or machine crash or power loss.
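
In recent Lucene releases (7.x and later) the long return value is the sequence number of the commit operation. The following is a minimal, self-contained sketch of a typical commit cycle, written against a recent Lucene release; the index path and field names are illustrative and not taken from the examples below.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class CommitExample {
    public static void main(String[] args) throws IOException {
        // Open (or create) an index under a local path; the path is illustrative.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/commit-example"));
                IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new StringField("id", "1", Field.Store.YES));
            writer.addDocument(doc);
            // commit() flushes pending changes, fsyncs the referenced index files,
            // and (in Lucene 7+) returns the sequence number of the commit operation.
            long seqNo = writer.commit();
            System.out.println("committed, sequence number = " + seqNo);
        }
    }
}

Because commit() syncs all referenced index files, it can be a costly operation; the examples below therefore tend to batch many document operations per commit.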

Usage

From source file: org.elasticsearch.test.integration.termvectors.GetTermVectorTests.java

License: Apache License

private void writeStandardTermVector(TermVectorResponse outResponse) throws IOException {

    Directory dir = FSDirectory.open(new File("/tmp/foo"));
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
            new StandardAnalyzer(TEST_VERSION_CURRENT));
    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);
    FieldType type = new FieldType(TextField.TYPE_STORED);
    type.setStoreTermVectorOffsets(true);
    type.setStoreTermVectorPayloads(false);
    type.setStoreTermVectorPositions(true);
    type.setStoreTermVectors(true);
    type.freeze();
    Document d = new Document();
    d.add(new Field("id", "abc", StringField.TYPE_STORED));
    d.add(new Field("title", "the1 quick brown fox jumps over  the1 lazy dog", type));
    d.add(new Field("desc", "the1 quick brown fox jumps over  the1 lazy dog", type));

    writer.updateDocument(new Term("id", "abc"), d);
    writer.commit(); // commit the update so it is durable and visible to newly opened readers
    writer.close();
    DirectoryReader dr = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(dr);
    TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    int doc = scoreDocs[0].doc;
    Fields fields = dr.getTermVectors(doc);
    EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
    outResponse.setFields(fields, null, flags, fields);
    dr.close();

}

From source file: org.elasticsearch.test.integration.termvectors.GetTermVectorTests.java

License: Apache License

private Fields buildWithLuceneAndReturnFields(String docId, String[] fields, String[] content,
        boolean[] withPositions, boolean[] withOffsets, boolean[] withPayloads) throws IOException {
    assert (fields.length == withPayloads.length);
    assert (content.length == withPayloads.length);
    assert (withPositions.length == withPayloads.length);
    assert (withOffsets.length == withPayloads.length);

    Map<String, Analyzer> mapping = new HashMap<String, Analyzer>();
    for (int i = 0; i < withPayloads.length; i++) {
        if (withPayloads[i]) {
            mapping.put(fields[i], new Analyzer() {
                @Override
                protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
                    Tokenizer tokenizer = new StandardTokenizer(TEST_VERSION_CURRENT, reader);
                    TokenFilter filter = new LowerCaseFilter(TEST_VERSION_CURRENT, tokenizer);
                    filter = new TypeAsPayloadTokenFilter(filter);
                    return new TokenStreamComponents(tokenizer, filter);
                }

            });
        }
    }
    PerFieldAnalyzerWrapper wrapper = new PerFieldAnalyzerWrapper(new StandardAnalyzer(TEST_VERSION_CURRENT),
            mapping);

    Directory dir = FSDirectory.open(new File("/tmp/foo"));
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, wrapper);

    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);

    Document d = new Document();
    for (int i = 0; i < fields.length; i++) {
        d.add(new Field("id", docId, StringField.TYPE_STORED));
        FieldType type = new FieldType(TextField.TYPE_STORED);
        type.setStoreTermVectorOffsets(withOffsets[i]);
        type.setStoreTermVectorPayloads(withPayloads[i]);
        type.setStoreTermVectorPositions(withPositions[i] || withOffsets[i] || withPayloads[i]);
        type.setStoreTermVectors(true);
        type.freeze();
        d.add(new Field(fields[i], content[i], type));
        writer.updateDocument(new Term("id", docId), d);
        writer.commit();
    }
    writer.close();

    DirectoryReader dr = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(dr);
    TopDocs search = s.search(new TermQuery(new Term("id", docId)), 1);

    ScoreDoc[] scoreDocs = search.scoreDocs;
    assert (scoreDocs.length == 1);
    int doc = scoreDocs[0].doc;
    Fields returnFields = dr.getTermVectors(doc);
    return returnFields;

}

From source file: org.elasticsearch.test.stress.compress.LuceneCompressionStressTest.java

License: Apache License

public static void main(String[] args) throws Exception {
    final boolean USE_COMPOUND = false;
    final Compressor compressor = CompressorFactory.defaultCompressor();

    File testFile = new File("target/bench/compress/lucene");
    FileSystemUtils.deleteRecursively(testFile);
    testFile.mkdirs();

    Directory dir = new CompressedDirectory(new NIOFSDirectory(new File(testFile, "compressed")), compressor,
            false, "fdt", "tvf");
    TieredMergePolicy mergePolicy = new TieredMergePolicy();
    mergePolicy.setUseCompoundFile(USE_COMPOUND);
    IndexWriter writer = new IndexWriter(dir,
            new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER).setMergePolicy(mergePolicy));

    System.out.println("feeding data...");
    TestData testData = new TestData();
    long count = 0;
    long round = 0;
    while (true) {
        // json
        XContentBuilder builder = XContentFactory.jsonBuilder();
        testData.current(builder);
        builder.close();
        Document doc = new Document();
        doc.add(new Field("_source", builder.bytes().array(), builder.bytes().arrayOffset(),
                builder.bytes().length()));
        if (true) {
            Field field = new Field("text", builder.string(), Field.Store.NO, Field.Index.ANALYZED,
                    Field.TermVector.WITH_POSITIONS_OFFSETS);
            doc.add(field);
        }
        writer.addDocument(doc);

        if ((++count % 10000) == 0) {
            writer.commit();
            ++round;
            System.out.println(DateTime.now() + "[" + round + "] closing");
            writer.close(true);
            System.out.println(DateTime.now() + "[" + round + "] closed");
            CheckIndex checkIndex = new CheckIndex(dir);
            FastByteArrayOutputStream os = new FastByteArrayOutputStream();
            PrintStream out = new PrintStream(os);
            checkIndex.setInfoStream(out);
            out.flush();
            CheckIndex.Status status = checkIndex.checkIndex();
            if (!status.clean) {
                System.out.println("check index [failure]\n" + new String(os.bytes().toBytes()));
            } else {
                System.out.println(DateTime.now() + "[" + round + "] checked");
            }
            mergePolicy = new TieredMergePolicy();
            mergePolicy.setUseCompoundFile(USE_COMPOUND);
            writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER)
                    .setMergePolicy(mergePolicy));
        }
    }
}

From source file: org.elasticsearch.test.stress.compress.LuceneCompressionStressTests.java

License: Apache License

@Test
public void test() throws Exception {
    final boolean USE_COMPOUND = false;
    final Compressor compressor = CompressorFactory.defaultCompressor();

    File testFile = new File("target/bench/compress/lucene");
    FileSystemUtils.deleteRecursively(testFile);
    testFile.mkdirs();

    Directory dir = new CompressedDirectory(new NIOFSDirectory(new File(testFile, "compressed")), compressor,
            false, "fdt", "tvf");
    TieredMergePolicy mergePolicy = new TieredMergePolicy();
    mergePolicy.setUseCompoundFile(USE_COMPOUND);
    IndexWriter writer = new IndexWriter(dir,
            new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER).setMergePolicy(mergePolicy));

    logger.info("feeding data...");
    TestData testData = new TestData();
    long count = 0;
    long round = 0;
    while (round < 100) {
        // json
        XContentBuilder builder = XContentFactory.jsonBuilder();
        testData.current(builder);
        builder.close();
        Document doc = new Document();
        doc.add(new Field("_source", builder.bytes().array(), builder.bytes().arrayOffset(),
                builder.bytes().length()));
        if (true) {
            Field field = new Field("text", builder.string(), Field.Store.NO, Field.Index.ANALYZED,
                    Field.TermVector.WITH_POSITIONS_OFFSETS);
            doc.add(field);
        }
        writer.addDocument(doc);

        if ((++count % 10000) == 0) {
            writer.commit();
            ++round;
            logger.info("[" + round + "] closing");
            writer.close(true);
            logger.info("[" + round + "] closed");
            CheckIndex checkIndex = new CheckIndex(dir);
            FastByteArrayOutputStream os = new FastByteArrayOutputStream();
            PrintStream out = new PrintStream(os);
            checkIndex.setInfoStream(out);
            out.flush();
            CheckIndex.Status status = checkIndex.checkIndex();
            if (!status.clean) {
                logger.warn("check index [failure]\n" + new String(os.bytes().toBytes()));
            } else {
                logger.info("[" + round + "] checked");
            }
            mergePolicy = new TieredMergePolicy();
            mergePolicy.setUseCompoundFile(USE_COMPOUND);
            writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER)
                    .setMergePolicy(mergePolicy));
        }
    }
}

From source file: org.elasticsearch.test.unit.common.compress.CompressIndexInputOutputTests.java

License: Apache License

private void lucene(Compressor compressor) throws Exception {
    CompressedDirectory dir = new CompressedDirectory(new RAMDirectory(), compressor, false, "fdt");
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
    writer.addDocument(createDoc(1, (int) SizeValue.parseSizeValue("100b").singles()));
    writer.addDocument(createDoc(2, (int) SizeValue.parseSizeValue("5k").singles()));
    writer.commit(); // first commit point: makes the first two documents durable
    writer.addDocument(createDoc(3, (int) SizeValue.parseSizeValue("2k").singles()));
    writer.addDocument(createDoc(4, (int) SizeValue.parseSizeValue("1k").singles()));
    writer.commit();
    verify(writer);
    writer.forceMerge(1);
    writer.waitForMerges();
    verify(writer);
    dir.setCompress(false);
    writer.addDocument(createDoc(5, (int) SizeValue.parseSizeValue("2k").singles()));
    writer.addDocument(createDoc(6, (int) SizeValue.parseSizeValue("1k").singles()));
    verify(writer);
    writer.forceMerge(1);
    writer.waitForMerges();
    verify(writer);
    writer.close();
}

From source file: org.elasticsearch.test.unit.common.lucene.search.MoreLikeThisQueryTests.java

License: Apache License

@Test
public void testSimple() throws Exception {
    Directory dir = new RAMDirectory();
    IndexWriter indexWriter = new IndexWriter(dir,
            new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
    indexWriter.commit(); // create an initial (empty) commit point

    Document document = new Document();
    document.add(new TextField("_id", "1", Field.Store.YES));
    document.add(new TextField("text", "lucene", Field.Store.YES));
    indexWriter.addDocument(document);

    document = new Document();
    document.add(new TextField("_id", "2", Field.Store.YES));
    document.add(new TextField("text", "lucene release", Field.Store.YES));
    indexWriter.addDocument(document);

    IndexReader reader = IndexReader.open(indexWriter, true);
    IndexSearcher searcher = new IndexSearcher(reader);

    MoreLikeThisQuery mltQuery = new MoreLikeThisQuery("lucene", new String[] { "text" },
            Lucene.STANDARD_ANALYZER);
    mltQuery.setLikeText("lucene");
    mltQuery.setMinTermFrequency(1);
    mltQuery.setMinDocFreq(1);
    long count = Lucene.count(searcher, mltQuery);
    assertThat(count, equalTo(2L));

    reader.close();
    indexWriter.close();
}

From source file: org.elasticsearch.test.unit.common.lucene.search.TermsFilterTests.java

License: Apache License

@Test
public void testTermFilter() throws Exception {
    String fieldName = "field1";
    Directory rd = new RAMDirectory();
    IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer()));
    for (int i = 0; i < 100; i++) {
        Document doc = new Document();
        int term = i * 10; // terms are multiples of 10
        doc.add(new Field(fieldName, "" + term, StringField.TYPE_NOT_STORED));
        doc.add(new Field("all", "xxx", StringField.TYPE_NOT_STORED));
        w.addDocument(doc);
        if ((i % 40) == 0) {
            w.commit();
        }
    }
    AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(w, true));
    w.close();

    TermFilter tf = new TermFilter(new Term(fieldName, "19"));
    FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
    assertThat(bits, nullValue());

    tf = new TermFilter(new Term(fieldName, "20"));
    bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
    assertThat(bits.cardinality(), equalTo(1));

    tf = new TermFilter(new Term("all", "xxx"));
    bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
    assertThat(bits.cardinality(), equalTo(100));

    reader.close();
    rd.close();
}

From source file: org.elasticsearch.test.unit.common.lucene.search.TermsFilterTests.java

License: Apache License

@Test
public void testTermsFilter() throws Exception {
    String fieldName = "field1";
    Directory rd = new RAMDirectory();
    IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer()));
    for (int i = 0; i < 100; i++) {
        Document doc = new Document();
        int term = i * 10; // terms are multiples of 10
        doc.add(new Field(fieldName, "" + term, StringField.TYPE_NOT_STORED));
        doc.add(new Field("all", "xxx", StringField.TYPE_NOT_STORED));
        w.addDocument(doc);
        if ((i % 40) == 0) {
            w.commit();
        }
    }
    AtomicReader reader = new SlowCompositeReaderWrapper(DirectoryReader.open(w, true));
    w.close();

    TermsFilter tf = new TermsFilter(new Term[] { new Term(fieldName, "19") });
    FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
    assertThat(bits, nullValue());

    tf = new TermsFilter(new Term[] { new Term(fieldName, "19"), new Term(fieldName, "20") });
    bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
    assertThat(bits.cardinality(), equalTo(1));

    tf = new TermsFilter(
            new Term[] { new Term(fieldName, "19"), new Term(fieldName, "20"), new Term(fieldName, "10") });
    bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
    assertThat(bits.cardinality(), equalTo(2));

    tf = new TermsFilter(new Term[] { new Term(fieldName, "19"), new Term(fieldName, "20"),
            new Term(fieldName, "10"), new Term(fieldName, "00") });
    bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
    assertThat(bits.cardinality(), equalTo(2));

    reader.close();
    rd.close();
}

From source file: org.elasticsearch.test.unit.common.lucene.uid.VersionsTests.java

License: Apache License

@Test
public void testMergingOldIndices() throws Exception {
    final IndexWriterConfig iwConf = new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer());
    iwConf.setMergePolicy(new IndexUpgraderMergePolicy(iwConf.getMergePolicy()));
    final Directory dir = newDirectory();
    final IndexWriter iw = new IndexWriter(dir, iwConf);

    // 1st segment, no _version
    Document document = new Document();
    // Add a dummy field (enough to trigger #3237)
    document.add(new StringField("a", "b", Store.NO));
    StringField uid = new StringField(UidFieldMapper.NAME, "1", Store.YES);
    document.add(uid);
    iw.addDocument(document);
    uid.setStringValue("2");
    iw.addDocument(document);
    iw.commit();

    // 2nd segment, old layout
    document = new Document();
    UidField uidAndVersion = new UidField("3", 3L);
    document.add(uidAndVersion);
    iw.addDocument(document);
    uidAndVersion.uid = "4";
    uidAndVersion.version = 4L;
    iw.addDocument(document);
    iw.commit();

    // 3rd segment new layout
    document = new Document();
    uid.setStringValue("5");
    Field version = new NumericDocValuesField(UidFieldMapper.VERSION, 5L);
    document.add(uid);
    document.add(version);
    iw.addDocument(document);
    uid.setStringValue("6");
    version.setLongValue(6L);
    iw.addDocument(document);
    iw.commit();

    final Map<String, Long> expectedVersions = ImmutableMap.<String, Long>builder().put("1", 0L).put("2", 0L)
            .put("3", 0L).put("4", 4L).put("5", 5L).put("6", 6L).build();

    // Force merge and check versions
    iw.forceMerge(1);
    final AtomicReader ir = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(iw.getDirectory()));
    final NumericDocValues versions = ir.getNumericDocValues(UidFieldMapper.VERSION);
    assertThat(versions, notNullValue());
    for (int i = 0; i < ir.maxDoc(); ++i) {
        final String uidValue = ir.document(i).get(UidFieldMapper.NAME);
        final long expectedVersion = expectedVersions.get(uidValue);
        assertThat(versions.get(i), equalTo(expectedVersion));
    }

    iw.close();
    assertThat(IndexWriter.isLocked(iw.getDirectory()), is(false));
    ir.close();
    dir.close();
}

From source file: org.elasticsearch.test.unit.termvectors.TermVectorUnitTests.java

License: Apache License

private void writeEmptyTermVector(TermVectorResponse outResponse) throws IOException {

    Directory dir = new RAMDirectory();
    IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT,
            new StandardAnalyzer(TEST_VERSION_CURRENT));
    conf.setOpenMode(OpenMode.CREATE);
    IndexWriter writer = new IndexWriter(dir, conf);
    FieldType type = new FieldType(TextField.TYPE_STORED);
    type.setStoreTermVectorOffsets(true);
    type.setStoreTermVectorPayloads(false);
    type.setStoreTermVectorPositions(true);
    type.setStoreTermVectors(true);
    type.freeze();
    Document d = new Document();
    d.add(new Field("id", "abc", StringField.TYPE_STORED));

    writer.updateDocument(new Term("id", "abc"), d);
    writer.commit(); // commit so the document is durable and visible to newly opened readers
    writer.close();
    DirectoryReader dr = DirectoryReader.open(dir);
    IndexSearcher s = new IndexSearcher(dr);
    TopDocs search = s.search(new TermQuery(new Term("id", "abc")), 1);
    ScoreDoc[] scoreDocs = search.scoreDocs;
    int doc = scoreDocs[0].doc;
    Fields fields = dr.getTermVectors(doc);
    EnumSet<Flag> flags = EnumSet.of(Flag.Positions, Flag.Offsets);
    outResponse.setFields(fields, null, flags, fields);
    outResponse.setExists(true);
    dr.close();

}