Example usage for org.apache.lucene.index.IndexWriterConfig.getMergePolicy

Introduction

This page collects example usages of org.apache.lucene.index.IndexWriterConfig.getMergePolicy() from open-source projects.

Prototype

@Override
public MergePolicy getMergePolicy()
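
Below is a minimal, self-contained sketch of the typical call pattern: read the policy back from the config, adjust it when it is the expected concrete type, and hand the config to an IndexWriter. It is not taken from the examples that follow; the analyzer, the index path, and the single-argument IndexWriterConfig constructor (Lucene 5.x and later) are assumptions, while most of the examples below use the older (Version, Analyzer) constructor.

import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class GetMergePolicySketch {
    public static void main(String[] args) throws Exception {
        IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());

        // getMergePolicy() returns the policy currently set on the config;
        // recent Lucene versions default to a TieredMergePolicy.
        MergePolicy mp = config.getMergePolicy();
        if (mp instanceof TieredMergePolicy) {
            ((TieredMergePolicy) mp).setSegmentsPerTier(10.0);
        }

        // "/tmp/example-index" is a placeholder path.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
                IndexWriter writer = new IndexWriter(dir, config)) {
            writer.commit();
        }
    }
}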

Usage

From source file: org.apache.solr.core.TestMergePolicyConfig.java

License: Apache License

public void testLegacyMergePolicyConfig() throws Exception {
    final boolean expectCFS = Boolean.parseBoolean(System.getProperty("useCompoundFile"));

    initCore("solrconfig-mergepolicy-legacy.xml", "schema-minimal.xml");
    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
    assertEquals(expectCFS, iwc.getUseCompoundFile());

    assertEquals("termIndexInteval", 256, iwc.getTermIndexInterval());

    TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, iwc.getMergePolicy());

    assertEquals(7, tieredMP.getMaxMergeAtOnce());
    assertEquals(7.0D, tieredMP.getSegmentsPerTier(), 0.0D);
    assertEquals(expectCFS ? 1.0D : 0.0D, tieredMP.getNoCFSRatio(), 0.0D);

    assertCommitSomeNewDocs();
    assertCompoundSegments(h.getCore(), expectCFS);
}

From source file: org.apache.solr.core.TestMergePolicyConfig.java

License: Apache License

public void testTieredMergePolicyConfig() throws Exception {
    final boolean expectCFS = Boolean.parseBoolean(System.getProperty("useCompoundFile"));

    initCore("solrconfig-tieredmergepolicy.xml", "schema-minimal.xml");
    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());
    assertEquals(expectCFS, iwc.getUseCompoundFile());

    TieredMergePolicy tieredMP = assertAndCast(TieredMergePolicy.class, iwc.getMergePolicy());

    // set by legacy <mergeFactor> setting
    assertEquals(7, tieredMP.getMaxMergeAtOnce());

    // mp-specific setters
    assertEquals(19, tieredMP.getMaxMergeAtOnceExplicit());
    assertEquals(0.1D, tieredMP.getNoCFSRatio(), 0.0D);
    // make sure we overrode segmentsPerTier 
    // (split from maxMergeAtOnce out of mergeFactor)
    assertEquals(9D, tieredMP.getSegmentsPerTier(), 0.001);

    assertCommitSomeNewDocs();
    // even though we have a single segment (which is 100% of the size of 
    // the index, which is higher than our 0.6D threshold) the
    // compound ratio doesn't matter because the segment was never merged
    assertCompoundSegments(h.getCore(), expectCFS);

    assertCommitSomeNewDocs();
    assertNumSegments(h.getCore(), 2);
    assertCompoundSegments(h.getCore(), expectCFS);

    assertU(optimize());
    assertNumSegments(h.getCore(), 1);
    // we've now forced a merge, and the MP ratio should be in play
    assertCompoundSegments(h.getCore(), false);
}

From source file: org.apache.solr.core.TestMergePolicyConfig.java

License: Apache License

public void testLogMergePolicyConfig() throws Exception {

    final Class<? extends LogMergePolicy> mpClass = random().nextBoolean() ? LogByteSizeMergePolicy.class
            : LogDocMergePolicy.class;

    System.setProperty("solr.test.log.merge.policy", mpClass.getName());

    initCore("solrconfig-logmergepolicy.xml", "schema-minimal.xml");
    IndexWriterConfig iwc = solrConfig.indexConfig.toIndexWriterConfig(h.getCore().getLatestSchema());

    // verify some props set to -1 get lucene internal defaults
    assertEquals(-1, solrConfig.indexConfig.maxBufferedDocs);
    assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, iwc.getMaxBufferedDocs());
    assertEquals(-1, solrConfig.indexConfig.maxIndexingThreads);
    assertEquals(IndexWriterConfig.DEFAULT_MAX_THREAD_STATES, iwc.getMaxThreadStates());
    assertEquals(-1, solrConfig.indexConfig.ramBufferSizeMB, 0.0D);
    assertEquals(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, iwc.getRAMBufferSizeMB(), 0.0D);

    LogMergePolicy logMP = assertAndCast(mpClass, iwc.getMergePolicy());

    // set by legacy <mergeFactor> setting
    assertEquals(11, logMP.getMergeFactor());
    // set by legacy <maxMergeDocs> setting
    assertEquals(456, logMP.getMaxMergeDocs());

}

From source file: org.apache.solr.update.SolrIndexConfigTest.java

License: Apache License

@Test
public void testTieredMPSolrIndexConfigCreation() throws Exception {
    SolrConfig solrConfig = new SolrConfig("solr" + File.separator + "collection1",
            "solrconfig-tieredmergepolicy.xml", null);
    SolrIndexConfig solrIndexConfig = new SolrIndexConfig(solrConfig, null, null);
    assertNotNull(solrIndexConfig);
    IndexSchema indexSchema = IndexSchemaFactory.buildIndexSchema("schema.xml", solrConfig);

    IndexWriterConfig iwc = solrIndexConfig.toIndexWriterConfig(indexSchema);

    assertNotNull("null mp", iwc.getMergePolicy());
    assertTrue("mp is not TMP", iwc.getMergePolicy() instanceof TieredMergePolicy);
    TieredMergePolicy mp = (TieredMergePolicy) iwc.getMergePolicy();
    assertEquals("mp.maxMergeAtOnceExplicit", 19, mp.getMaxMergeAtOnceExplicit());
    assertEquals("mp.segmentsPerTier", 9, (int) mp.getSegmentsPerTier());

    assertNotNull("null ms", iwc.getMergeScheduler());
    assertTrue("ms is not CMS", iwc.getMergeScheduler() instanceof ConcurrentMergeScheduler);
    ConcurrentMergeScheduler ms = (ConcurrentMergeScheduler) iwc.getMergeScheduler();
    assertEquals("ms.maxMergeCount", 987, ms.getMaxMergeCount());
    assertEquals("ms.maxThreadCount", 42, ms.getMaxThreadCount());

}

From source file: org.elasticsearch.common.lucene.uid.VersionsTests.java

License: Apache License

@Test
public void testMergingOldIndices() throws Exception {
    final IndexWriterConfig iwConf = new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer());
    iwConf.setMergePolicy(new IndexUpgraderMergePolicy(iwConf.getMergePolicy()));
    final Directory dir = newDirectory();
    final IndexWriter iw = new IndexWriter(dir, iwConf);

    // 1st segment, no _version
    Document document = new Document();
    // Add a dummy field (enough to trigger #3237)
    document.add(new StringField("a", "b", Store.NO));
    StringField uid = new StringField(UidFieldMapper.NAME, "1", Store.YES);
    document.add(uid);
    iw.addDocument(document);
    uid.setStringValue("2");
    iw.addDocument(document);
    iw.commit();

    // 2nd segment, old layout
    document = new Document();
    UidField uidAndVersion = new UidField("3", 3L);
    document.add(uidAndVersion);
    iw.addDocument(document);
    uidAndVersion.uid = "4";
    uidAndVersion.version = 4L;
    iw.addDocument(document);
    iw.commit();

    // 3rd segment, new layout
    document = new Document();
    uid.setStringValue("5");
    Field version = new NumericDocValuesField(VersionFieldMapper.NAME, 5L);
    document.add(uid);
    document.add(version);
    iw.addDocument(document);
    uid.setStringValue("6");
    version.setLongValue(6L);
    iw.addDocument(document);
    iw.commit();

    final Map<String, Long> expectedVersions = ImmutableMap.<String, Long>builder().put("1", 0L).put("2", 0L)
            .put("3", 0L).put("4", 4L).put("5", 5L).put("6", 6L).build();

    // Force merge and check versions
    iw.forceMerge(1);
    final AtomicReader ir = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(iw.getDirectory()));
    final NumericDocValues versions = ir.getNumericDocValues(VersionFieldMapper.NAME);
    assertThat(versions, notNullValue());
    for (int i = 0; i < ir.maxDoc(); ++i) {
        final String uidValue = ir.document(i).get(UidFieldMapper.NAME);
        final long expectedVersion = expectedVersions.get(uidValue);
        assertThat(versions.get(i), equalTo(expectedVersion));
    }

    iw.close();
    assertThat(IndexWriter.isLocked(iw.getDirectory()), is(false));
    ir.close();
    dir.close();
}

From source file: org.elasticsearch.test.unit.common.lucene.uid.VersionsTests.java

License: Apache License

@Test
public void testMergingOldIndices() throws Exception {
    final IndexWriterConfig iwConf = new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer());
    iwConf.setMergePolicy(new IndexUpgraderMergePolicy(iwConf.getMergePolicy()));
    final Directory dir = newDirectory();
    final IndexWriter iw = new IndexWriter(dir, iwConf);

    // 1st segment, no _version
    Document document = new Document();
    // Add a dummy field (enough to trigger #3237)
    document.add(new StringField("a", "b", Store.NO));
    StringField uid = new StringField(UidFieldMapper.NAME, "1", Store.YES);
    document.add(uid);
    iw.addDocument(document);
    uid.setStringValue("2");
    iw.addDocument(document);
    iw.commit();

    // 2nd segment, old layout
    document = new Document();
    UidField uidAndVersion = new UidField("3", 3L);
    document.add(uidAndVersion);
    iw.addDocument(document);
    uidAndVersion.uid = "4";
    uidAndVersion.version = 4L;
    iw.addDocument(document);
    iw.commit();

    // 3rd segment, new layout
    document = new Document();
    uid.setStringValue("5");
    Field version = new NumericDocValuesField(UidFieldMapper.VERSION, 5L);
    document.add(uid);
    document.add(version);
    iw.addDocument(document);
    uid.setStringValue("6");
    version.setLongValue(6L);
    iw.addDocument(document);
    iw.commit();

    final Map<String, Long> expectedVersions = ImmutableMap.<String, Long>builder().put("1", 0L).put("2", 0L)
            .put("3", 0L).put("4", 4L).put("5", 5L).put("6", 6L).build();

    // Force merge and check versions
    iw.forceMerge(1);
    final AtomicReader ir = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(iw.getDirectory()));
    final NumericDocValues versions = ir.getNumericDocValues(UidFieldMapper.VERSION);
    assertThat(versions, notNullValue());
    for (int i = 0; i < ir.maxDoc(); ++i) {
        final String uidValue = ir.document(i).get(UidFieldMapper.NAME);
        final long expectedVersion = expectedVersions.get(uidValue);
        assertThat(versions.get(i), equalTo(expectedVersion));
    }

    iw.close();
    assertThat(IndexWriter.isLocked(iw.getDirectory()), is(false));
    ir.close();
    dir.close();
}

From source file: org.eu.bitzone.Leia.java

License: Apache License

private IndexWriter createIndexWriter() {
    try {
        final IndexWriterConfig cfg = new IndexWriterConfig(LV, new WhitespaceAnalyzer(LV));
        IndexDeletionPolicy policy;
        if (keepCommits) {
            policy = new KeepAllIndexDeletionPolicy();
        } else {
            policy = new KeepLastIndexDeletionPolicy();
        }
        cfg.setIndexDeletionPolicy(policy);
        final MergePolicy mp = cfg.getMergePolicy();
        cfg.setUseCompoundFile(IndexGate.preferCompoundFormat(dir));
        final IndexWriter iw = new IndexWriter(dir, cfg);
        return iw;
    } catch (final Exception e) {
        errorMsg("Error creating IndexWriter: " + e.toString());
        return null;
    }
}

From source file: org.eu.bitzone.Leia.java

License: Apache License

/**
 * Optimize the index.
 */
public void optimize(final Object dialog) {
    final Thread t = new Thread() {

        @Override
        public void run() {
            IndexWriter iw = null;
            final Object optimizeButton = find(dialog, "optimizeButton");
            setBoolean(optimizeButton, "enabled", false);
            final Object closeButton = find(dialog, "closeButton");
            setBoolean(closeButton, "enabled", false);
            final Object msg = find(dialog, "msg");
            final Object stat = find(dialog, "stat");
            setString(stat, "text", "Running ...");
            final PanelPrintWriter ppw = new PanelPrintWriter(Leia.this, msg);
            final boolean useCompound = getBoolean(find(dialog, "optCompound"), "selected");
            final boolean expunge = getBoolean(find(dialog, "optExpunge"), "selected");
            final boolean keep = getBoolean(find(dialog, "optKeepAll"), "selected");
            final boolean useLast = getBoolean(find(dialog, "optLastCommit"), "selected");
            final Object tiiSpin = find(dialog, "tii");
            final Object segnumSpin = find(dialog, "segnum");
            final int tii = Integer.parseInt(getString(tiiSpin, "text"));
            final int segnum = Integer.parseInt(getString(segnumSpin, "text"));
            try {
                if (is != null) {
                    is = null;
                }
                if (ir != null) {
                    ir.close();
                }
                if (ar != null) {
                    ar.close();
                }
                IndexDeletionPolicy policy;
                if (keep) {
                    policy = new KeepAllIndexDeletionPolicy();
                } else {
                    policy = new KeepLastIndexDeletionPolicy();
                }
                final IndexWriterConfig cfg = new IndexWriterConfig(LV, new WhitespaceAnalyzer(LV));
                if (!useLast) {
                    final IndexCommit ic = ((DirectoryReader) ir).getIndexCommit();
                    if (ic != null) {
                        cfg.setIndexCommit(ic);
                    }
                }
                cfg.setIndexDeletionPolicy(policy);
                cfg.setTermIndexInterval(tii);
                final MergePolicy p = cfg.getMergePolicy();
                cfg.setUseCompoundFile(useCompound);
                if (useCompound) {
                    p.setNoCFSRatio(1.0);
                }
                cfg.setInfoStream(ppw);
                iw = new IndexWriter(dir, cfg);
                final long startSize = Util.calcTotalFileSize(pName, dir);
                final long startTime = System.currentTimeMillis();
                if (expunge) {
                    iw.forceMergeDeletes();
                } else {
                    if (segnum > 1) {
                        iw.forceMerge(segnum, true);
                    } else {
                        iw.forceMerge(1, true);
                    }
                }
                iw.commit();
                final long endTime = System.currentTimeMillis();
                final long endSize = Util.calcTotalFileSize(pName, dir);
                final long deltaSize = startSize - endSize;
                final String sign = deltaSize < 0 ? " Increased " : " Reduced ";
                final String sizeMsg = sign + Util.normalizeSize(Math.abs(deltaSize))
                        + Util.normalizeUnit(Math.abs(deltaSize));
                final String timeMsg = String.valueOf(endTime - startTime) + " ms";
                showStatus(sizeMsg + " in " + timeMsg);
                iw.close();
                setString(stat, "text", "Finished OK.");
            } catch (final Exception e) {
                e.printStackTrace(ppw);
                setString(stat, "text", "ERROR - aborted.");
                errorMsg("ERROR optimizing: " + e.toString());
                if (iw != null) {
                    try {
                        iw.close();
                    } catch (final Exception e1) {
                    }
                }
            } finally {
                setBoolean(closeButton, "enabled", true);
            }
            try {
                actionReopen();
                is = new IndexSearcher(ir);
                // add dialog again
                add(dialog);
            } catch (final Exception e) {
                e.printStackTrace(ppw);
                errorMsg("ERROR reopening after optimize:\n" + e.getMessage());
            }
        }
    };
    t.start();
}

From source file: org.exoplatform.services.jcr.impl.core.query.lucene.AbstractIndex.java

License: Apache License

/**
 * Returns an <code>IndexWriter</code> on this index.
 * @return an <code>IndexWriter</code> on this index.
 * @throws IOException if the writer cannot be obtained.
 */
protected synchronized IndexWriter getIndexWriter() throws IOException {
    if (indexReader != null) {
        indexReader.close();
        log.debug("closing IndexReader.");
        indexReader = null;
    }
    if (indexWriter == null) {
        IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_30, analyzer);
        config.setSimilarity(similarity);
        if (config.getMergePolicy() instanceof LogMergePolicy) {
            ((LogMergePolicy) config.getMergePolicy()).setUseCompoundFile(useCompoundFile);
        } else {
            log.error("Can't set \"UseCompoundFile\". Merge policy is not an instance of LogMergePolicy. ");
        }
        indexWriter = new IndexWriter(directory, config);
        setUseCompoundFile(useCompoundFile);
        indexWriter.setInfoStream(STREAM_LOGGER);
    }
    return indexWriter;
}

From source file: org.exoplatform.services.jcr.impl.core.query.lucene.AbstractIndex.java

License: Apache License

/**
 * The Lucene index writer property: useCompoundFile
 */
void setUseCompoundFile(boolean b) {
    useCompoundFile = b;
    if (indexWriter != null) {
        IndexWriterConfig config = indexWriter.getConfig();
        if (config.getMergePolicy() instanceof LogMergePolicy) {
            ((LogMergePolicy) config.getMergePolicy()).setUseCompoundFile(useCompoundFile);
            ((LogMergePolicy) config.getMergePolicy()).setNoCFSRatio(1.0);
        } else {
            log.error("Can't set \"UseCompoundFile\". Merge policy is not an instance of LogMergePolicy. ");
        }
    }
}