Example usage for org.apache.lucene.index IndexWriter forceMerge

List of usage examples for org.apache.lucene.index IndexWriter forceMerge

Introduction

On this page you can find usage examples for org.apache.lucene.index IndexWriter forceMerge.

Prototype

public void forceMerge(int maxNumSegments, boolean doWait) throws IOException 

Source Link

Document

Just like #forceMerge(int), except you can specify whether the call should block until all merging completes.
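Before the real-world usages below, here is a minimal, self-contained sketch of the call. It assumes a Lucene 5.x-style API (matching the examples on this page, with StandardAnalyzer from the lucene-analyzers-common module); the index path and field name are illustrative only.

import java.io.IOException;
import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class ForceMergeExample {
    public static void main(String[] args) throws IOException {
        // Path and field name are placeholders for this sketch.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
                IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            doc.add(new TextField("body", "some text", Field.Store.NO));
            writer.addDocument(doc);
            writer.commit();

            // Block until the index has been merged down to a single segment.
            writer.forceMerge(1, true);

            // Alternatively, request the merge and return immediately;
            // the merge scheduler finishes it in the background:
            // writer.forceMerge(1, false);

            writer.commit();
        }
    }
}

Passing true for doWait is the common choice in the usages below, since the callers want the single-segment index before reopening readers or reporting results.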

Usage

From source file:org.elasticsearch.search.suggest.completion.CompletionPostingsFormatTests.java

License:Apache License

public Lookup buildAnalyzingLookup(final CompletionFieldMapper mapper, String[] terms, String[] surfaces,
        long[] weights) throws IOException {
    RAMDirectory dir = new RAMDirectory();
    Codec codec = new Lucene54Codec() {
        @Override
        public PostingsFormat getPostingsFormatForField(String field) {
            final PostingsFormat in = super.getPostingsFormatForField(field);
            return mapper.fieldType().postingsFormat(in);
        }
    };
    IndexWriterConfig indexWriterConfig = new IndexWriterConfig(mapper.fieldType().indexAnalyzer());

    indexWriterConfig.setCodec(codec);
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    for (int i = 0; i < weights.length; i++) {
        Document doc = new Document();
        BytesRef payload = mapper.buildPayload(new BytesRef(surfaces[i]), weights[i],
                new BytesRef(Long.toString(weights[i])));
        doc.add(mapper.getCompletionField(ContextMapping.EMPTY_CONTEXT, terms[i], payload));
        if (randomBoolean()) {
            writer.commit();
        }
        writer.addDocument(doc);
    }
    writer.commit();
    writer.forceMerge(1, true);
    writer.commit();
    DirectoryReader reader = DirectoryReader.open(writer, true);
    assertThat(reader.leaves().size(), equalTo(1));
    assertThat(reader.leaves().get(0).reader().numDocs(), equalTo(weights.length));
    LeafReaderContext atomicReaderContext = reader.leaves().get(0);
    Terms luceneTerms = atomicReaderContext.reader().terms(mapper.fieldType().names().fullName());
    Lookup lookup = ((Completion090PostingsFormat.CompletionTerms) luceneTerms).getLookup(mapper.fieldType(),
            new CompletionSuggestionContext(null));
    reader.close();
    writer.close();
    dir.close();
    return lookup;
}

From source file:org.elasticsearch.search.suggest.completion.old.CompletionPostingsFormatTest.java

License:Apache License

public Lookup buildAnalyzingLookup(final OldCompletionFieldMapper mapper, String[] terms, String[] surfaces,
        long[] weights) throws IOException {
    RAMDirectory dir = new RAMDirectory();
    Codec codec = new Lucene50Codec() {
        public PostingsFormat getPostingsFormatForField(String field) {
            final PostingsFormat in = super.getPostingsFormatForField(field);
            return mapper.fieldType().postingsFormat(in);
        }
    };
    IndexWriterConfig indexWriterConfig = new IndexWriterConfig(mapper.fieldType().indexAnalyzer());

    indexWriterConfig.setCodec(codec);
    IndexWriter writer = new IndexWriter(dir, indexWriterConfig);
    for (int i = 0; i < weights.length; i++) {
        Document doc = new Document();
        BytesRef payload = mapper.buildPayload(new BytesRef(surfaces[i]), weights[i],
                new BytesRef(Long.toString(weights[i])));
        doc.add(mapper.getCompletionField(ContextMapping.EMPTY_CONTEXT, terms[i], payload));
        if (randomBoolean()) {
            writer.commit();
        }
        writer.addDocument(doc);
    }
    writer.commit();
    writer.forceMerge(1, true);
    writer.commit();
    DirectoryReader reader = DirectoryReader.open(writer, true);
    assertThat(reader.leaves().size(), equalTo(1));
    assertThat(reader.leaves().get(0).reader().numDocs(), equalTo(weights.length));
    LeafReaderContext atomicReaderContext = reader.leaves().get(0);
    Terms luceneTerms = atomicReaderContext.reader().terms(mapper.fieldType().names().fullName());
    Lookup lookup = ((Completion090PostingsFormat.CompletionTerms) luceneTerms).getLookup(mapper.fieldType(),
            new CompletionSuggestionContext(null));
    reader.close();
    writer.close();
    dir.close();
    return lookup;
}

From source file:org.eu.bitzone.Leia.java

License:Apache License

/**
 * Optimize the index.
 */
public void optimize(final Object dialog) {
    final Thread t = new Thread() {

        @Override
        public void run() {
            IndexWriter iw = null;
            final Object optimizeButton = find(dialog, "optimizeButton");
            setBoolean(optimizeButton, "enabled", false);
            final Object closeButton = find(dialog, "closeButton");
            setBoolean(closeButton, "enabled", false);
            final Object msg = find(dialog, "msg");
            final Object stat = find(dialog, "stat");
            setString(stat, "text", "Running ...");
            final PanelPrintWriter ppw = new PanelPrintWriter(Leia.this, msg);
            final boolean useCompound = getBoolean(find(dialog, "optCompound"), "selected");
            final boolean expunge = getBoolean(find(dialog, "optExpunge"), "selected");
            final boolean keep = getBoolean(find(dialog, "optKeepAll"), "selected");
            final boolean useLast = getBoolean(find(dialog, "optLastCommit"), "selected");
            final Object tiiSpin = find(dialog, "tii");
            final Object segnumSpin = find(dialog, "segnum");
            final int tii = Integer.parseInt(getString(tiiSpin, "text"));
            final int segnum = Integer.parseInt(getString(segnumSpin, "text"));
            try {
                if (is != null) {
                    is = null;
                }
                if (ir != null) {
                    ir.close();
                }
                if (ar != null) {
                    ar.close();
                }
                IndexDeletionPolicy policy;
                if (keep) {
                    policy = new KeepAllIndexDeletionPolicy();
                } else {
                    policy = new KeepLastIndexDeletionPolicy();
                }
                final IndexWriterConfig cfg = new IndexWriterConfig(LV, new WhitespaceAnalyzer(LV));
                if (!useLast) {
                    final IndexCommit ic = ((DirectoryReader) ir).getIndexCommit();
                    if (ic != null) {
                        cfg.setIndexCommit(ic);
                    }
                }
                cfg.setIndexDeletionPolicy(policy);
                cfg.setTermIndexInterval(tii);
                final MergePolicy p = cfg.getMergePolicy();
                cfg.setUseCompoundFile(useCompound);
                if (useCompound) {
                    p.setNoCFSRatio(1.0);
                }
                cfg.setInfoStream(ppw);
                iw = new IndexWriter(dir, cfg);
                final long startSize = Util.calcTotalFileSize(pName, dir);
                final long startTime = System.currentTimeMillis();
                if (expunge) {
                    iw.forceMergeDeletes();
                } else {
                    if (segnum > 1) {
                        iw.forceMerge(segnum, true);
                    } else {
                        iw.forceMerge(1, true);
                    }
                }
                iw.commit();
                final long endTime = System.currentTimeMillis();
                final long endSize = Util.calcTotalFileSize(pName, dir);
                final long deltaSize = startSize - endSize;
                final String sign = deltaSize < 0 ? " Increased " : " Reduced ";
                final String sizeMsg = sign + Util.normalizeSize(Math.abs(deltaSize))
                        + Util.normalizeUnit(Math.abs(deltaSize));
                final String timeMsg = String.valueOf(endTime - startTime) + " ms";
                showStatus(sizeMsg + " in " + timeMsg);
                iw.close();
                setString(stat, "text", "Finished OK.");
            } catch (final Exception e) {
                e.printStackTrace(ppw);
                setString(stat, "text", "ERROR - aborted.");
                errorMsg("ERROR optimizing: " + e.toString());
                if (iw != null) {
                    try {
                        iw.close();
                    } catch (final Exception e1) {
                    }
                }
            } finally {
                setBoolean(closeButton, "enabled", true);
            }
            try {
                actionReopen();
                is = new IndexSearcher(ir);
                // add dialog again
                add(dialog);
            } catch (final Exception e) {
                e.printStackTrace(ppw);
                errorMsg("ERROR reopening after optimize:\n" + e.getMessage());
            }
        }
    };
    t.start();
}

From source file:org.exist.indexing.lucene.LuceneIndexWorker.java

License:Open Source License

/**
 * Optimize the Lucene index by merging all segments into a single one. This
 * may take a while and write operations will be blocked during the optimize.
 */
public void optimize() {
    IndexWriter writer = null;
    try {
        writer = index.getWriter(true);
        writer.forceMerge(1, true);
        writer.commit();
    } catch (IOException e) {
        LOG.warn("An exception was caught while optimizing the lucene index: " + e.getMessage(), e);
    } finally {
        index.releaseWriter(writer);
    }
}

From source file:org.getopt.luke.Luke.java

License:Apache License

/**
 * Optimize the index.
 */
public void optimize(final Object dialog) {
    Thread t = new Thread() {
        public void run() {
            IndexWriter iw = null;
            Object optimizeButton = find(dialog, "optimizeButton");
            setBoolean(optimizeButton, "enabled", false);
            Object closeButton = find(dialog, "closeButton");
            setBoolean(closeButton, "enabled", false);
            Object msg = find(dialog, "msg");
            Object stat = find(dialog, "stat");
            setString(stat, "text", "Running ...");
            PanelPrintWriter ppw = new PanelPrintWriter(Luke.this, msg);
            boolean useCompound = getBoolean(find(dialog, "optCompound"), "selected");
            boolean expunge = getBoolean(find(dialog, "optExpunge"), "selected");
            boolean keep = getBoolean(find(dialog, "optKeepAll"), "selected");
            boolean useLast = getBoolean(find(dialog, "optLastCommit"), "selected");
            Object tiiSpin = find(dialog, "tii");
            Object segnumSpin = find(dialog, "segnum");
            int tii = Integer.parseInt(getString(tiiSpin, "text"));
            int segnum = Integer.parseInt(getString(segnumSpin, "text"));
            try {
                if (is != null)
                    is = null;
                if (ir != null)
                    ir.close();
                if (ar != null)
                    ar.close();
                IndexDeletionPolicy policy;
                if (keep) {
                    policy = new KeepAllIndexDeletionPolicy();
                } else {
                    policy = new KeepLastIndexDeletionPolicy();
                }
                IndexWriterConfig cfg = new IndexWriterConfig(LV, new WhitespaceAnalyzer(LV));
                if (!useLast) {
                    IndexCommit ic = ((DirectoryReader) ir).getIndexCommit();
                    if (ic != null) {
                        cfg.setIndexCommit(ic);
                    }
                }
                cfg.setIndexDeletionPolicy(policy);
                cfg.setTermIndexInterval(tii);
                cfg.setUseCompoundFile(useCompound);
                cfg.setInfoStream(ppw);
                iw = new IndexWriter(dir, cfg);
                long startSize = Util.calcTotalFileSize(pName, dir);
                long startTime = System.currentTimeMillis();
                if (expunge) {
                    iw.forceMergeDeletes();
                } else {
                    if (segnum > 1) {
                        iw.forceMerge(segnum, true);
                    } else {
                        iw.forceMerge(1, true);
                    }
                }
                iw.commit();
                long endTime = System.currentTimeMillis();
                long endSize = Util.calcTotalFileSize(pName, dir);
                long deltaSize = startSize - endSize;
                String sign = deltaSize < 0 ? " Increased " : " Reduced ";
                String sizeMsg = sign + Util.normalizeSize(Math.abs(deltaSize))
                        + Util.normalizeUnit(Math.abs(deltaSize));
                String timeMsg = String.valueOf(endTime - startTime) + " ms";
                showStatus(sizeMsg + " in " + timeMsg);
                iw.close();
                setString(stat, "text", "Finished OK.");
            } catch (Exception e) {
                e.printStackTrace(ppw);
                setString(stat, "text", "ERROR - aborted.");
                errorMsg("ERROR optimizing: " + e.toString());
                if (iw != null)
                    try {
                        iw.close();
                    } catch (Exception e1) {
                    }
            } finally {
                setBoolean(closeButton, "enabled", true);
            }
            try {
                actionReopen();
                is = new IndexSearcher(ir);
                // add dialog again
                add(dialog);
            } catch (Exception e) {
                e.printStackTrace(ppw);
                errorMsg("ERROR reopening after optimize:\n" + e.getMessage());
            }
        }
    };
    t.start();
}

From source file:org.hibernate.search.store.optimization.impl.ExplicitOnlyOptimizerStrategy.java

License:Open Source License

@Override
public boolean performOptimization(IndexWriter writer) {
    boolean acquired = optimizerIsBusy.compareAndSet(false, true);
    if (acquired) {
        try {
            writer.forceMerge(1, true);
        } catch (IOException e) {
            throw new SearchException("Unable to optimize directoryProvider: " + indexName, e);
        } finally {
            optimizerIsBusy.set(false);
        }
        return true;
    } else {
        log.optimizationSkippedStillBusy(indexName);
        return false;
    }
}

From source file:proj.zoie.impl.indexing.internal.LuceneIndexDataLoader.java

License:Apache License

public void optimize(int numSegs) throws IOException {
    long t0 = System.currentTimeMillis();
    if (numSegs <= 1)
        numSegs = 1;
    log.info("optmizing, numSegs: " + numSegs + " ...");

    // we should optimize
    synchronized (_optimizeMonitor) {
        BaseSearchIndex<R> idx = getSearchIndex();
        IndexWriter writer = null;
        try {
            writer = idx.openIndexWriter(_analyzer, _similarity);
            writer.forceMerge(numSegs, true);
            writer.commit();
        } finally {
            if (writer != null) {
                idx.closeIndexWriter();
            }
        }
        _idxMgr.refreshDiskReader();
    }
    log.info("index optimized in " + (System.currentTimeMillis() - t0) + "ms");
}