Example usage for org.apache.lucene.index IndexWriter hasUncommittedChanges

List of usage examples for org.apache.lucene.index IndexWriter hasUncommittedChanges

Introduction

On this page you can find example usage for org.apache.lucene.index IndexWriter hasUncommittedChanges.

Prototype

public final boolean hasUncommittedChanges() 

Document

Returns true if there may be changes that have not been committed.
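
Before the full examples below, here is a minimal, self-contained sketch of the typical pattern: index a document, then call commit() only when hasUncommittedChanges() reports pending changes. The index path, field name, and analyzer are placeholders chosen for illustration, not taken from the examples on this page.

import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class HasUncommittedChangesExample {
    public static void main(String[] args) throws Exception {
        // "/tmp/example-index" is a placeholder path for this sketch.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
                IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {

            Document doc = new Document();
            doc.add(new TextField("body", "hello lucene", Field.Store.YES));
            writer.addDocument(doc);

            // Skip the (potentially expensive) commit when nothing is pending.
            if (writer.hasUncommittedChanges()) {
                writer.commit();
            }
        }
    }
}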

Usage

From source file: com.qwazr.search.index.IndexInstance.java

License: Apache License

/**
 * @param schema
 * @param indexDirectory
 * @return
 */
final static IndexInstance newInstance(SchemaInstance schema, File indexDirectory,
        IndexSettingsDefinition settings)
        throws ServerException, IOException, ReflectiveOperationException, InterruptedException {
    UpdatableAnalyzer indexAnalyzer = null;
    UpdatableAnalyzer queryAnalyzer = null;
    IndexWriter indexWriter = null;
    Directory dataDirectory = null;
    try {

        if (!indexDirectory.exists())
            indexDirectory.mkdir();
        if (!indexDirectory.isDirectory())
            throw new IOException(
                    "This name is not valid. No directory exists for this location: " + indexDirectory);

        FileSet fileSet = new FileSet(indexDirectory);

        //Loading the settings
        if (settings == null) {
            settings = fileSet.settingsFile.exists()
                    ? JsonMapper.MAPPER.readValue(fileSet.settingsFile, IndexSettingsDefinition.class)
                    : IndexSettingsDefinition.EMPTY;
        } else {
            JsonMapper.MAPPER.writeValue(fileSet.settingsFile, settings);
        }

        //Loading the fields
        File fieldMapFile = new File(indexDirectory, FIELDS_FILE);
        LinkedHashMap<String, FieldDefinition> fieldMap = fieldMapFile.exists()
                ? JsonMapper.MAPPER.readValue(fieldMapFile, FieldDefinition.MapStringFieldTypeRef)
                : new LinkedHashMap<>();

        //Loading the analyzers
        File analyzerMapFile = new File(indexDirectory, ANALYZERS_FILE);
        LinkedHashMap<String, AnalyzerDefinition> analyzerMap = analyzerMapFile.exists()
                ? JsonMapper.MAPPER.readValue(analyzerMapFile, AnalyzerDefinition.MapStringAnalyzerTypeRef)
                : new LinkedHashMap<>();

        AnalyzerContext context = new AnalyzerContext(analyzerMap, fieldMap);
        indexAnalyzer = new UpdatableAnalyzer(context, context.indexAnalyzerMap);
        queryAnalyzer = new UpdatableAnalyzer(context, context.queryAnalyzerMap);

        // Open and lock the data directory
        dataDirectory = FSDirectory.open(fileSet.dataDirectory.toPath());

        // Set up the IndexWriter configuration
        IndexWriterConfig indexWriterConfig = new IndexWriterConfig(indexAnalyzer);
        if (settings != null && settings.similarity_class != null)
            indexWriterConfig.setSimilarity(IndexUtils.findSimilarity(settings.similarity_class));
        indexWriterConfig.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
        SnapshotDeletionPolicy snapshotDeletionPolicy = new SnapshotDeletionPolicy(
                indexWriterConfig.getIndexDeletionPolicy());
        indexWriterConfig.setIndexDeletionPolicy(snapshotDeletionPolicy);
        indexWriter = new IndexWriter(dataDirectory, indexWriterConfig);
        if (indexWriter.hasUncommittedChanges())
            indexWriter.commit();

        // Finally we build the SearcherManager
        SearcherManager searcherManager = new SearcherManager(indexWriter, true, null);

        return new IndexInstance(schema, dataDirectory, settings, analyzerMap, fieldMap, fileSet, indexWriter,
                searcherManager, queryAnalyzer);
    } catch (IOException | ServerException | ReflectiveOperationException | InterruptedException e) {
        // We failed in opening the index. We close everything we can
        if (queryAnalyzer != null)
            IOUtils.closeQuietly(queryAnalyzer);
        if (indexAnalyzer != null)
            IOUtils.closeQuietly(indexAnalyzer);
        if (indexWriter != null)
            IOUtils.closeQuietly(indexWriter);
        if (dataDirectory != null)
            IOUtils.closeQuietly(dataDirectory);
        throw e;
    }
}

From source file: org.apache.solr.update.DirectUpdateHandler2.java

License: Apache License

@Override
public void commit(CommitUpdateCommand cmd) throws IOException {
    if (cmd.prepareCommit) {
        prepareCommit(cmd);
        return;
    }

    if (cmd.optimize) {
        optimizeCommands.incrementAndGet();
    } else {
        commitCommands.incrementAndGet();
        if (cmd.expungeDeletes)
            expungeDeleteCommands.incrementAndGet();
    }

    Future[] waitSearcher = null;
    if (cmd.waitSearcher) {
        waitSearcher = new Future[1];
    }

    boolean error = true;
    try {
        // only allow one hard commit to proceed at once
        if (!cmd.softCommit) {
            solrCoreState.getCommitLock().lock();
        }

        log.info("start " + cmd);

        // We must cancel pending commits *before* we actually execute the commit.

        if (cmd.openSearcher) {
            // we can cancel any pending soft commits if this commit will open a new searcher
            softCommitTracker.cancelPendingCommit();
        }
        if (!cmd.softCommit && (cmd.openSearcher || !commitTracker.getOpenSearcher())) {
            // cancel a pending hard commit if this commit is of equal or greater "strength"...
            // If the autoCommit has openSearcher=true, then this commit must have openSearcher=true
            // to cancel.
            commitTracker.cancelPendingCommit();
        }

        RefCounted<IndexWriter> iw = solrCoreState.getIndexWriter(core);
        try {
            IndexWriter writer = iw.get();
            if (cmd.optimize) {
                writer.forceMerge(cmd.maxOptimizeSegments);
            } else if (cmd.expungeDeletes) {
                writer.forceMergeDeletes();
            }

            if (!cmd.softCommit) {
                synchronized (solrCoreState.getUpdateLock()) { // sync is currently needed to prevent preCommit
                    // from being called between preSoft and
                    // postSoft... see postSoft comments.
                    if (ulog != null)
                        ulog.preCommit(cmd);
                }

                // SolrCore.verbose("writer.commit() start writer=",writer);

                if (writer.hasUncommittedChanges()) {
                    final Map<String, String> commitData = new HashMap<String, String>();
                    commitData.put(SolrIndexWriter.COMMIT_TIME_MSEC_KEY,
                            String.valueOf(System.currentTimeMillis()));
                    writer.setCommitData(commitData);
                    writer.commit();
                } else {
                    log.info("No uncommitted changes. Skipping IW.commit.");
                }

                // SolrCore.verbose("writer.commit() end");
                numDocsPending.set(0);
                callPostCommitCallbacks();
            } else {
                callPostSoftCommitCallbacks();
            }
        } finally {
            iw.decref();
        }

        if (cmd.optimize) {
            callPostOptimizeCallbacks();
        }

        if (cmd.softCommit) {
            // ulog.preSoftCommit();
            synchronized (solrCoreState.getUpdateLock()) {
                if (ulog != null)
                    ulog.preSoftCommit(cmd);
                core.getSearcher(true, false, waitSearcher, true);
                if (ulog != null)
                    ulog.postSoftCommit(cmd);
            }
            // ulog.postSoftCommit();
        } else {
            synchronized (solrCoreState.getUpdateLock()) {
                if (ulog != null)
                    ulog.preSoftCommit(cmd);
                if (cmd.openSearcher) {
                    core.getSearcher(true, false, waitSearcher);
                } else {
                    // force open a new realtime searcher so realtime-get and versioning code can see the latest
                    RefCounted<SolrIndexSearcher> searchHolder = core.openNewSearcher(true, true);
                    searchHolder.decref();
                }
                if (ulog != null)
                    ulog.postSoftCommit(cmd);
            }
            if (ulog != null)
                ulog.postCommit(cmd); // postCommit currently means new searcher has
            // also been opened
        }

        // reset commit tracking

        if (cmd.softCommit) {
            softCommitTracker.didCommit();
        } else {
            commitTracker.didCommit();
        }

        log.info("end_commit_flush");

        error = false;
    } finally {
        if (!cmd.softCommit) {
            solrCoreState.getCommitLock().unlock();
        }

        addCommands.set(0);
        deleteByIdCommands.set(0);
        deleteByQueryCommands.set(0);
        if (error)
            numErrors.incrementAndGet();
    }

    // if we are supposed to wait for the searcher to be registered, then we should do it
    // outside any synchronized block so that other update operations can proceed.
    if (waitSearcher != null && waitSearcher[0] != null) {
        try {
            waitSearcher[0].get();
        } catch (InterruptedException e) {
            SolrException.log(log, e);
        } catch (ExecutionException e) {
            SolrException.log(log, e);
        }
    }
}

From source file: org.eclipse.dltk.internal.core.index.lucene.IndexContainer.java

License: Open Source License

synchronized boolean hasChanges() {
    for (Map<Integer, IndexWriter> dataWriters : fIndexWriters.values()) {
        for (IndexWriter writer : dataWriters.values()) {
            if (writer != null && writer.hasUncommittedChanges()) {
                return true;
            }
        }
        if (fTimestampsWriter != null) {
            return fTimestampsWriter.hasUncommittedChanges();
        }
    }
    return false;
}

From source file: org.nuxeo.ecm.core.storage.sql.db.H2Fulltext.java

License: Apache License

/**
 * Searches from the given full text index. The returned result set has a single ID column which holds the keys for
 * the matching rows.
 * <p>
 * Usually called through:
 *
 * <pre>
 *   SELECT * FROM NXFT_SEARCH(name, 'text');
 * </pre>
 *
 * @param conn the connection
 * @param indexName the index name
 * @param text the search query
 * @return the result set
 */
public static ResultSet search(Connection conn, String indexName, String text) throws SQLException {
    DatabaseMetaData meta = conn.getMetaData();
    if (indexName == null) {
        indexName = DEFAULT_INDEX_NAME;
    }

    String schema;
    String table;
    String analyzerName;

    // find schema, table and analyzer
    try (PreparedStatement ps = conn
            .prepareStatement("SELECT SCHEMA, TABLE, ANALYZER FROM " + FT_TABLE + " WHERE NAME = ?")) {
        ps.setString(1, indexName);
        try (ResultSet res = ps.executeQuery()) {
            if (!res.next()) {
                throw new SQLException("No such index: " + indexName);
            }
            schema = res.getString(1);
            table = res.getString(2);
            analyzerName = res.getString(3);
        }
    }

    int type = getPrimaryKeyType(meta, schema, table);
    SimpleResultSet rs = new SimpleResultSet();
    rs.addColumn(COL_KEY, type, 0, 0);

    if (meta.getURL().startsWith("jdbc:columnlist:")) {
        // this is just to query the result set columns
        return rs;
    }

    // flush changes
    final IndexWriter writer = getIndexWriter(getIndexName(conn), getIndexPath(conn), analyzerName);
    if (writer.hasUncommittedChanges()) {
        try {
            writer.commit();
        } catch (IOException cause) {
            throw convertException(cause);
        }
    }

    // search index
    try {
        BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
        String defaultField = fieldForIndex(indexName);
        Analyzer analyzer = getAnalyzer(analyzerName);
        QueryParser parser = new QueryParser(defaultField, analyzer);
        queryBuilder.add(parser.parse(text), BooleanClause.Occur.MUST);

        try (IndexReader reader = DirectoryReader.open(writer.getDirectory())) {
            IndexSearcher searcher = new IndexSearcher(reader);
            Collector collector = new ResultSetCollector(rs, reader, type);
            searcher.search(queryBuilder.build(), collector);
        }
    } catch (SQLException | ParseException | IOException e) {
        throw convertException(e);
    }
    return rs;
}