Example usage for org.apache.lucene.index IndexWriter getDirectory

List of usage examples for org.apache.lucene.index IndexWriter getDirectory

Introduction

This page shows example usages of org.apache.lucene.index.IndexWriter.getDirectory.

Prototype

public Directory getDirectory() 

Document

Returns the Directory used by this index.
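
Before the project examples below, here is a minimal, self-contained sketch of the method in isolation. It is not taken from any of the sources listed under Usage, assumes Lucene 5.x or later, and uses an illustrative index path:

import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class GetDirectoryExample {
    public static void main(String[] args) throws Exception {
        // "/tmp/example-index" is an illustrative path, not one used by the examples below.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
                IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            writer.commit();
            // getDirectory() returns the Directory this writer operates on,
            // which can then be handed to readers or utility code.
            Directory fromWriter = writer.getDirectory();
            System.out.println(DirectoryReader.indexExists(fromWriter)); // true after the commit
        }
    }
}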

Usage

From source file:org.ms123.common.data.lucene.LuceneServiceImpl.java

License:Open Source License

public void rollback(LuceneSession session) {
    IndexWriter tiw = session.getIndexWriter();
    try {
        tiw.getDirectory().close();
        tiw.close();
    } catch (Exception e) {
        //e.printStackTrace(); 
        System.out.println("LuceneService.rollback:" + e);
    }
}

From source file:org.neo4j.kernel.api.impl.index.backup.LuceneIndexSnapshotFileIterator.java

License:Open Source License

private static boolean hasCommits(IndexWriter indexWriter) throws IOException {
    Directory directory = indexWriter.getDirectory();
    return DirectoryReader.indexExists(directory) && SegmentInfos.readLatestCommit(directory) != null;
}

From source file:org.neo4j.kernel.api.impl.index.backup.LuceneIndexSnapshots.java

License:Open Source License

private static boolean hasCommits(IndexWriter indexWriter) throws IOException {
    Directory directory = indexWriter.getDirectory();
    return hasCommits(directory);
}

From source file:org.nuxeo.ecm.core.storage.sql.db.H2Fulltext.java

License:Apache License

/**
 * Searches from the given full text index. The returned result set has a single ID column which holds the keys for
 * the matching rows.
 * <p>
 * Usually called through:
 *
 * <pre>
 *   SELECT * FROM NXFT_SEARCH(name, 'text');
 * </pre>
 *
 * @param conn the connection
 * @param indexName the index name
 * @param text the search query
 * @return the result set
 */
public static ResultSet search(Connection conn, String indexName, String text) throws SQLException {
    DatabaseMetaData meta = conn.getMetaData();
    if (indexName == null) {
        indexName = DEFAULT_INDEX_NAME;
    }

    String schema;
    String table;
    String analyzerName;

    // find schema, table and analyzer
    try (PreparedStatement ps = conn
            .prepareStatement("SELECT SCHEMA, TABLE, ANALYZER FROM " + FT_TABLE + " WHERE NAME = ?")) {
        ps.setString(1, indexName);
        try (ResultSet res = ps.executeQuery()) {
            if (!res.next()) {
                throw new SQLException("No such index: " + indexName);
            }
            schema = res.getString(1);
            table = res.getString(2);
            analyzerName = res.getString(3);
        }
    }

    int type = getPrimaryKeyType(meta, schema, table);
    SimpleResultSet rs = new SimpleResultSet();
    rs.addColumn(COL_KEY, type, 0, 0);

    if (meta.getURL().startsWith("jdbc:columnlist:")) {
        // this is just to query the result set columns
        return rs;
    }

    // flush changes
    final IndexWriter writer = getIndexWriter(getIndexName(conn), getIndexPath(conn), analyzerName);
    if (writer.hasUncommittedChanges()) {
        try {
            writer.commit();
        } catch (IOException cause) {
            throw convertException(cause);
        }
    }

    // search index
    try {
        BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
        String defaultField = fieldForIndex(indexName);
        Analyzer analyzer = getAnalyzer(analyzerName);
        QueryParser parser = new QueryParser(defaultField, analyzer);
        queryBuilder.add(parser.parse(text), BooleanClause.Occur.MUST);

        try (IndexReader reader = DirectoryReader.open(writer.getDirectory())) {
            IndexSearcher searcher = new IndexSearcher(reader);
            Collector collector = new ResultSetCollector(rs, reader, type);
            searcher.search(queryBuilder.build(), collector);
        }
    } catch (SQLException | ParseException | IOException e) {
        throw convertException(e);
    }
    return rs;
}
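
A note on the pattern above: a reader opened via DirectoryReader.open(writer.getDirectory()) only sees committed changes, which is why the method commits any pending changes before searching. As a sketch (not part of the Nuxeo code), a near-real-time reader opened directly from the writer would also see documents that have been added but not yet committed:

// Sketch only: near-real-time alternative to reading from getDirectory().
// "writer" is the IndexWriter obtained above; uncommitted additions are visible here.
try (IndexReader nrtReader = DirectoryReader.open(writer)) {
    IndexSearcher searcher = new IndexSearcher(nrtReader);
    // ... run the query against the near-real-time view ...
}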

From source file:org.sakaiproject.search.component.dao.impl.SearchIndexBuilderWorkerDaoJdbcImpl.java

License:Educational Community License

private void processAdd(SearchIndexBuilderWorker worker, Connection connection,
        List<SearchBuilderItem> runtimeToDo) throws Exception {
    IndexWriter indexWrite = null;
    try {
        if (worker.isRunning()) {
            indexWrite = indexStorage.getIndexWriter(false);
        }
        long last = System.currentTimeMillis();

        for (Iterator<SearchBuilderItem> tditer = runtimeToDo.iterator(); worker.isRunning()
                && tditer.hasNext();) {

            Reader contentReader = null;
            try {
                SearchBuilderItem sbi = (SearchBuilderItem) tditer.next();
                // only process adds that have been deleted or are locked
                // successfully
                if (!SearchBuilderItem.STATE_PENDING_2.equals(sbi.getSearchstate())
                        && !SearchBuilderItem.STATE_LOCKED.equals(sbi.getSearchstate())) {
                    continue;
                }
                // Reference ref =
                // entityManager.newReference(sbi.getName());
                String ref = sbi.getName();
                if (ref == null) {
                    log.error("Unrecognised trigger object presented to index builder " //$NON-NLS-1$
                            + sbi);
                }

                long startDocIndex = System.currentTimeMillis();
                worker.setStartDocIndex(startDocIndex);
                worker.setNowIndexing(ref);

                try {
                    try {
                        // Entity entity = ref.getEntity();
                        EntityContentProducer sep = searchIndexBuilder.newEntityContentProducer(ref);
                        boolean indexDoc = true;
                        if (searchIndexBuilder.isOnlyIndexSearchToolSites()) {
                            try {
                                String siteId = sep.getSiteId(sbi.getName());
                                Site s = SiteService.getSite(siteId);
                                ToolConfiguration t = s.getToolForCommonId("sakai.search"); //$NON-NLS-1$
                                if (t == null) {
                                    indexDoc = false;
                                    log.debug("Not indexing " //$NON-NLS-1$
                                            + sbi.getName() + " as it has no search tool"); //$NON-NLS-1$
                                }
                            } catch (Exception ex) {
                                indexDoc = false;
                                log.debug("Not indexing  " + sbi.getName() //$NON-NLS-1$
                                        + " as it has no site", ex); //$NON-NLS-1$

                            }
                        }
                        if (indexDoc && sep != null && sep.isForIndex(ref) && sep.getSiteId(ref) != null) {

                            DigestStorageUtil digestStorageUtil = new DigestStorageUtil(searchService);
                            //Reader contentReader = null;
                            Document doc = DocumentIndexingUtils.createIndexDocument(ref, digestStorageUtil,
                                    sep, serverConfigurationService.getServerUrl(), contentReader);
                            //indexDocTMP(ref, sep);

                            log.debug("Indexing Document " + doc); //$NON-NLS-1$

                            indexWrite.addDocument(doc);

                            log.debug("Done Indexing Document " + doc); //$NON-NLS-1$

                            processRDF(ref, sep);

                        } else {
                            if (log.isDebugEnabled()) {
                                if (!indexDoc) {
                                    log.debug("Ignored Document: Fileteed out by site " + ref); //$NON-NLS-1$
                                } else if (sep == null) {
                                    log.debug("Ignored Document: No EntityContentProducer " + ref); //$NON-NLS-1$

                                } else if (!sep.isForIndex(ref)) {
                                    log.debug("Ignored Document: Marked as Ignore " + ref); //$NON-NLS-1$

                                } else if (sep.getSiteId(ref) == null) {
                                    log.debug("Ignored Document: No Site ID " + ref); //$NON-NLS-1$

                                } else {
                                    log.debug("Ignored Document: Reason Unknown " + ref); //$NON-NLS-1$

                                }
                            }
                        }
                    } catch (Exception e1) {
                        log.debug(" Failed to index document for " + ref + " cause: " //$NON-NLS-1$
                                + e1.getMessage(), e1);
                    }
                    sbi.setSearchstate(SearchBuilderItem.STATE_COMPLETED);
                    updateOrSave(connection, sbi);

                } catch (Exception e1) {
                    log.debug(" Failed to index document cause: " //$NON-NLS-1$
                            + e1.getMessage());
                }
                long endDocIndex = System.currentTimeMillis();
                worker.setLastIndex(endDocIndex - startDocIndex);
                if ((endDocIndex - startDocIndex) > 60000L) {
                    log.warn("Slow index operation " //$NON-NLS-1$
                            + String.valueOf((endDocIndex - startDocIndex) / 1000) + " seconds to index " //$NON-NLS-1$
                            + ref);
                }
                // update this node lock to indicate it's
                // still alive; no document should
                // take more than 2 mins to process.
                // Only do this check once every minute.
                long now = System.currentTimeMillis();
                if ((now - last) > (60L * 1000L)) {
                    last = System.currentTimeMillis();
                    if (!worker.getLockTransaction(15L * 60L * 1000L, true)) {
                        throw new Exception("Transaction Lock Expired while indexing " //$NON-NLS-1$
                                + ref);
                    }
                }

            } finally {
                if (contentReader != null) {
                    try {
                        contentReader.close();
                    } catch (IOException ioex) {
                        log.debug(ioex);
                    }
                }
            }

        }
        worker.setStartDocIndex(System.currentTimeMillis());
        worker.setNowIndexing(Messages.getString("SearchIndexBuilderWorkerDaoJdbcImpl.33")); //$NON-NLS-1$
    } catch (Exception ex) {
        log.error("Failed to Add Documents ", ex);
        throw new Exception(ex);
    } finally {
        if (indexWrite != null) {
            if (log.isDebugEnabled()) {
                log.debug("Closing Index Writer With " + indexWrite.maxDoc() + " documents");
                Directory d = indexWrite.getDirectory();
                String[] s = d.listAll();
                log.debug("Directory Contains ");
                for (int i = 0; i < s.length; i++) {
                    File f = new File(s[i]);
                    log.debug("\t" + String.valueOf(f.length()) + "\t" + new Date(f.lastModified()) + "\t"
                            + s[i]);
                }
            }
            indexStorage.closeIndexWriter(indexWrite);
        }
    }

}

From source file:org.sakaiproject.search.component.dao.impl.SearchIndexBuilderWorkerDaoJdbcImpl.java

License:Educational Community License

/**
 * @param worker
 * @param connection
 * @throws Exception
 */
private void createIndex(SearchIndexBuilderWorker worker, Connection connection) throws Exception {
    IndexWriter indexWrite = null;
    try {
        if (worker.isRunning()) {
            indexWrite = indexStorage.getIndexWriter(false);
        }
        if (indexWrite != null) {
            Document doc = new Document();
            //The date of indexing
            String timeStamp = String.valueOf(System.currentTimeMillis());
            doc.add(new Field(SearchService.DATE_STAMP, timeStamp, Field.Store.NO, Field.Index.NOT_ANALYZED));
            doc.add(new Field(SearchService.DATE_STAMP, CompressionTools.compressString(timeStamp),
                    Field.Store.YES));

            String ref = "---INDEX-CREATED---";
            doc.add(new Field(SearchService.FIELD_REFERENCE, CompressionTools.compressString(ref),
                    Field.Store.YES));
            doc.add(new Field(SearchService.FIELD_REFERENCE, ref, Field.Store.NO, Field.Index.NOT_ANALYZED));

            indexWrite.addDocument(doc);
        } else {
            log.error("Couldn't get indexWriter to add document!");
        }

    } catch (Exception ex) {
        log.error("Failed to Add Documents ", ex);
        throw new Exception(ex);
    } finally {
        if (indexWrite != null) {
            if (log.isDebugEnabled()) {
                log.debug("Closing Index Writer With " + indexWrite.maxDoc() + " documents");
                Directory d = indexWrite.getDirectory();
                String[] s = d.listAll();
                log.debug("Directory Contains ");
                for (int i = 0; i < s.length; i++) {
                    File f = new File(s[i]);
                    log.debug("\t" + String.valueOf(f.length()) + "\t" + new Date(f.lastModified()) + "\t"
                            + s[i]);
                }
            }
            indexStorage.closeIndexWriter(indexWrite);
        }
    }
}

From source file:org.sakaiproject.search.optimize.impl.OptimizeTransactionListenerImpl.java

License:Educational Community License

/**
 * commit closes the temporary segment and merges it into the permanent
 * segment. Both the temporary and permanent writers were opened in the
 * prepare phase.
 * 
 * @see org.sakaiproject.search.transaction.api.TransactionListener#commit(org.sakaiproject.search.transaction.api.IndexTransaction)
 */
public void commit(IndexTransaction transaction) throws IndexTransactionException {
    try {
        IndexWriter iw = ((IndexOptimizeTransaction) transaction).getTemporaryIndexWriter();
        Directory d = iw.getDirectory();

        // close the temporary index
        IndexWriter pw = ((IndexOptimizeTransaction) transaction).getPermanentIndexWriter();
        // open the temp writer

        pw.addIndexesNoOptimize(new Directory[] { d });
        pw.optimize();
        pw.commit();
        pw.close();

        iw.close();

        File[] optimzableSegments = ((IndexOptimizeTransaction) transaction).getOptimizableSegments();
        log.info("Optimized Compressed " + optimzableSegments.length + " segments ");
        optimizableIndex.removeOptimizableSegments(optimzableSegments);

    } catch (IOException ioex) {
        throw new OptimizedFailedIndexTransactionException("Failed to commit index merge operation ", ioex);
    }

}
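
The addIndexesNoOptimize and optimize calls above come from an older Lucene API. A rough sketch of the same merge step against current Lucene (the writer names are illustrative, not taken from the Sakai code) might look like this:

// Sketch only: merge a temporary index into the permanent one with the current API.
// temporaryWriter and permanentWriter are assumed to have been opened elsewhere.
Directory tempDir = temporaryWriter.getDirectory();
temporaryWriter.close();                 // close the temporary writer before merging its directory in
permanentWriter.addIndexes(tempDir);     // replaces addIndexesNoOptimize(new Directory[] { d })
permanentWriter.forceMerge(1);           // replaces optimize()
permanentWriter.commit();
permanentWriter.close();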

From source file:org.sonatype.nexus.index.updater.IndexDataReader.java

License:Open Source License

public IndexDataReadResult readIndex(IndexWriter w, IndexingContext context) throws IOException {
    dis.readByte(); // data format version

    long timestamp = dis.readLong();

    Date date = null;

    if (timestamp != -1) {
        date = new Date(timestamp);

        IndexUtils.updateTimestamp(w.getDirectory(), date);
    }

    int n = 0;

    Document doc;
    while ((doc = readDocument()) != null) {
        w.addDocument(IndexUtils.updateDocument(doc, context, false));

        n++;
    }

    w.flush();
    w.optimize();

    IndexDataReadResult result = new IndexDataReadResult();
    result.setDocumentCount(n);
    result.setTimestamp(date);
    return result;
}

From source file:proj.zoie.api.impl.ZoieMergePolicy.java

License:Apache License

/** Returns true if this single info is optimized (has no
 *  pending norms or deletes, is in the same dir as the
 *  writer, and matches the current compound file setting */
@Override
protected boolean isMerged(SegmentInfo info) throws IOException {
    IndexWriter w = writer.get();
    return !info.hasDeletions() && !info.hasSeparateNorms() && info.dir == w.getDirectory()
            && info.getUseCompoundFile() == getUseCompoundFile();
}

From source file:psidev.psi.mi.search.index.impl.InteractorIndexWriter.java

License:Apache License

@Override
public void addBinaryInteractionToIndex(IndexWriter indexWriter, BinaryInteraction binaryInteraction)
        throws IOException, MitabLineException {
    ColumnBasedDocumentDefinition docDefinition = getDocumentBuilder().getDocumentDefinition();

    BinaryInteraction copy1 = binaryInteractionHandler.cloneBinaryInteraction(binaryInteraction);
    BinaryInteraction copy2 = binaryInteractionHandler.cloneBinaryInteraction(binaryInteraction);

    SearchEngine searchEngine = createSearchEngine(indexWriter.getDirectory(), indexWriter);
    // interactor A
    indexBinaryInteraction(indexWriter, searchEngine, copy1, docDefinition);
    // An IndexSearcher sees the index as it was when it was opened; if the index
    // is modified, the searcher needs to be reopened:
    searchEngine.close();
    searchEngine = createSearchEngine(indexWriter.getDirectory(), indexWriter);
    // invert interaction interactors
    invertInteractors(copy2);

    // interactor B
    indexBinaryInteraction(indexWriter, searchEngine, copy2, docDefinition);
    searchEngine.close();
}