Example usage for org.apache.lucene.index IndexWriter close

List of usage examples for org.apache.lucene.index IndexWriter close

Introduction

On this page you can find example usage for org.apache.lucene.index IndexWriter close.

Prototype

@Override
public void close() throws IOException 

Source Link

Document

Closes all open resources and releases the write lock.
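Because IndexWriter implements Closeable, close() is typically called in a finally block or, in recent Lucene versions (5.x and later), via try-with-resources so the write lock is released even when indexing fails. The sketch below is a minimal, hypothetical example; the directory name "index", the field name, and the class name are illustrative only, and it assumes Lucene 5.x or later, where close() also commits pending changes unless commitOnClose is disabled.

import java.nio.file.Paths;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class IndexWriterCloseExample {
    public static void main(String[] args) throws Exception {
        // "index" is an illustrative directory name; adjust to your environment.
        Directory dir = FSDirectory.open(Paths.get("index"));
        IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
        // try-with-resources invokes writer.close(), which writes pending changes
        // and releases the write lock even if addDocument throws.
        try (IndexWriter writer = new IndexWriter(dir, config)) {
            Document doc = new Document();
            doc.add(new TextField("body", "hello lucene", Field.Store.YES));
            writer.addDocument(doc);
        }
        dir.close();
    }
}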

Usage

From source file:com.appeligo.amazon.ProgramIndexer.java

License:Apache License

private void close(IndexWriter writer) {
    if (writer != null) {
        try {
            writer.close();
        } catch (IOException e) {
            log.warn("Cannot close index writer.", e);
        }
    }
}

From source file:com.appeligo.lucene.IndexerQueue.java

License:Apache License

private synchronized void indexDocuments(List<QueueAction> actions, boolean optimize) throws IOException {
    IndexWriter indexWriter = createIndexWriter();
    try {
        for (QueueAction action : actions) {
            if (log.isDebugEnabled()) {
                log.debug("Processing document " + action);
            }
            try {
                action.performAction(indexWriter);
            } catch (IOException e) {
                //If one fails try the rest?  
                log.error("Unable to process action: " + action, e);
            }
        }

        if (optimize) {
            //reset the next optimize time
            Calendar calendar = Calendar.getInstance();
            calendar.setTimeInMillis(nextOptimizeTime);
            calendar.add(Calendar.MINUTE, optimizeDuration);
            nextOptimizeTime = calendar.getTimeInMillis();

            log.info("Optimizing index for " + indexLocation);
            indexWriter.optimize();
            log.info("Finished with optimization for " + indexLocation);
        }
    } finally {
        indexWriter.close();
    }
}

From source file:com.appspot.socialinquirer.server.service.impl.AnalysisServiceImpl.java

License:Apache License

@Override
public List<Tag> getTermVector(String title, String text) {
    RAMDirectory directory = null;
    IndexReader reader = null;
    Map<String, Tag> tagsMap = new HashMap<String, Tag>();

    try {
        directory = new RAMDirectory();

        IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(Version.LUCENE_CURRENT), true,
                MaxFieldLength.UNLIMITED);
        Document doc = new Document();

        doc.add(new Field("title", title, Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.YES));
        doc.add(new Field("body", stripHtmlTags(text, true), Field.Store.YES, Field.Index.ANALYZED,
                Field.TermVector.YES));
        writer.addDocument(doc);

        writer.close();
        reader = IndexReader.open(directory, true);
        int numDocs = reader.maxDoc();
        for (int i = 0; i < numDocs; i++) {
            TermFreqVector termFreqVector = reader.getTermFreqVector(i, "title");
            pullTags(termFreqVector, tagsMap);
            termFreqVector = reader.getTermFreqVector(i, "body");
            pullTags(termFreqVector, tagsMap);
        }

    } catch (Exception e) {
        logger.log(Level.SEVERE, "An error occured while pulling tags from text.", e);
    } finally {
        closeIndexReader(reader);
        closeRAMDirectory(directory);
    }
    ArrayList<Tag> tagsList = new ArrayList<Tag>(tagsMap.values());
    Collections.sort(tagsList, new Comparator<Tag>() {
        @Override
        public int compare(Tag o1, Tag o2) {
            return o2.getFreqency() - o1.getFreqency();
        }
    });

    return tagsList;
}

From source file:com.aurel.track.lucene.index.LuceneIndexer.java

License:Open Source License

/**
 * Initializes an IndexWriter.
 * It will be called from the following places:
 *    -   on system startup workItemWriter should be initialized!
 *                   created = false if no reindex at startup
 *                   created = true if reindex at startup
 *    -   before explicit reIndexing: created = true
 *    -   after reindexing: create = false;
 *    During the adding/editing/deleting of index data the IndexWriter should be initialized with created = false!
 * @param created
 * @param index
 */
public static IndexWriter initWriter(boolean created, int index) {
    Directory indexDirectory = LuceneUtil.getIndexDirectory(index);
    if (indexDirectory == null) {
        LOGGER.error("Can not find or create the index directory for workitems");
        return null;
    }
    Analyzer analyzer = LuceneUtil.getAnalyzer();
    if (analyzer == null) {
        LOGGER.error("Analyzer is null");
        return null;
    }
    IndexWriter indexWriter = getIndexWriter(index);
    if (indexWriter != null) {
        try {
            //release the lock
            indexWriter.close();
        } catch (IOException e) {
            LOGGER.error("Closing the IndexWriter for index " + index + " failed with " + e.getMessage());
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
    }
    try {
        IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
        OpenMode openMode = null;
        if (created) {
            openMode = OpenMode.CREATE;
        } else {
            openMode = OpenMode.APPEND;
        }
        indexWriterConfig.setOpenMode(openMode);
        indexWriter = new IndexWriter(indexDirectory, indexWriterConfig);
    } catch (OverlappingFileLockException e) {
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("OverlappingFileLockException " + e.getMessage());
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
        if (!created) {
            //try again this time with created = true
            try {
                //open for create
                IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
                indexWriterConfig.setOpenMode(OpenMode.CREATE);
                //indexWriter = new IndexWriter(indexDirectory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
                indexWriter = new IndexWriter(indexDirectory, indexWriterConfig);
                //close it in order to reopen it for modifications
                indexWriter.close();
            } catch (IOException e1) {
                LOGGER.error("Creating the IndexWriter for index " + index + " failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
            //try again this time with created = false
            try {
                IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
                indexWriterConfig.setOpenMode(OpenMode.APPEND);
                //indexWriter = new IndexWriter(indexDirectory, analyzer, false, IndexWriter.MaxFieldLength.UNLIMITED);
                indexWriter = new IndexWriter(indexDirectory, indexWriterConfig);
            } catch (IOException e1) {
                LOGGER.error("Creating the IndexWriter for index " + index + " failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
        } else {
            LOGGER.error("Creating the IndexWriter for index " + index + " failed with " + e.getMessage());
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
    } catch (IOException e) {
        //tried probably with created = false, when the index structure doesn't exist yet
        //it is the case when by startup the useLucene is active but reindexOnStartup not
        //we should try to open the writers with false (for modifications) in order to not to destroy the possible existing index
        //but when the index doesn't exists yet the opening of the writer with false fails. And this is the case now.
        if (!created) {
            //try again this time with created = true
            try {
                //open for create
                IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
                indexWriterConfig.setOpenMode(OpenMode.CREATE);
                //indexWriter = new IndexWriter(indexDirectory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
                indexWriter = new IndexWriter(indexDirectory, indexWriterConfig);
                //close it in order to reopen it for modifications
                indexWriter.close();
            } catch (IOException e1) {
                LOGGER.error("Creating the IndexWriter for index " + index + " failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
            //try again this time with created = false
            try {
                IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
                indexWriterConfig.setOpenMode(OpenMode.APPEND);
                //indexWriter = new IndexWriter(indexDirectory, analyzer, false, IndexWriter.MaxFieldLength.UNLIMITED);
                indexWriter = new IndexWriter(indexDirectory, indexWriterConfig);
            } catch (IOException e1) {
                LOGGER.error("Creating the IndexWriter for index " + index + " failed with " + e.getMessage());
                LOGGER.debug(ExceptionUtils.getStackTrace(e));
            }
        } else {
            LOGGER.error("Creating the IndexWriter for index " + index + " failed with " + e.getMessage());
            LOGGER.debug(ExceptionUtils.getStackTrace(e));
        }
    }
    switch (index) {
    case LuceneUtil.INDEXES.WORKITEM_INDEX:
        workItemWriter = indexWriter;
        break;
    case LuceneUtil.INDEXES.NOT_LOCALIZED_LIST_INDEX:
        notLocalizedLookupWriter = indexWriter;
        break;
    case LuceneUtil.INDEXES.LOCALIZED_LIST_INDEX:
        localizedLookupWriter = indexWriter;
        break;
    case LuceneUtil.INDEXES.EXTERNAL_LOOKUP_WRITER:
        externalLookupWriter = indexWriter;
        break;
    case LuceneUtil.INDEXES.ATTACHMENT_INDEX:
        attachmentWriter = indexWriter;
        break;
    case LuceneUtil.INDEXES.EXPENSE_INDEX:
        expenseWriter = indexWriter;
        break;
    case LuceneUtil.INDEXES.BUDGET_PLAN_INDEX:
        budgetPlanWriter = indexWriter;
        break;
    case LuceneUtil.INDEXES.LINK_INDEX:
        linkWriter = indexWriter;
        break;
    default:
        return null;
    }
    return indexWriter;
}

From source file:com.b2international.index.lucene.NullIndexSearcher.java

License:Apache License

private static DirectoryReader createRamReader() throws IOException {

    final RAMDirectory directory = new RAMDirectory();
    if (!DirectoryReader.indexExists(directory)) {

        final IndexWriterConfig conf = new IndexWriterConfig(new WhitespaceAnalyzer());
        final IndexWriter writer = new IndexWriter(directory, conf);
        writer.commit();
        writer.close();
    }

    return DirectoryReader.open(directory);

}

From source file:com.bah.lucene.blockcache_v2.CacheDirectoryTest.java

License:Apache License

@Test
public void test3() throws IOException, InterruptedException {
    // Thread.sleep(30000);
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_43, new KeywordAnalyzer());
    IndexWriter writer = new IndexWriter(_cacheDirectory, conf);
    int docs = 100000;
    for (int i = 0; i < docs; i++) {
        if (i % 500 == 0) {
            System.out.println(i);
        }
        writer.addDocument(newDoc());
        // Thread.sleep(1);
    }
    writer.close();
    System.out.println("done writing");

    DirectoryReader reader = DirectoryReader.open(_cacheDirectory);
    System.out.println("done opening");
    assertEquals(docs, reader.numDocs());

    Document document = reader.document(0);
    System.out.println("done fetching");
    System.out.println(document);

    IndexSearcher searcher = new IndexSearcher(reader);
    TopDocs topDocs = searcher.search(new TermQuery(new Term("test", "test")), 10);
    System.out.println("done searching");
    assertEquals(docs, topDocs.totalHits);

    reader.close();
}

From source file:com.baidu.rigel.biplatform.tesseract.isservice.index.service.IndexWriterFactory.java

License:Open Source License

/**
 * Commits and closes the cached IndexWriter for the given index path and
 * removes it from the writer map.
 * 
 * @param idxPath
 *            the index path whose writer should be destroyed
 * @throws IOException
 *             if an I/O error occurs while releasing the writer
 */
public static void destoryWriters(String idxPath) throws IOException {
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "destoryWriters",
            "[idxPath:" + idxPath + "]"));
    IndexWriter indexWriter = null;
    if (INSTANCE.idxWriterMaps.containsKey(idxPath)) {
        indexWriter = INSTANCE.idxWriterMaps.get(idxPath);

        try {
            indexWriter.commit();
            indexWriter.close();
        } catch (IOException e) {
            if (IndexWriter.isLocked(indexWriter.getDirectory())) {
                IndexWriter.unlock(indexWriter.getDirectory());
            }
        }
        INSTANCE.idxWriterMaps.remove(idxPath);
    }

    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "destoryWriters",
            "[idxPath:" + idxPath + "]"));
}

From source file:com.baidu.rigel.biplatform.tesseract.isservice.index.service.IndexWriterFactory.java

License:Open Source License

/**
 * Commits and closes every cached indexWriter and clears the writer map.
 * 
 * @throws IOException
 *             if an I/O error occurs while releasing a writer
 */
public static synchronized void destoryAllWriters() throws IOException {
    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_BEGIN, "destoryAllWriters", "[no param]"));
    for (String key : INSTANCE.idxWriterMaps.keySet()) {
        IndexWriter writer = INSTANCE.idxWriterMaps.get(key);
        try {
            writer.commit();
            writer.close();
        } catch (IOException e) {
            if (IndexWriter.isLocked(writer.getDirectory())) {
                IndexWriter.unlock(writer.getDirectory());
            }

        }
        INSTANCE.idxWriterMaps.remove(key);
    }

    LOGGER.info(String.format(LogInfoConstants.INFO_PATTERN_FUNCTION_END, "destoryAllWriters", "[no param]"));
}

From source file:com.baidu.rigel.biplatform.tesseract.isservice.netty.service.IndexServerHandler.java

License:Open Source License

public void messageReceived_00(ChannelHandlerContext ctx, Object msg) throws Exception {
    logger.info(String.format(LogInfoConstants.INFO_PATTERN_MESSAGE_RECEIVED_BEGIN, "IndexServerHandler"));
    IndexMessage indexMsg = (IndexMessage) msg;
    // resolve the index path and the index service path from the message
    File idxFile = new File(indexMsg.getIdxPath());
    File idxServiceFile = new File(indexMsg.getIdxServicePath());

    if (indexMsg.getMessageHeader().getAction().equals(NettyAction.NETTY_ACTION_UPDATE)
            || indexMsg.getMessageHeader().getAction().equals(NettyAction.NETTY_ACTION_INITINDEX)) {
        // for update or init actions, remove any existing index files first
        FileUtils.deleteFile(idxFile);
        if (indexMsg.getMessageHeader().getAction().equals(NettyAction.NETTY_ACTION_UPDATE)
                && idxServiceFile.exists()) {
            // for updates, copy the existing service index into the working index path
            FileUtils.copyFolder(indexMsg.getIdxServicePath(), indexMsg.getIdxPath());
        }
    }

    IndexWriter idxWriter = IndexWriterFactory.getIndexWriterWithSingleSlot(indexMsg.getIdxPath());

    TesseractResultSet data = indexMsg.getDataBody();
    long currDiskSize = FileUtils.getDiskSize(indexMsg.getIdxPath());
    BigDecimal currMaxId = null;
    // add documents only while the current index block still has room
    if (currDiskSize < indexMsg.getBlockSize()) {
        while (data.next() && currDiskSize < indexMsg.getBlockSize()) {
            Document doc = new Document();
            String[] fieldNameArr = data.getFieldNameArray();
            for (String select : fieldNameArr) {
                if (select.equals(indexMsg.getIdName())) {
                    currMaxId = data.getBigDecimal(select);
                }

                doc.add(new StringField(select, data.getString(select), Field.Store.NO));
            }

            idxWriter.addDocument(doc);
        }
        idxWriter.commit();
        idxWriter.close();

    }

    String feedBackIndexServicePath = null;
    String feedBackIndexFilePath = null;

    // when the block is full or this is the last piece, release the writers and swap the paths returned in the feedback message
    long totalDiskSize = FileUtils.getDiskSize(indexMsg.getIdxPath());
    if (totalDiskSize > indexMsg.getBlockSize() || indexMsg.isLastPiece()) {
        IndexWriterFactory.destoryWriters(indexMsg.getIdxPath());
        feedBackIndexServicePath = indexMsg.getIdxPath();
        feedBackIndexFilePath = indexMsg.getIdxServicePath();
    } else {
        feedBackIndexServicePath = indexMsg.getIdxServicePath();
        feedBackIndexFilePath = indexMsg.getIdxPath();
    }

    MessageHeader messageHeader = new MessageHeader(NettyAction.NETTY_ACTION_INDEX_FEEDBACK);

    IndexMessage indexFeedbackMsg = new IndexMessage(messageHeader, indexMsg.getDataBody());
    indexFeedbackMsg.setBlockSize(indexMsg.getBlockSize());
    indexFeedbackMsg.setDiskSize(totalDiskSize);
    indexFeedbackMsg.setIdxServicePath(feedBackIndexServicePath);
    indexFeedbackMsg.setIdxPath(feedBackIndexFilePath);
    indexFeedbackMsg.setIdName(indexMsg.getIdName());
    indexFeedbackMsg.setMaxId(currMaxId);
    ctx.writeAndFlush(indexFeedbackMsg);
    ctx.channel().close();
    logger.info(String.format(LogInfoConstants.INFO_PATTERN_MESSAGE_RECEIVED_END, "IndexServerHandler"));
}

From source file:com.bala.learning.learning.luence.IndexFiles.java

License:Apache License

/** Index all text files under a directory. */
public static void main(String[] args) {
    String usage = "java org.apache.lucene.demo.IndexFiles"
            + " [-index INDEX_PATH] [-docs DOCS_PATH] [-update]\n\n"
            + "This indexes the documents in DOCS_PATH, creating a Lucene index"
            + "in INDEX_PATH that can be searched with SearchFiles";
    String indexPath = "index";
    String docsPath = null;
    boolean create = true;
    for (int i = 0; i < args.length; i++) {
        if ("-index".equals(args[i])) {
            indexPath = args[i + 1];
            i++;
        } else if ("-docs".equals(args[i])) {
            docsPath = args[i + 1];
            i++;
        } else if ("-update".equals(args[i])) {
            create = false;
        }
    }

    if (docsPath == null) {
        System.err.println("Usage: " + usage);
        System.exit(1);
    }

    final Path docDir = Paths.get(docsPath);
    if (!Files.isReadable(docDir)) {
        System.out.println("Document directory '" + docDir.toAbsolutePath()
                + "' does not exist or is not readable, please check the path");
        System.exit(1);
    }

    Date start = new Date();
    try {
        System.out.println("Indexing to directory '" + indexPath + "'...");

        Directory dir = FSDirectory.open(Paths.get(indexPath));
        Analyzer analyzer = new StandardAnalyzer();
        IndexWriterConfig iwc = new IndexWriterConfig(analyzer);

        if (create) {
            // Create a new index in the directory, removing any
            // previously indexed documents:
            iwc.setOpenMode(OpenMode.CREATE);
        } else {
            // Add new documents to an existing index:
            iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
        }

        // Optional: for better indexing performance, if you
        // are indexing many documents, increase the RAM
        // buffer.  But if you do this, increase the max heap
        // size to the JVM (eg add -Xmx512m or -Xmx1g):
        //
        // iwc.setRAMBufferSizeMB(256.0);

        IndexWriter writer = new IndexWriter(dir, iwc);
        indexDocs(writer, docDir);

        // NOTE: if you want to maximize search performance,
        // you can optionally call forceMerge here.  This can be
        // a terribly costly operation, so generally it's only
        // worth it when your index is relatively static (ie
        // you're done adding documents to it):
        //
        // writer.forceMerge(1);

        writer.close();

        Date end = new Date();
        System.out.println(end.getTime() - start.getTime() + " total milliseconds");

    } catch (IOException e) {
        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());
    }
}