Example usage for org.apache.lucene.index IndexWriter close

List of usage examples for org.apache.lucene.index IndexWriter close

Introduction

On this page you can find usage examples for org.apache.lucene.index IndexWriter close.

Prototype

@Override
public void close() throws IOException 

Document

Closes all open resources and releases the write lock.
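
As a quick orientation before the project examples below, here is a minimal sketch of the usual pattern: open an IndexWriter, add documents, and close it in a finally block (or with try-with-resources, since close() is inherited from Closeable) so the write lock is always released. The sketch is written against the Lucene 4.x API style used by most of the examples on this page; the index path, field name, and class name are illustrative assumptions, not taken from any of the listings below.

import java.io.File;
import java.io.IOException;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;

public class CloseSketch {
    public static void main(String[] args) throws IOException {
        Directory dir = FSDirectory.open(new File("example-index")); // illustrative path
        IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_40,
                new StandardAnalyzer(Version.LUCENE_40));

        IndexWriter writer = new IndexWriter(dir, iwc);
        try {
            Document doc = new Document();
            doc.add(new TextField("body", "hello lucene", Field.Store.YES)); // illustrative field
            writer.addDocument(doc);
        } finally {
            // close() flushes pending changes, closes all open resources
            // and releases the write lock, even if addDocument failed.
            writer.close();
        }
        dir.close();
    }
}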

Usage

From source file: axiom.objectmodel.dom.convert.LuceneConvertor.java

License: Open Source License

public void convert(Application app, File dbhome) throws Exception {
    FSDirectory indexDir = FSDirectory.getDirectory(dbhome, false);
    if (indexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) indexDir;
        TransSource source = app.getTransSource();
        d.setDriverClass(source.getDriverClass());
        d.setUrl(source.getUrl());
        d.setUser(source.getUser());
        d.setPassword(source.getPassword());
    }
    File ndbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_tmp");
    File olddbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_old");
    FSDirectory nindexDir = FSDirectory.getDirectory(ndbhome, true);
    if (nindexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) nindexDir;
        TransSource source = app.getTransSource();
        d.setDriverClass(source.getDriverClass());
        d.setUrl(source.getUrl());
        d.setUser(source.getUser());
        d.setPassword(source.getPassword());
    }

    IndexSearcher searcher = null;
    IndexWriter writer = null;
    LuceneManager lmgr = null;

    try {
        searcher = new IndexSearcher(indexDir);
        PerFieldAnalyzerWrapper a = LuceneManager.buildAnalyzer();
        writer = IndexWriterManager.getWriter(nindexDir, a, true);
        final int numDocs = searcher.getIndexReader().numDocs();

        HashSet deldocs = new HashSet();
        HashMap infos = new HashMap();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE);
            int layer = -1;
            try {
                layer = Integer.parseInt(layerStr);
            } catch (Exception ex) {
                layer = -1;
            }
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)/* && layer == DbKey.LIVE_LAYER*/) {
                deldocs.add(id);
            } else {
                Object v;
                if ((v = infos.get(id)) == null) {
                    infos.put(id, new Integer(i));
                } else {
                    final String lmod = doc.get(LuceneManager.LASTMODIFIED);
                    final String lmod_prev = searcher.doc(((Integer) v).intValue()).get("_lastmodified");
                    if (lmod_prev == null || (lmod != null && lmod.compareTo(lmod_prev) > 0)) {
                        infos.put(id, new Integer(i));
                    }
                }
            }
        }

        ArrayList listOfMaps = new ArrayList();

        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE);
            int layer = -1;
            try {
                layer = Integer.parseInt(layerStr);
            } catch (Exception ex) {
                layer = -1;
            }
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)) {
                continue;
            } else if (id != null && deldocs.contains(id)/* && layer == DbKey.LIVE_LAYER*/) {
                continue;
            }

            Integer idx = (Integer) infos.get(id);
            if (idx != null && i != idx.intValue()) {
                continue;
            }

            Document ndoc = convertDocument(doc);

            if (this.recordNodes) {
                listOfMaps.add(LuceneManager.luceneDocumentToMap(doc));
            }

            if (ndoc != null) {
                writer.addDocument(ndoc);
            }
        }

        if (this.recordNodes) {
            lmgr = new LuceneManager(this.app, false, true);
            this.allNodes = new HashMap();
            final int size = listOfMaps.size();
            for (int i = 0; i < size; i++) {
                HashMap m = (HashMap) listOfMaps.get(i);
                INode n = lmgr.mapToNode(m);
                this.allNodes.put(n.getID(), getPath(n));
                n = null;
            }
        }

    } catch (Exception ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    } finally {
        if (searcher != null) {
            try {
                searcher.close();
            } catch (Exception ex) {
                app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex);
            }
        }

        if (lmgr != null) {
            lmgr.shutdown();
            lmgr = null;
        }

        indexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(indexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(indexDir);
    }

    Connection conn = null;
    boolean exceptionOccured = false;

    try {
        if (writer != null) {
            TransSource ts = app.getTransSource();
            conn = ts.getConnection();

            DatabaseMetaData dmd = conn.getMetaData();
            ResultSet rs = dmd.getColumns(null, null, "Lucene", "version");
            if (!rs.next()) {
                final String alterTbl = "ALTER TABLE Lucene ADD version INT NOT NULL DEFAULT 1";
                PreparedStatement pstmt = null;
                try {
                    pstmt = conn.prepareStatement(alterTbl);
                    pstmt.execute();
                } catch (SQLException sqle) {
                    app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), sqle);
                } finally {
                    if (pstmt != null) {
                        pstmt.close();
                        pstmt = null;
                    }
                }
            }
            rs.close();
            rs = null;

            writer.close();
            writer.flushCache();//TODO:writer.writeSegmentsFile();
            LuceneManager.commitSegments(conn, app, writer.getDirectory());
            writer.finalizeTrans();

            this.updateSQL(conn);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        exceptionOccured = true;
        throw new RuntimeException(ex);
    } finally {
        if (conn != null) {
            try {
                if (!conn.getAutoCommit()) {
                    if (!exceptionOccured) {
                        conn.commit();
                    } else {
                        conn.rollback();
                    }
                }
                conn.close();
            } catch (Exception ex) {
                app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex);
            }
            conn = null;
        }

        nindexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(nindexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(nindexDir);
    }

    if (!dbhome.renameTo(olddbhome)) {
        throw new Exception("Could not move the old version of the db into " + olddbhome);
    }

    if (!ndbhome.renameTo(dbhome)) {
        throw new Exception("Could not move the newer version of the db into " + dbhome);
    }

    File oldBlobDir = new File(olddbhome, "blob");
    File newBlobDir = new File(ndbhome, "blob");
    oldBlobDir.renameTo(newBlobDir);

    if (!FileUtils.deleteDir(olddbhome)) {
        throw new Exception("Could not delete the old version of the db at " + olddbhome);
    }
}

From source file: axiom.objectmodel.dom.IndexWriterManager.java

License: Open Source License

public IndexWriter releaseWriter(IndexWriter writer) throws Exception {
    if (writer != null) {
        try {
            writer.close();
        } catch (Exception ex) {
            this.app.logError(ErrorReporter.errorMsg(this.getClass(), "releaseWriter"), ex);
            throw new DatabaseException(
                    "ERROR in IndexWriterManager.releaseWriter(): Could not release the index writer");
        }
    }
    return writer;
}

From source file: back.Indexer.java

License: Apache License

/** Index all text files under a directory. */
public static void main(String[] args) {
    String usage = "java org.apache.lucene.demo.IndexFiles"
            + " [-index INDEX_PATH] [-docs DOCS_PATH] [-update]\n\n"
            + "This indexes the documents in DOCS_PATH, creating a Lucene index "
            + "in INDEX_PATH that can be searched with SearchFiles";
    String indexPath = ".\\indexed";
    String docsPath = ".//artigos";
    boolean create = true;
    for (int i = 0; i < args.length; i++) {
        if ("-index".equals(args[i])) {
            indexPath = args[i + 1];
            i++;
        } else if ("-docs".equals(args[i])) {
            docsPath = args[i + 1];
            i++;
        } else if ("-update".equals(args[i])) {
            create = false;
        }
    }

    if (docsPath == null) {
        System.err.println("Usage: " + usage);
        System.exit(1);
    }

    final File docDir = new File(docsPath);
    if (!docDir.exists() || !docDir.canRead()) {
        System.out.println("Document directory '" + docDir.getAbsolutePath()
                + "' does not exist or is not readable, please check the path");
        System.exit(1);
    }

    Date start = new Date();
    try {
        System.out.println("Indexing to directory '" + indexPath + "'...");

        Directory dir = FSDirectory.open(new File(indexPath));
        Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT,
                new CharArraySet(Version.LUCENE_CURRENT, 0, false));
        IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_40, analyzer);

        if (create) {
            // Create a new index in the directory, removing any
            // previously indexed documents:
            iwc.setOpenMode(OpenMode.CREATE);
        } else {
            // Add new documents to an existing index:
            iwc.setOpenMode(OpenMode.CREATE_OR_APPEND);
        }

        // Optional: for better indexing performance, if you
        // are indexing many documents, increase the RAM
        // buffer.  But if you do this, increase the max heap
        // size to the JVM (eg add -Xmx512m or -Xmx1g):
        //
        // iwc.setRAMBufferSizeMB(256.0);

        IndexWriter writer = new IndexWriter(dir, iwc);
        indexDocs(writer, docDir);

        // NOTE: if you want to maximize search performance,
        // you can optionally call forceMerge here.  This can be
        // a terribly costly operation, so generally it's only
        // worth it when your index is relatively static (ie
        // you're done adding documents to it):
        //
        // writer.forceMerge(1);

        writer.close();

        Date end = new Date();
        System.out.println(end.getTime() - start.getTime() + " total milliseconds");

    } catch (IOException e) {
        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());
    }
}

From source file: bajavista.IndiceInvertido.java

public void crearIndiceInvertido() throws IOException {
    // 0. Specify the analyzer for tokenizing text.
    //    The same analyzer should be used for indexing and searching
    StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_43);

    // 1. Create the index 
    File indexDirES = new File(dirIndexES);
    Directory indexES = FSDirectory.open(indexDirES);
    //File indexDirNONES = new File(dirIndexNONES);
    //Directory indexNONES = FSDirectory.open(indexDirNONES);

    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_43, analyzer);

    IndexWriter wES = new IndexWriter(indexES, config);
    //IndexWriter wNONES = new IndexWriter(indexNONES, config);

    ConexionBD db = new ConexionBD();
    try {
        try (PreparedStatement consulta = db.getConnection().prepareStatement("SELECT * FROM Tweet");
                ResultSet res = consulta.executeQuery()) {
            while (res.next()) {
                //System.out.println(res.getString("idUser") +" "+ res.getString("timestamp") +" "+ res.getString("text") +" "+ res.getString("objective") +" "+ res.getString("subjective") +" "+ res.getString("positive") +" "+ res.getString("negative") +" "+ res.getString("need"));
                agregarDoc(wES, res.getString("idUser"), res.getString("timestamp"), res.getString("text"),
                        res.getString("objective"), res.getString("subjective"), res.getString("positive"),
                        res.getString("negative"), res.getString("need"));
            }

        }

    } catch (Exception e) {
        System.out.print("No se pudo consultar a la base de datos\n" + e);
    }

    //    try {
    //            File f = new File(baseDatosNONES);
    //            FileReader fr = new FileReader(f);
    //            BufferedReader br = new BufferedReader(fr);
    //            String linea = br.readLine();
    //
    //            while ((linea = br.readLine()) != null) {
    //      StringTokenizer separarLinea = new StringTokenizer(linea, "|");
    //      String next = separarLinea.nextToken();;
    //                String next1 = separarLinea.nextToken();;
    //                String next2 = separarLinea.nextToken();;
    //                
    //                addDoc(wNONES, next, next1,next2);
    //            }
    //
    //            fr.close();
    //        } catch (Exception e) {
    //            System.out.println("Error en la lectura del archivo...");
    //        }
    db.desconectar();

    wES.close();
    //wNONES.close();
}

From source file: be.iRail.BeLaws.Indexer.java

License: Apache License

/** Index all text files under a directory. */
private void index() {
    INDEX_DIR = new File(indexpath);
    final File docDir = new File(lawspath);
    if (!docDir.exists() || !docDir.canRead()) {
        System.out.println("Document directory '" + docDir.getAbsolutePath()
                + "' does not exist or is not readable, please check the path");

    } else {

        Date start = new Date();
        try {
            IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR),
                    new StandardAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
            System.out.println("Indexing to directory '" + INDEX_DIR + "'...");
            indexDocs(writer, docDir);
            System.out.println("Optimizing...");
            writer.optimize();
            writer.close();

            Date end = new Date();
            System.out.println(end.getTime() - start.getTime() + " total milliseconds");

        } catch (IOException e) {
            System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());
        }
    }
}

From source file: BlockBuilding.AbstractBlockBuilding.java

License: Apache License

protected void closeWriter(IndexWriter iWriter) {
    try {
        iWriter.close();
    } catch (IOException ex) {
        LOGGER.log(Level.SEVERE, null, ex);
    }
}

From source file: BlockBuilding.AbstractIndexBasedMethod.java

License: Open Source License

protected void closeWriter(IndexWriter iWriter) {
    try {
        iWriter.close();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
}

From source file: br.andrew.lucene.testing.IndexFiles.java

License: Apache License

/** Index all text files under a directory. */
public static void main(final String[] args) {
    final String usage = "java org.apache.lucene.demo.IndexFiles"
            + " [-index INDEX_PATH] [-docs DOCS_PATH] [-update]\n\n"
            + "This indexes the documents in DOCS_PATH, creating a Lucene index "
            + "in INDEX_PATH that can be searched with SearchFiles";
    final String indexPath = "index";

    final File docDir = new File("data");

    final Date start = new Date();
    try {
        System.out.println("Indexing to directory '" + indexPath + "'...");

        final Directory dir = FSDirectory.open(new File(indexPath));
        final Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_40);
        final IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_40, analyzer);

        iwc.setOpenMode(OpenMode.CREATE);

        // Optional: for better indexing performance, if you
        // are indexing many documents, increase the RAM
        // buffer.  But if you do this, increase the max heap
        // size to the JVM (eg add -Xmx512m or -Xmx1g):
        //
        // iwc.setRAMBufferSizeMB(256.0);

        final IndexWriter writer = new IndexWriter(dir, iwc);
        IndexFiles.indexDocs(writer, docDir);

        // NOTE: if you want to maximize search performance,
        // you can optionally call forceMerge here.  This can be
        // a terribly costly operation, so generally it's only
        // worth it when your index is relatively static (ie
        // you're done adding documents to it):
        //
        // writer.forceMerge(1);

        writer.close();

        final Date end = new Date();
        System.out.println(end.getTime() - start.getTime() + " total milliseconds");

    } catch (final IOException e) {
        System.out.println(" caught a " + e.getClass() + "\n with message: " + e.getMessage());
    }
}

From source file: br.bireme.ngrams.NGrams.java

public static void index(final NGIndex index, final NGSchema schema, final String inFile,
        final String inFileEncoding) throws IOException, ParseException {
    if (index == null) {
        throw new NullPointerException("index");
    }
    if (schema == null) {
        throw new NullPointerException("schema");
    }
    if (inFile == null) {
        throw new NullPointerException("inFile");
    }
    if (inFileEncoding == null) {
        throw new NullPointerException("inFileEncoding");
    }

    final Charset charset = Charset.forName(inFileEncoding);
    final IndexWriter writer = index.getIndexWriter(false);
    int cur = 0;

    try (BufferedReader reader = Files.newBufferedReader(new File(inFile).toPath(), charset)) {
        writer.deleteAll();

        while (true) {
            final String line;
            try {
                line = reader.readLine();
            } catch (MalformedInputException mie) {
                System.err.println("Line with another encoding. Line number:" + (++cur));
                continue;
            }
            if (line == null) {
                break;
            }
            final boolean ret = indexDocument(index, writer, schema, line, false);
            if (ret && (++cur % 100000 == 0)) {
                System.out.println(">>> " + cur);
            }
        }
        writer.forceMerge(1); // optimize index
        writer.close();
    }
}

From source file: br.com.crawlerspring.model.Searcher.java

public void prepareSearch() throws IOException {

    IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_40, analyzer);

    IndexWriter writer = new IndexWriter(index, config);

    for (br.com.crawlerspring.model.Document document : documentDao.getDocuments()) {
        addDoc(writer, document.getTitle(), document.getContent());
    }

    writer.close();
}