List of usage examples for org.apache.lucene.search.IndexSearcher.doc

    public Document doc(int docID) throws IOException

Returns the stored fields of the document with the given document ID; this is sugar for searcher.getIndexReader().document(docID).
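Before the project-specific examples below, here is a minimal, self-contained sketch of the call in isolation. It assumes a Lucene 4.x-era API (matching the bajavista.Buscador example further down); the index path "/path/to/index" and the field names "body" and "title" are placeholders, not taken from any of the listed projects.

import java.io.File;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;

public class IndexSearcherDocExample {
    public static void main(String[] args) throws Exception {
        // Open an existing index; the path and field names are placeholders.
        Directory dir = FSDirectory.open(new File("/path/to/index"));
        IndexReader reader = DirectoryReader.open(dir);
        IndexSearcher searcher = new IndexSearcher(reader);

        Query query = new QueryParser(Version.LUCENE_43, "body",
                new StandardAnalyzer(Version.LUCENE_43)).parse("lucene");
        ScoreDoc[] hits = searcher.search(query, 10).scoreDocs;

        for (ScoreDoc hit : hits) {
            // IndexSearcher.doc(int) loads the stored fields of the matching document.
            Document doc = searcher.doc(hit.doc);
            System.out.println(doc.get("title"));
        }
        reader.close();
    }
}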
From source file:au.org.ala.names.search.ALANameIndexer.java
License:Open Source License
private String getValueFromIndex(IndexSearcher is, String searchField, String value, String retField) {
    TermQuery tq = new TermQuery(new Term(searchField, value));
    try {
        org.apache.lucene.search.TopDocs results = is.search(tq, 1);
        if (results.totalHits > 0)
            return is.doc(results.scoreDocs[0].doc).get(retField);
    } catch (IOException e) {
        e.printStackTrace();
    }
    return value;
}
From source file:axiom.db.utils.LuceneManipulator.java
License:Open Source License
public void compress(String dbDir) throws Exception {
    System.setProperty("org.apache.lucene.FSDirectory.class", "org.apache.lucene.store.TransFSDirectory");
    File dbhome = new File(dbDir);
    String url = getUrl(dbhome);
    FSDirectory indexDir = FSDirectory.getDirectory(dbhome, false);
    if (indexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) indexDir;
        d.setDriverClass(DRIVER_CLASS);
        d.setUrl(url);
        d.setUser(null);
        d.setPassword(null);
    }
    File ndbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_tmp");
    File olddbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_old");
    FSDirectory nindexDir = FSDirectory.getDirectory(ndbhome, true);
    if (nindexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) nindexDir;
        d.setDriverClass(DRIVER_CLASS);
        d.setUrl(url);
        d.setUser(null);
        d.setPassword(null);
    }
    IndexSearcher searcher = null;
    IndexWriter writer = null;
    LuceneManager lmgr = null;
    try {
        searcher = new IndexSearcher(indexDir);
        PerFieldAnalyzerWrapper a = LuceneManager.buildAnalyzer();
        writer = IndexWriterManager.getWriter(nindexDir, a, true);
        final int numDocs = searcher.getIndexReader().numDocs();
        HashSet deldocs = new HashSet();
        HashMap infos = new HashMap();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)) {
                deldocs.add(id);
            } else {
                Object v;
                if ((v = infos.get(id)) == null) {
                    infos.put(id, new Integer(i));
                } else {
                    final String lmod = doc.get(LuceneManager.LASTMODIFIED);
                    final String lmod_prev = searcher.doc(((Integer) v).intValue()).get("_lastmodified");
                    if (lmod_prev == null || (lmod != null && lmod.compareTo(lmod_prev) > 0)) {
                        infos.put(id, new Integer(i));
                    }
                }
            }
        }
        ArrayList listOfMaps = new ArrayList();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE);
            int layer = -1;
            try {
                layer = Integer.parseInt(layerStr);
            } catch (Exception ex) {
                layer = -1;
            }
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)) {
                continue;
            } else if (id != null && deldocs.contains(id)) {
                continue;
            }
            Integer idx = (Integer) infos.get(id);
            if (idx != null && i != idx.intValue()) {
                continue;
            }
            Document ndoc = convertDocument(doc);
            if (ndoc != null) {
                writer.addDocument(ndoc);
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    } finally {
        if (searcher != null) {
            try {
                searcher.close();
            } catch (Exception ex) {
            }
        }
        if (lmgr != null) {
            lmgr.shutdown();
            lmgr = null;
        }
        indexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(indexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(indexDir);
    }
    Connection conn = null;
    boolean exceptionOccured = false;
    try {
        if (writer != null) {
            conn = DriverManager.getConnection(url);
            conn.setAutoCommit(false);
            writer.close();
            writer.flushCache();
            LuceneManager.commitSegments(null, conn, dbhome, writer.getDirectory());
            writer.finalizeTrans();
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        exceptionOccured = true;
        throw new RuntimeException(ex);
    } finally {
        if (conn != null) {
            try {
                if (!conn.getAutoCommit()) {
                    if (!exceptionOccured) {
                        conn.commit();
                    } else {
                        conn.rollback();
                    }
                }
                conn.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            conn = null;
        }
        nindexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(nindexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(nindexDir);
    }
    File[] files = dbhome.listFiles();
    for (int i = 0; i < files.length; i++) {
        if (!files[i].isDirectory()) {
            files[i].delete();
        }
    }
    files = ndbhome.listFiles();
    for (int i = 0; i < files.length; i++) {
        if (!files[i].isDirectory()) {
            File nfile = new File(dbhome, files[i].getName());
            files[i].renameTo(nfile);
        }
    }
    if (!FileUtils.deleteDir(ndbhome)) {
        throw new Exception("Could not delete " + ndbhome);
    }
}
From source file:axiom.objectmodel.dom.convert.LuceneConvertor.java
License:Open Source License
public void convert(Application app, File dbhome) throws Exception {
    FSDirectory indexDir = FSDirectory.getDirectory(dbhome, false);
    if (indexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) indexDir;
        TransSource source = app.getTransSource();
        d.setDriverClass(source.getDriverClass());
        d.setUrl(source.getUrl());
        d.setUser(source.getUser());
        d.setPassword(source.getPassword());
    }
    File ndbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_tmp");
    File olddbhome = new File(dbhome.getParentFile(), dbhome.getName() + "_old");
    FSDirectory nindexDir = FSDirectory.getDirectory(ndbhome, true);
    if (nindexDir instanceof TransFSDirectory) {
        FSDirectory.setDisableLocks(true);
        TransFSDirectory d = (TransFSDirectory) nindexDir;
        TransSource source = app.getTransSource();
        d.setDriverClass(source.getDriverClass());
        d.setUrl(source.getUrl());
        d.setUser(source.getUser());
        d.setPassword(source.getPassword());
    }
    IndexSearcher searcher = null;
    IndexWriter writer = null;
    LuceneManager lmgr = null;
    try {
        searcher = new IndexSearcher(indexDir);
        PerFieldAnalyzerWrapper a = LuceneManager.buildAnalyzer();
        writer = IndexWriterManager.getWriter(nindexDir, a, true);
        final int numDocs = searcher.getIndexReader().numDocs();
        HashSet deldocs = new HashSet();
        HashMap infos = new HashMap();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE);
            int layer = -1;
            try {
                layer = Integer.parseInt(layerStr);
            } catch (Exception ex) {
                layer = -1;
            }
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)/* && layer == DbKey.LIVE_LAYER*/) {
                deldocs.add(id);
            } else {
                Object v;
                if ((v = infos.get(id)) == null) {
                    infos.put(id, new Integer(i));
                } else {
                    final String lmod = doc.get(LuceneManager.LASTMODIFIED);
                    final String lmod_prev = searcher.doc(((Integer) v).intValue()).get("_lastmodified");
                    if (lmod_prev == null || (lmod != null && lmod.compareTo(lmod_prev) > 0)) {
                        infos.put(id, new Integer(i));
                    }
                }
            }
        }
        ArrayList listOfMaps = new ArrayList();
        for (int i = 0; i < numDocs; i++) {
            Document doc = searcher.doc(i);
            String delprop = doc.get(DeletedInfos.DELETED);
            String layerStr = doc.get(LuceneManager.LAYER_OF_SAVE);
            int layer = -1;
            try {
                layer = Integer.parseInt(layerStr);
            } catch (Exception ex) {
                layer = -1;
            }
            final String id = doc.get(LuceneManager.ID) + DeletedInfos.KEY_SEPERATOR
                    + doc.get(LuceneManager.LAYER_OF_SAVE);
            if (delprop != null && "true".equals(delprop)) {
                continue;
            } else if (id != null && deldocs.contains(id)/* && layer == DbKey.LIVE_LAYER*/) {
                continue;
            }
            Integer idx = (Integer) infos.get(id);
            if (idx != null && i != idx.intValue()) {
                continue;
            }
            Document ndoc = convertDocument(doc);
            if (this.recordNodes) {
                listOfMaps.add(LuceneManager.luceneDocumentToMap(doc));
            }
            if (ndoc != null) {
                writer.addDocument(ndoc);
            }
        }
        if (this.recordNodes) {
            lmgr = new LuceneManager(this.app, false, true);
            this.allNodes = new HashMap();
            final int size = listOfMaps.size();
            for (int i = 0; i < size; i++) {
                HashMap m = (HashMap) listOfMaps.get(i);
                INode n = lmgr.mapToNode(m);
                this.allNodes.put(n.getID(), getPath(n));
                n = null;
            }
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        throw new RuntimeException(ex);
    } finally {
        if (searcher != null) {
            try {
                searcher.close();
            } catch (Exception ex) {
                app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex);
            }
        }
        if (lmgr != null) {
            lmgr.shutdown();
            lmgr = null;
        }
        indexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(indexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(indexDir);
    }
    Connection conn = null;
    boolean exceptionOccured = false;
    try {
        if (writer != null) {
            TransSource ts = app.getTransSource();
            conn = ts.getConnection();
            DatabaseMetaData dmd = conn.getMetaData();
            ResultSet rs = dmd.getColumns(null, null, "Lucene", "version");
            if (!rs.next()) {
                final String alterTbl = "ALTER TABLE Lucene ADD version INT NOT NULL DEFAULT 1";
                PreparedStatement pstmt = null;
                try {
                    pstmt = conn.prepareStatement(alterTbl);
                    pstmt.execute();
                } catch (SQLException sqle) {
                    app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), sqle);
                } finally {
                    if (pstmt != null) {
                        pstmt.close();
                        pstmt = null;
                    }
                }
            }
            rs.close();
            rs = null;
            writer.close();
            writer.flushCache(); // TODO: writer.writeSegmentsFile();
            LuceneManager.commitSegments(conn, app, writer.getDirectory());
            writer.finalizeTrans();
            this.updateSQL(conn);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
        exceptionOccured = true;
        throw new RuntimeException(ex);
    } finally {
        if (conn != null) {
            try {
                if (!conn.getAutoCommit()) {
                    if (!exceptionOccured) {
                        conn.commit();
                    } else {
                        conn.rollback();
                    }
                }
                conn.close();
            } catch (Exception ex) {
                app.logError(ErrorReporter.errorMsg(this.getClass(), "convert"), ex);
            }
            conn = null;
        }
        nindexDir.close();
        SegmentInfos sinfos = IndexObjectsFactory.getFSSegmentInfos(nindexDir);
        sinfos.clear();
        IndexObjectsFactory.removeDeletedInfos(nindexDir);
    }
    if (!dbhome.renameTo(olddbhome)) {
        throw new Exception("Could not move the old version of the db into " + olddbhome);
    }
    if (!ndbhome.renameTo(dbhome)) {
        throw new Exception("Could not move the newer version of the db into " + dbhome);
    }
    File oldBlobDir = new File(olddbhome, "blob");
    File newBlobDir = new File(ndbhome, "blob");
    oldBlobDir.renameTo(newBlobDir);
    if (!FileUtils.deleteDir(olddbhome)) {
        throw new Exception("Could not delete the old version of the db at " + olddbhome);
    }
}
From source file:axiom.scripting.rhino.LuceneQueryDispatcher.java
License:Open Source License
public void luceneResultsToNodes(TopDocs docs, IndexSearcher searcher, ArrayList results, int maxResults,
        HashSet ids, int _layer) throws Exception {
    int hitslen = docs.scoreDocs.length;
    if (hitslen > 0) {
        if (maxResults < 0) {
            maxResults = hitslen;
        }
        final GlobalObject global = this.core != null ? this.core.global : null;
        final Application app = this.app;
        final int mode;
        if (_layer != -1) {
            mode = _layer;
        } else {
            RequestEvaluator reqeval = app.getCurrentRequestEvaluator();
            if (reqeval != null) {
                mode = reqeval.getLayer();
            } else {
                mode = DbKey.LIVE_LAYER;
            }
        }
        final String ID = LuceneManager.ID;
        NodeManager nmgr = this.app.getNodeManager();
        HashSet retrieved = new HashSet();
        for (int i = 0, count = 0; i < hitslen && count < maxResults; i++) {
            Document doc = searcher.doc(docs.scoreDocs[i].doc);
            String id = doc.get(ID);
            if (id == null || (ids != null && !ids.contains(id)) || retrieved.contains(id)) {
                continue;
            }
            Key key = new DbKey(this.app.getDbMapping(doc.get(LuceneManager.PROTOTYPE)), id, mode);
            Node node = nmgr.getNode(key);
            if (node != null) {
                if (global != null) {
                    results.add(Context.toObject(node, global));
                } else {
                    results.add(node);
                }
                retrieved.add(id);
                count++;
            }
        }
    }
}
From source file:axiom.scripting.rhino.LuceneQueryDispatcher.java
License:Open Source License
public int luceneResultsToNodesLength(TopDocs docs, IndexSearcher searcher, int maxResults, HashSet ids,
        int _layer) throws Exception {
    int length = 0;
    int hitslen = docs.scoreDocs.length;
    if (hitslen > 0) {
        if (maxResults < 0) {
            maxResults = hitslen;
        }
        final Application app = this.app;
        final int mode;
        if (_layer != -1) {
            mode = _layer;
        } else {
            RequestEvaluator reqeval = app.getCurrentRequestEvaluator();
            if (reqeval != null) {
                mode = reqeval.getLayer();
            } else {
                mode = DbKey.LIVE_LAYER;
            }
        }
        final String ID = LuceneManager.ID;
        NodeManager nmgr = this.app.getNodeManager();
        HashSet retrieved = new HashSet();
        for (int i = 0, count = 0; i < hitslen && count < maxResults; i++) {
            Document doc = searcher.doc(docs.scoreDocs[i].doc);
            String id = doc.get(ID);
            if (id == null || (ids != null && !ids.contains(id)) || retrieved.contains(id)) {
                continue;
            }
            retrieved.add(id);
            count++;
            length++;
        }
    }
    return length;
}
From source file:axiom.scripting.rhino.LuceneQueryDispatcher.java
License:Open Source License
public void luceneResultsToFields(TopDocs docs, IndexSearcher searcher, ArrayList results, int maxResults,
        HashSet ids, String field, boolean unique, int _layer) throws Exception {
    int hitslen = docs.scoreDocs.length;
    if (hitslen > 0) {
        if (maxResults < 0) {
            maxResults = hitslen;
        }
        final String ID = LuceneManager.ID;
        final int mode;
        if (_layer != -1) {
            mode = _layer;
        } else {
            RequestEvaluator reqeval = this.app.getCurrentRequestEvaluator();
            if (reqeval != null) {
                mode = reqeval.getLayer();
            } else {
                mode = DbKey.LIVE_LAYER;
            }
        }
        NodeManager nmgr = this.app.getNodeManager();
        HashSet retrieved = new HashSet();
        for (int i = 0, count = 0; i < hitslen && count < maxResults; i++) {
            Document d = searcher.doc(docs.scoreDocs[i].doc);
            String id = d.get(ID);
            if (id == null || (ids != null && !ids.contains(id)) || retrieved.contains(id)) {
                continue;
            }
            DbMapping dbmap = this.app.getDbMapping(d.getField(LuceneManager.PROTOTYPE).stringValue());
            Key key = new DbKey(dbmap, d.getField(LuceneManager.ID).stringValue(), mode);
            INode node = nmgr.getNode(key);
            String f = null;
            if (node != null) {
                f = node.getString(field);
            }
            if (f == null) {
                f = d.getField(field).stringValue();
            }
            if (unique) {
                if (!results.contains(f)) {
                    results.add(f);
                    retrieved.add(id);
                    count++;
                }
            } else {
                results.add(f);
                retrieved.add(id);
                count++;
            }
        }
    }
}
From source file:axiom.scripting.rhino.LuceneQueryDispatcher.java
License:Open Source License
private ArrayList hitsToKeys(Object hits, LuceneQueryParams params, int _layer, HashSet ids) throws IOException {
    ArrayList keys = new ArrayList();
    RequestEvaluator reqeval = this.app.getCurrentRequestEvaluator();
    int layer;
    if (_layer != -1) {
        layer = _layer;
    } else {
        layer = DbKey.LIVE_LAYER;
        if (reqeval != null) {
            layer = reqeval.getLayer();
        }
    }
    if (hits instanceof Hits) {
        Hits h = (Hits) hits;
        final int length = h.length();
        HashSet retrieved = new HashSet();
        for (int i = 0; i < length; i++) {
            Document doc = h.doc(i);
            String id = doc.get(LuceneManager.ID);
            String prototype = doc.get(LuceneManager.PROTOTYPE);
            if (id == null || (ids != null && !ids.contains(id)) || retrieved.contains(id)) {
                // if (id == null || retrieved.contains(id)) {
                continue;
            }
            Key key = new DbKey(this.app.getDbMapping(prototype), id, layer);
            keys.add(key);
            retrieved.add(id);
        }
    } else if (hits instanceof TopDocs) {
        TopDocs td = (TopDocs) hits;
        final int length = td.scoreDocs.length;
        HashSet retrieved = new HashSet();
        IndexSearcher searcher = params.searcher;
        for (int i = 0; i < length; i++) {
            Document doc = searcher.doc(td.scoreDocs[i].doc);
            String id = doc.get(LuceneManager.ID);
            String prototype = doc.get(LuceneManager.PROTOTYPE);
            if (id == null || (ids != null && !ids.contains(id)) || retrieved.contains(id)) {
                // if (id == null || retrieved.contains(id)) {
                continue;
            }
            Key key = new DbKey(this.app.getDbMapping(prototype), id, layer);
            keys.add(key);
            retrieved.add(id);
        }
    }
    return keys;
}
From source file:axiom.scripting.rhino.LuceneQueryDispatcher.java
License:Open Source License
public ArrayList getVersionFields(Object obj, Object fields, ArrayList prototypes, IFilter filter, Object options)
        throws Exception {
    String id = null;
    if (obj instanceof String) {
        id = (String) obj;
    } else if (obj instanceof INode) {
        id = ((INode) obj).getID();
    } else if (obj instanceof Scriptable) {
        id = ScriptRuntime.toString(obj);
    } else {
        id = obj.toString();
    }
    Scriptable idfilter = Context.getCurrentContext().newObject(this.core.getScope());
    idfilter.put(LuceneManager.ID, idfilter, id);
    IFilter newFilter = AndFilterObject.filterObjCtor(null, new Object[] { filter, idfilter }, null, false);
    SortObject sort = getSortObject((Scriptable) options);
    ArrayList opaths = getPaths((Scriptable) options);
    int _layer = getLayer((Scriptable) options);
    RequestEvaluator reqeval = this.app.getCurrentRequestEvaluator();
    _layer = (_layer == -1) ? (reqeval != null) ? reqeval.getLayer() : DbKey.LIVE_LAYER : _layer;
    int maxResults = getMaxResults((Scriptable) options);
    IndexSearcher searcher = new IndexSearcher(this.lmgr.getDirectory(), true);
    Object hits = this.luceneHits(prototypes, newFilter, sort, maxResults, opaths, searcher, null, _layer);
    ArrayList<Scriptable> versions = new ArrayList<Scriptable>();
    if (hits != null) {
        int hitslength = hits instanceof Hits ? ((Hits) hits).length() : ((TopDocs) hits).scoreDocs.length;
        if (hitslength > 0) {
            if (maxResults < 0) {
                maxResults = hitslength;
            }
        }
        for (int i = 0, count = 0; i < hitslength && count < maxResults; i++) {
            Document doc = hits instanceof Hits ? ((Hits) hits).doc(i)
                    : searcher.doc(((TopDocs) hits).scoreDocs[i].doc);
            if (doc != null) {
                ArrayList<String> _fields = new ArrayList<String>();
                if (fields instanceof String) {
                    _fields.add((String) fields);
                } else if (fields instanceof Scriptable) {
                    String className = ((Scriptable) fields).getClassName();
                    if (className.equals("String")) {
                        _fields.add(fields.toString());
                    } else if (className.equals("Array")) {
                        Scriptable arr = (Scriptable) fields;
                        final int arrlen = arr.getIds().length;
                        for (int j = 0; j < arrlen; j++) {
                            _fields.add(arr.get(j, arr).toString());
                        }
                    } else {
                        _fields.add(fields.toString());
                    }
                }
                Scriptable version = Context.getCurrentContext().newObject(this.core.getScope());
                for (int j = 0; j < _fields.size(); j++) {
                    String field = _fields.get(j);
                    Object value = doc.get(field) != null ? doc.get(field) : Undefined.instance;
                    version.put(field, version, value);
                }
                count++;
                versions.add(version);
            }
        }
    }
    return versions;
}
From source file:back.Searcher.java
License:Apache License
/**
 * This demonstrates a typical paging search scenario, where the search
 * engine presents pages of size n to the user. The user can then go to the
 * next page if interested in the next hits.
 *
 * When the query is executed for the first time, only enough results
 * are collected to fill 5 result pages. If the user wants to page beyond
 * this limit, the query is executed another time and all hits are
 * collected.
 */
public static void doPagingSearch(BufferedReader in, IndexSearcher searcher, Query query, int hitsPerPage,
        boolean raw, boolean interactive, List<String[]> resu, int consulta) throws IOException {
    // Relevance counts for the mining part
    int[] relevantes = new int[3];
    relevantes[consulta - 1] = 0;
    for (int i = 0; i < 200; i++) {
        String[] lista = resu.get(consulta - 1);
        if (Integer.parseInt(lista[i]) == 1) {
            relevantes[consulta - 1]++;
        }
    }
    int recuperados;
    int relevantesRecuperados = 0;

    // Collect enough docs to show 5 pages
    TopDocs results = searcher.search(query, 5 * hitsPerPage);
    ScoreDoc[] hits = results.scoreDocs;
    int numTotalHits = results.totalHits;
    recuperados = numTotalHits;
    System.out.println(numTotalHits + " total matching documents");

    int start = 0;
    int end = Math.min(numTotalHits, hitsPerPage);

    while (true) {
        if (end > hits.length) {
            System.out.println("Only results 1 - " + hits.length + " of " + numTotalHits
                    + " total matching documents collected.");
            System.out.println("Collect more (y/n) ?");
            String line = in.readLine();
            if (line.length() == 0 || line.charAt(0) == 'n') {
                break;
            }
            hits = searcher.search(query, numTotalHits).scoreDocs;
        }
        end = Math.min(hits.length, start + hitsPerPage); // swapped end for numTotalHits

        // MAIN LOOP
        for (int i = start; i < end; i++) {
            if (raw) { // output raw format
                System.out.println("doc=" + hits[i].doc + " score=" + hits[i].score);
                continue;
            }
            Document doc = searcher.doc(hits[i].doc);
            String path = doc.get("path");
            if (path != null) {
                System.out.print((i + 1) + ". " + path);
                String title = doc.get("title");
                if (title != null) {
                    System.out.println(" Title: " + doc.get("title"));
                }
                int docEncontrado = 0;
                if (path != null) {
                    String docNumberString = path.substring(path.indexOf('#') + 1, path.indexOf(".txt"));
                    docEncontrado = Integer.parseInt(docNumberString);
                }
                if (consulta != 0) {
                    String[] lista = resu.get(consulta - 1);
                    if (Integer.parseInt(lista[docEncontrado - 1]) == 1) {
                        System.out.print(" [RELEVANT]");
                        relevantesRecuperados++;
                    }
                }
                System.out.println();
            } else {
                System.out.println((i + 1) + ". " + "No path for this document");
            }
        }

        // More precision/recall arithmetic for the mining part
        if (consulta != 0) {
            double precision = ((double) relevantesRecuperados) / ((double) recuperados);
            double recall = ((double) relevantesRecuperados) / ((double) relevantes[consulta - 1]);
            double fmeasure = 2 * (precision * recall) / (precision + recall);
            System.out.println("Using information of Relevancy Matrix Row " + consulta);
            System.out.println("Precision: " + precision + " | Recall: " + recall);
            System.out.println("F-Measure: " + fmeasure);
        }

        if (!interactive || end == 0) {
            break;
        }

        if (numTotalHits >= end) {
            boolean quit = false;
            while (true) {
                System.out.print("Press ");
                if (start - hitsPerPage >= 0) {
                    System.out.print("(p)revious page, ");
                }
                if (start + hitsPerPage < numTotalHits) {
                    System.out.print("(n)ext page, ");
                }
                System.out.println("(q)uit or enter number to jump to a page.");
                String line = in.readLine();
                if (line.length() == 0 || line.charAt(0) == 'q') {
                    quit = true;
                    break;
                }
                if (line.charAt(0) == 'p') {
                    start = Math.max(0, start - hitsPerPage);
                    break;
                } else if (line.charAt(0) == 'n') {
                    if (start + hitsPerPage < numTotalHits) {
                        start += hitsPerPage;
                    }
                    break;
                } else {
                    int page = Integer.parseInt(line);
                    if ((page - 1) * hitsPerPage < numTotalHits) {
                        start = (page - 1) * hitsPerPage;
                        break;
                    } else {
                        System.out.println("No such page");
                    }
                }
            }
            if (quit)
                break;
            end = Math.min(numTotalHits, start + hitsPerPage);
        }
    }
}
From source file:bajavista.Buscador.java
public ArrayList<Informacion> buscarContenido(String busqueda) throws IOException, ParseException {
    StandardAnalyzer analyzer = new StandardAnalyzer(Version.LUCENE_43);
    File indexDirES = new File(dirIndexES);
    Directory indexES = FSDirectory.open(indexDirES);
    //File indexDirNONES = new File(dirIndexNONES);
    //Directory indexNONES = FSDirectory.open(indexDirNONES);

    // 2. Query
    String querystr = busqueda;
    Query q = new QueryParser(Version.LUCENE_43, "text", analyzer).parse(querystr);
    //Query qNONES = new QueryParser(Version.LUCENE_43, "contenido", analyzer).parse(querystr);

    // 3. Search
    int hitsPage = 1024;
    IndexReader reader = DirectoryReader.open(indexES);
    IndexSearcher searcher = new IndexSearcher(reader);
    //IndexReader readerNONES = DirectoryReader.open(indexNONES);
    //IndexSearcher searcherNONES = new IndexSearcher(readerNONES);
    TopScoreDocCollector collector = TopScoreDocCollector.create(hitsPage, true);
    //TopScoreDocCollector collectorNONES = TopScoreDocCollector.create(hitsPage, true);
    searcher.search(q, collector);
    //searcherNONES.search(q, collectorNONES);
    ScoreDoc[] hits = collector.topDocs().scoreDocs;
    //ScoreDoc[] hitsNONES = collectorNONES.topDocs().scoreDocs;

    // 4. Return results
    for (int i = 0; i < hits.length; ++i) {
        int docId = hits[i].doc;
        Document data = searcher.doc(docId);
        info = new Informacion(Integer.parseInt(data.get("idUser")), Long.parseLong(data.get("timestamp")),
                data.get("text"), Double.parseDouble(data.get("objective")),
                Double.parseDouble(data.get("subjective")), Double.parseDouble(data.get("positive")),
                Double.parseDouble(data.get("negative")), Integer.parseInt(data.get("need")));
        listaInfo.add(info);
    }
    /*System.out.println("No ES Found " + hitsNONES.length + " hits.");
    for (int i = 0; i < hitsNONES.length; ++i) {
        int docId = hitsNONES[i].doc;
        Document d = searcherNONES.doc(docId);
        System.out.println((i + 1) + ". " + d.get("es") + "\t" + d.get("contenido"));
    }*/
    reader.close();
    //readerNONES.close();
    return listaInfo;
}