List of usage examples for org.apache.lucene.search IndexSearcher search
public <C extends Collector, T> T search(Query query, CollectorManager<C, T> collectorManager) throws IOException
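None of the examples below actually exercise the CollectorManager overload shown above, so here is a minimal sketch of it first. It is not taken from any of the listed source files: it assumes a Lucene version that provides search(Query, CollectorManager) (5.1 or later) and an already-open IndexSearcher; the field name "contents" and the query term "lucene" are placeholders.

import java.io.IOException;
import java.util.Collection;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CollectorManager;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TotalHitCountCollector;

// Counts hits via the search(Query, CollectorManager) overload: one collector is
// created per search slice, and reduce() merges the per-slice counts.
static int countHits(IndexSearcher searcher) throws IOException {
    CollectorManager<TotalHitCountCollector, Integer> manager =
            new CollectorManager<TotalHitCountCollector, Integer>() {
                @Override
                public TotalHitCountCollector newCollector() {
                    // one collector per slice (thread) of the search
                    return new TotalHitCountCollector();
                }

                @Override
                public Integer reduce(Collection<TotalHitCountCollector> collectors) {
                    // merge the per-slice counts into a single result
                    int total = 0;
                    for (TotalHitCountCollector c : collectors) {
                        total += c.getTotalHits();
                    }
                    return total;
                }
            };
    return searcher.search(new TermQuery(new Term("contents", "lucene")), manager);
}

The point of this overload is that when the IndexSearcher is constructed with an Executor, each slice gets its own Collector from newCollector() and the results are combined in reduce(); newer releases also ship ready-made managers (for example TotalHitCountCollectorManager) so that a hand-written anonymous class like the one above is often unnecessary.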
From source file: antnlp.opie.indexsearch.SearchFiles.java
License: Apache License

/** Simple command-line based search demo. */
public static void main(String[] args) throws Exception {
    String usage = "Usage:\tjava org.apache.lucene.demo.SearchFiles [-index dir] [-field f] [-repeat n]"
            + " [-queries file] [-query string] [-raw] [-paging hitsPerPage]\n\n"
            + "See http://lucene.apache.org/core/4_1_0/demo/ for details.";
    if (args.length > 0 && ("-h".equals(args[0]) || "-help".equals(args[0]))) {
        System.out.println(usage);
        System.exit(0);
    }

    String index = "index";
    String field = "contents";
    String queries = null;
    int repeat = 0;
    boolean raw = false;
    String queryString = null;
    int hitsPerPage = 10;

    for (int i = 0; i < args.length; i++) {
        if ("-index".equals(args[i])) {
            index = args[i + 1];
            i++;
        } else if ("-field".equals(args[i])) {
            field = args[i + 1];
            i++;
        } else if ("-queries".equals(args[i])) {
            queries = args[i + 1];
            i++;
        } else if ("-query".equals(args[i])) {
            queryString = args[i + 1];
            i++;
        } else if ("-repeat".equals(args[i])) {
            repeat = Integer.parseInt(args[i + 1]);
            i++;
        } else if ("-raw".equals(args[i])) {
            raw = true;
        } else if ("-paging".equals(args[i])) {
            hitsPerPage = Integer.parseInt(args[i + 1]);
            if (hitsPerPage <= 0) {
                System.err.println("There must be at least 1 hit per page.");
                System.exit(1);
            }
            i++;
        }
    }

    IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(index)));
    IndexSearcher searcher = new IndexSearcher(reader);
    //Analyzer analyzer = new StandardAnalyzer();
    //Analyzer analyzer = new StandardAnalyzer(CharArraySet.EMPTY_SET);
    Analyzer analyzer = new WhitespaceAnalyzer();

    BufferedReader in = null;
    if (queries != null) {
        in = Files.newBufferedReader(Paths.get(queries), StandardCharsets.UTF_8);
    } else {
        in = new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8));
    }

    //QueryParser parser = new QueryParser(field, analyzer);
    QueryBuilder builder = new QueryBuilder(analyzer);
    while (true) {
        if (queries == null && queryString == null) {
            // prompt the user
            System.out.println("Enter query: ");
        }

        String line = queryString != null ? queryString : in.readLine();
        if (line == null) {
            break;
        }
        line = line.trim();
        if (line.length() == 0) {
            break;
        }

        //Query query = parser.parse(line);
        Query query = builder.createPhraseQuery(field, line);
        System.out.println("Searching for: " + query.toString(field));

        if (repeat > 0) { // repeat & time as benchmark
            Date start = new Date();
            for (int i = 0; i < repeat; i++) {
                searcher.search(query, 100);
            }
            Date end = new Date();
            System.out.println("Time: " + (end.getTime() - start.getTime()) + "ms");
        }

        doPagingSearch(in, searcher, query, hitsPerPage, raw, queries == null && queryString == null);

        if (queryString != null) {
            break;
        }
    }
    reader.close();
}
From source file: antnlp.opie.indexsearch.SearchFiles.java
License: Apache License

/**
 * This demonstrates a typical paging search scenario, where the search engine presents
 * pages of size n to the user. The user can then go to the next page if interested in
 * the next hits.
 *
 * When the query is executed for the first time, then only enough results are collected
 * to fill 5 result pages. If the user wants to page beyond this limit, then the query
 * is executed another time and all hits are collected.
 */
public static void doPagingSearch(BufferedReader in, IndexSearcher searcher, Query query, int hitsPerPage,
        boolean raw, boolean interactive) throws IOException {

    // Collect enough docs to show 5 pages
    //TopDocs results = searcher.search(query, 5 * hitsPerPage);
    TopDocs results = searcher.search(query, hitsPerPage);
    ScoreDoc[] hits = results.scoreDocs;

    int numTotalHits = results.totalHits;
    System.out.println(numTotalHits + " total matching documents");

    int start = 0;
    int end = Math.min(numTotalHits, hitsPerPage);

    while (true) {
        if (end > hits.length) {
            System.out.println("Only results 1 - " + hits.length + " of " + numTotalHits
                    + " total matching documents collected.");
            System.out.println("Collect more (y/n) ?");
            String line = in.readLine();
            if (line.length() == 0 || line.charAt(0) == 'n') {
                break;
            }
            hits = searcher.search(query, numTotalHits).scoreDocs;
        }

        end = Math.min(hits.length, start + hitsPerPage);

        for (int i = start; i < end; i++) {
            if (raw) { // output raw format
                System.out.println("doc=" + hits[i].doc + " score=" + hits[i].score);
                continue;
            }

            Document doc = searcher.doc(hits[i].doc);
            String docid = doc.get("docid");
            if (docid != null) {
                System.out.println((i + 1) + ". " + docid);
                String title = doc.get("title");
                if (title != null) {
                    System.out.println(" Title: " + doc.get("title"));
                }
            } else {
                System.out.println((i + 1) + ". " + "No docid for this document");
            }
        }

        if (!interactive || end == 0) {
            break;
        }

        if (numTotalHits >= end) {
            boolean quit = false;
            while (true) {
                System.out.print("Press ");
                if (start - hitsPerPage >= 0) {
                    System.out.print("(p)revious page, ");
                }
                if (start + hitsPerPage < numTotalHits) {
                    System.out.print("(n)ext page, ");
                }
                System.out.println("(q)uit or enter number to jump to a page.");

                String line = in.readLine();
                if (line.length() == 0 || line.charAt(0) == 'q') {
                    quit = true;
                    break;
                }
                if (line.charAt(0) == 'p') {
                    start = Math.max(0, start - hitsPerPage);
                    break;
                } else if (line.charAt(0) == 'n') {
                    if (start + hitsPerPage < numTotalHits) {
                        start += hitsPerPage;
                    }
                    break;
                } else {
                    int page = Integer.parseInt(line);
                    if ((page - 1) * hitsPerPage < numTotalHits) {
                        start = (page - 1) * hitsPerPage;
                        break;
                    } else {
                        System.out.println("No such page");
                    }
                }
            }
            if (quit) break;
            end = Math.min(numTotalHits, start + hitsPerPage);
        }
    }
}
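The paging loop above simply re-issues the query with a larger result count when the user pages past the collected hits. A hedged alternative, not part of this source file, is IndexSearcher.searchAfter (available since Lucene 3.5), which resumes collection after the last hit of the previous page so that each page costs roughly the same amount of work. In the sketch below, hits, query, hitsPerPage, and searcher refer to the variables of doPagingSearch above, and the index is assumed not to change between calls:

// Sketch only: fetch the page that follows the current one without re-collecting
// everything from the start.
ScoreDoc lastOnPage = hits[hits.length - 1];
TopDocs nextPage = searcher.searchAfter(lastOnPage, query, hitsPerPage);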
From source file: aos.lucene.analysis.codec.MetaphoneAnalyzerTest.java
License: Apache License

public void testKoolKat() throws Exception {
    RAMDirectory directory = new RAMDirectory();
    Analyzer analyzer = new MetaphoneReplacementAnalyzer();

    IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.add(new Field("contents", "cool cat", Field.Store.YES, Field.Index.ANALYZED));
    writer.addDocument(doc);
    writer.close();

    IndexSearcher searcher = new IndexSearcher(directory);
    Query query = new QueryParser(Version.LUCENE_46, "contents", analyzer).parse("kool kat");

    TopDocs hits = searcher.search(query, 1);
    assertEquals(1, hits.totalHits);

    int docID = hits.scoreDocs[0].doc;
    Document storedDoc = searcher.doc(docID);
    assertEquals("cool cat", storedDoc.get("contents"));

    searcher.close();
}
From source file: aos.lucene.remote.SearchClient.java
License: Apache License

private static void search(String name, String word) throws Exception {
    TermQuery query = new TermQuery(new Term("word", word));

    IndexSearcher searcher = (IndexSearcher) searcherCache.get(name);
    if (searcher == null) {
        searcher = new IndexSearcher(new IndexSearcher[] { lookupRemote(name) });
        searcherCache.put(name, searcher);
    }

    long begin = new Date().getTime();
    TopDocs hits = searcher.search(query, 10);
    long end = new Date().getTime();
    System.out.print("Searched " + name + " for '" + word + "' (" + (end - begin) + " ms): ");

    if (hits.scoreDocs.length == 0) {
        System.out.print("<NONE FOUND>");
    }

    for (ScoreDoc sd : hits.scoreDocs) {
        Document doc = searcher.doc(sd.doc);
        String[] values = doc.getValues("syn");
        for (String syn : values) {
            System.out.print(syn + " ");
        }
    }
    LOGGER.info("");
    LOGGER.info("");
}
From source file: aos.lucene.search.advanced.MultiFieldQueryParserTest.java
License: Apache License

public void testDefaultOperator() throws Exception {
    Query query = new MultiFieldQueryParser(Version.LUCENE_46, new String[] { "title", "subject" },
            new SimpleAnalyzer()).parse("development");

    Directory dir = TestUtil.getBookIndexDirectory();
    IndexSearcher searcher = new IndexSearcher(dir, true);
    TopDocs hits = searcher.search(query, 10);

    assertTrue(TestUtil.hitsIncludeTitle(searcher, hits, "Ant in Action"));
    assertTrue(TestUtil.hitsIncludeTitle(searcher,                          //A
                                         hits,                              //A
                                         "Extreme Programming Explained")); //A

    searcher.close();
    dir.close();
}
From source file: aos.lucene.search.advanced.MultiFieldQueryParserTest.java
License: Apache License

public void testSpecifiedOperator() throws Exception {
    Query query = MultiFieldQueryParser.parse(Version.LUCENE_46, "lucene",
            new String[] { "title", "subject" },
            new BooleanClause.Occur[] { BooleanClause.Occur.MUST, BooleanClause.Occur.MUST },
            new SimpleAnalyzer());

    Directory dir = TestUtil.getBookIndexDirectory();
    IndexSearcher searcher = new IndexSearcher(dir, true);
    TopDocs hits = searcher.search(query, 10);

    assertTrue(TestUtil.hitsIncludeTitle(searcher, hits, "Lucene in Action, Second Edition"));
    assertEquals("one and only one", 1, hits.scoreDocs.length);

    searcher.close();
    dir.close();
}
From source file: aos.lucene.search.advanced.IndexSearcherTest.java
License: Apache License

public void testMulti() throws Exception {
    IndexSearcher searcher = new IndexSearcher(searchers);

    TermRangeQuery query = new TermRangeQuery("animal",   //
                                              "h",        //
                                              "t",        //
                                              true, true); // #3

    TopDocs hits = searcher.search(query, 10);
    assertEquals("tarantula not included", 12, hits.totalHits);
}
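The searcher-over-searchers construction above comes from older, MultiSearcher-era APIs. A hedged sketch of the usual modern equivalent, not from this test, is to combine the sub-indexes' readers in a MultiReader and search that with a single IndexSearcher; readerA and readerB below are hypothetical already-open DirectoryReaders:

// Sketch only, assuming Lucene 4+: one searcher over several indexes.
IndexSearcher searcher = new IndexSearcher(new MultiReader(readerA, readerB));
TopDocs hits = searcher.search(query, 10);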
From source file: aos.lucene.search.advanced.TimeLimitingCollectorTest.java
License: Apache License

public void testTimeLimitingCollector() throws Exception {
    Directory dir = TestUtil.getBookIndexDirectory();
    IndexSearcher searcher = new IndexSearcher(dir);
    Query q = new MatchAllDocsQuery();
    int numAllBooks = TestUtil.hitCount(searcher, q);

    TopScoreDocCollector topDocs = TopScoreDocCollector.create(10, false);
    Collector collector = new TimeLimitingCollector(topDocs,   // #A
                                                    1000);     // #A
    try {
        searcher.search(q, collector);
        assertEquals(numAllBooks, topDocs.getTotalHits());     // #B
    } catch (TimeExceededException tee) {                      // #C
        LOGGER.info("Too much time taken.");                   // #C
    }                                                          // #C

    searcher.close();
    dir.close();
}
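The two-argument TimeLimitingCollector constructor used above belongs to older Lucene releases. In 4.x and later the class is constructed with a Counter clock (org.apache.lucene.util.Counter); a hedged sketch of the newer construction, reusing the topDocs collector from the test and not taken from this source file:

// Sketch only, assuming Lucene 4+: wrap the underlying collector with the global
// time counter and allow roughly 1000 ms before TimeExceededException is thrown.
Counter clock = TimeLimitingCollector.getGlobalCounter();
Collector collector = new TimeLimitingCollector(topDocs, clock, 1000);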
From source file: aos.lucene.search.ext.collector.CollectorTest.java
License: Apache License

public void testCollecting() throws Exception {
    Directory dir = TestUtil.getBookIndexDirectory();
    TermQuery query = new TermQuery(new Term("contents", "junit"));
    IndexSearcher searcher = new IndexSearcher(dir);

    BookLinkCollector collector = new BookLinkCollector();
    searcher.search(query, collector);

    Map<String, String> linkMap = collector.getLinks();
    assertEquals("ant in action", linkMap.get("http://www.manning.com/loughran"));

    TopDocs hits = searcher.search(query, 10);
    TestUtil.dumpHits(searcher, hits);

    searcher.close();
    dir.close();
}
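The test above drives its custom collector through the older search(Query, Collector) entry point. If the same collector were used with the search(Query, CollectorManager) overload from the top of this page, it could be wrapped in a manager whose reduce step merges the per-slice link maps. This is an illustration only: it assumes BookLinkCollector (with its no-argument constructor and getLinks() method, as shown above) is compatible with the Collector interface of a Lucene version that has CollectorManager (5.1+), and that searcher and query are the variables from the test:

// Illustrative sketch, not from the source file: one BookLinkCollector per slice,
// merged into a single map in reduce().
CollectorManager<BookLinkCollector, Map<String, String>> manager =
        new CollectorManager<BookLinkCollector, Map<String, String>>() {
            @Override
            public BookLinkCollector newCollector() {
                return new BookLinkCollector();
            }

            @Override
            public Map<String, String> reduce(Collection<BookLinkCollector> collectors) {
                Map<String, String> merged = new HashMap<>();
                for (BookLinkCollector c : collectors) {
                    merged.putAll(c.getLinks());
                }
                return merged;
            }
        };
Map<String, String> links = searcher.search(query, manager);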
From source file: aos.lucene.search.ext.payloads.PayloadsTest.java
License: Apache License

public void testPayloadTermQuery() throws Throwable {
    addDoc("Hurricane warning",
            "Bulletin: A hurricane warning was issued at " + "6 AM for the outer great banks");
    addDoc("Warning label maker",
            "The warning label maker is a delightful toy for " + "your precocious seven year old's warning needs");
    addDoc("Tornado warning",
            "Bulletin: There is a tornado warning for " + "Worcester county until 6 PM today");

    IndexReader r = writer.getReader();
    writer.close();

    IndexSearcher searcher = new IndexSearcher(r);
    searcher.setSimilarity(new BoostingSimilarity());

    Term warning = new Term("contents", "warning");

    Query query1 = new TermQuery(warning);
    LOGGER.info("\nTermQuery results:");
    TopDocs hits = searcher.search(query1, 10);
    TestUtil.dumpHits(searcher, hits);
    assertEquals("Warning label maker",                          // #B
            searcher.doc(hits.scoreDocs[0].doc).get("title"));   // #B

    Query query2 = new PayloadTermQuery(warning, new AveragePayloadFunction());
    LOGGER.info("\nPayloadTermQuery results:");
    hits = searcher.search(query2, 10);
    TestUtil.dumpHits(searcher, hits);
    assertEquals("Warning label maker",                          // #C
            searcher.doc(hits.scoreDocs[2].doc).get("title"));   // #C

    r.close();
    searcher.close();
}