Example usage for org.apache.lucene.search IndexSearcher IndexSearcher

List of usage examples for org.apache.lucene.search IndexSearcher IndexSearcher

Introduction

On this page you can find example usage of the org.apache.lucene.search IndexSearcher IndexSearcher constructor.

Prototype

public IndexSearcher(IndexReaderContext context) 

Document

Creates a searcher searching the provided top-level IndexReaderContext.
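
For orientation, here is a minimal, self-contained sketch of this constructor in use. The index path is purely illustrative, and the snippet assumes a recent Lucene version where FSDirectory.open takes a java.nio.file.Path and IndexReader.getContext() is available.

import java.nio.file.Paths;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public class IndexSearcherContextSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical index location, for illustration only.
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/lucene-index"));
                DirectoryReader reader = DirectoryReader.open(dir)) {
            // getContext() returns the reader's top-level IndexReaderContext.
            IndexReaderContext context = reader.getContext();
            IndexSearcher searcher = new IndexSearcher(context);
            System.out.println("maxDoc = " + searcher.getIndexReader().maxDoc());
        }
    }
}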

Usage

From source file:com.liferay.portal.util.LuceneUtil.java

License:Open Source License

public static IndexSearcher getSearcher(String companyId) throws IOException {

    Directory directory = FSDirectory.open(new File(getLuceneDir(companyId)));
    DirectoryReader ireader = DirectoryReader.open(directory);

    return new IndexSearcher(ireader);
}
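
The snippet above uses the File-based FSDirectory.open overload from older Lucene releases. As a hedged sketch, the same searcher could be obtained on Lucene 5.x and later (where FSDirectory.open takes a java.nio.file.Path) roughly as follows; getLuceneDir is the helper from the example above, and java.nio.file.Paths must be imported in addition to the example's imports.

public static IndexSearcher getSearcher(String companyId) throws IOException {
    // Path-based variant for newer Lucene versions.
    Directory directory = FSDirectory.open(Paths.get(getLuceneDir(companyId)));
    DirectoryReader ireader = DirectoryReader.open(directory);
    return new IndexSearcher(ireader);
}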

From source file:com.lin.studytest.lucene.SearchFiles.java

License:Apache License

/** Simple command-line based search demo. */
public static void main(String[] args) throws Exception {
    String usage = "Usage:\tjava org.apache.lucene.demo.SearchFiles [-index dir] [-field f] [-repeat n] [-queries file] [-query string] [-raw] [-paging hitsPerPage]\n\nSee http://lucene.apache.org/core/4_1_0/demo/ for details.";
    if (args.length > 0 && ("-h".equals(args[0]) || "-help".equals(args[0]))) {
        System.out.println(usage);
        System.exit(0);
    }

    String index = "D:\\software\\lucene\\testdata\\indexpath";
    String field = "contents";
    String queries = null;

    int repeat = 100;
    boolean raw = false;
    String queryString = null;
    int hitsPerPage = 10;

    //    for(int i = 0;i < args.length;i++) {
    //      if ("-index".equals(args[i])) {
    //        index = args[i+1];
    //        i++;
    //      } else if ("-field".equals(args[i])) {
    //        field = args[i+1];
    //        i++;
    //      } else if ("-queries".equals(args[i])) {
    //        queries = args[i+1];
    //        i++;
    //      } else if ("-query".equals(args[i])) {
    //        queryString = args[i+1];
    //        i++;
    //      } else if ("-repeat".equals(args[i])) {
    //        repeat = Integer.parseInt(args[i+1]);
    //        i++;
    //      } else if ("-raw".equals(args[i])) {
    //        raw = true;
    //      } else if ("-paging".equals(args[i])) {
    //        hitsPerPage = Integer.parseInt(args[i+1]);
    //        if (hitsPerPage <= 0) {
    //          System.err.println("There must be at least 1 hit per page.");
    //          System.exit(1);
    //        }
    //        i++;
    //      }
    //    }

    IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(index)));
    IndexSearcher searcher = new IndexSearcher(reader);
    Analyzer analyzer = new StandardAnalyzer();

    BufferedReader in = null;
    if (queries != null) {
        in = Files.newBufferedReader(Paths.get(queries), StandardCharsets.UTF_8);
    } else {
        in = new BufferedReader(new InputStreamReader(System.in, StandardCharsets.UTF_8));
    }
    QueryParser parser = new QueryParser(field, analyzer);
    while (true) {
        if (queries == null && queryString == null) { // prompt the user
            System.out.println("Enter query: ");
        }

        String line = queryString != null ? queryString : in.readLine();

        if (line == null) {
            break;
        }

        line = line.trim();
        if (line.length() == 0) {
            break;
        }

        Query query = parser.parse(line);
        System.out.println("Searching for: " + query.toString(field));

        if (repeat > 0) { // repeat & time as benchmark
            Date start = new Date();
            for (int i = 0; i < repeat; i++) {
                searcher.search(query, 100);
            }
            Date end = new Date();
            System.out.println("Time: " + (end.getTime() - start.getTime()) + "ms");
        }

        doPagingSearch(in, searcher, query, hitsPerPage, raw, queries == null && queryString == null);

        if (queryString != null) {
            break;
        }
    }
    reader.close();
}
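
The demo delegates to doPagingSearch, which is not included in this listing. Below is a minimal hedged sketch of paging over results with IndexSearcher.searchAfter; the method name, the "path" field, and the output format are illustrative and not the demo's actual implementation, and the imports are assumed to be those already present in the example.

static void pageThroughHits(IndexSearcher searcher, Query query, int hitsPerPage) throws IOException {
    ScoreDoc after = null;
    while (true) {
        // Fetch the next page relative to the last ScoreDoc of the previous page.
        TopDocs page = (after == null)
                ? searcher.search(query, hitsPerPage)
                : searcher.searchAfter(after, query, hitsPerPage);
        if (page.scoreDocs.length == 0) {
            break;
        }
        for (ScoreDoc sd : page.scoreDocs) {
            Document doc = searcher.doc(sd.doc);
            System.out.println(doc.get("path") + " (score=" + sd.score + ")");
        }
        after = page.scoreDocs[page.scoreDocs.length - 1];
    }
}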

From source file:com.liusoft.dlog4j.search.SearchProxy.java

License:Open Source License

/**
 * Searches the index that corresponds to the given parameters.
 * @param params the search parameters
 * @return the list of matching result objects, or null if the search could not be performed
 * @throws Exception 
 */
public static List search(SearchParameter params) throws Exception {
    if (params == null)
        return null;

    SearchEnabled searching = (SearchEnabled) params.getSearchObject().newInstance();

    StringBuffer path = new StringBuffer(_baseIndexPath);
    path.append(searching.name());
    File f = new File(path.toString());
    if (!f.exists())
        return null;

    IndexSearcher searcher = new IndexSearcher(path.toString());

    //Build the combined full-text query from the search keywords
    BooleanQuery comboQuery = new BooleanQuery();
    int _query_count = 0;
    StringTokenizer st = new StringTokenizer(params.getSearchKey());
    while (st.hasMoreElements()) {
        String q = st.nextToken();
        String[] indexFields = searching.getIndexFields();
        for (int i = 0; i < indexFields.length; i++) {
            QueryParser qp = new QueryParser(indexFields[i], analyzer);
            try {
                Query subjectQuery = qp.parse(q);
                comboQuery.add(subjectQuery, BooleanClause.Occur.SHOULD);
                _query_count++;
            } catch (Exception e) {
                log.error("Add query parameter failed. key=" + q, e);
            }
        }
    }

    if (_query_count == 0)//no valid query terms
        return null;

    //Build filters from the additional search conditions
    MultiFilter multiFilter = null;
    HashMap conds = params.getConditions();
    if (conds != null) {
        Iterator keys = conds.keySet().iterator();
        while (keys.hasNext()) {
            if (multiFilter == null)
                multiFilter = new MultiFilter(0);
            String key = (String) keys.next();
            multiFilter.add(new FieldFilter(key, conds.get(key).toString()));
        }
    }

    /*
     * Creates a sort, possibly in reverse,
     * by terms in the given field with the type of term values explicitly given.
     */
    SortField[] s_fields = new SortField[2];
    s_fields[0] = SortField.FIELD_SCORE;
    s_fields[1] = new SortField(searching.getKeywordField(), SortField.INT, true);
    Sort sort = new Sort(s_fields);

    Hits hits = searcher.search(comboQuery, multiFilter, sort);
    int numResults = hits.length();
    //System.out.println(numResults + " found............................");
    int result_count = Math.min(numResults, MAX_RESULT_COUNT);
    List results = new ArrayList(result_count);
    for (int i = 0; i < result_count; i++) {
        Document doc = (Document) hits.doc(i);
        //Map the document back onto a Java object
        Object result = params.getSearchObject().newInstance();
        Enumeration fields = doc.fields();
        while (fields.hasMoreElements()) {
            Field field = (Field) fields.nextElement();
            //System.out.println(field.name()+" -- "+field.stringValue());
            if (CLASSNAME_FIELD.equals(field.name()))
                continue;
            //Skip fields that were not stored
            if (!field.isStored())
                continue;
            //System.out.println("=========== begin to mapping ============");
            //String --> anything
            Class fieldType = getNestedPropertyType(result, field.name());
            //System.out.println(field.name()+", class = " + fieldType.getName());
            Object fieldValue = null;
            if (fieldType.equals(Date.class))
                fieldValue = new Date(Long.parseLong(field.stringValue()));
            else
                fieldValue = ConvertUtils.convert(field.stringValue(), fieldType);
            //System.out.println(fieldValue+", class = " + fieldValue.getClass().getName());
            setNestedProperty(result, field.name(), fieldValue);
        }
        results.add(result);
    }

    return results;
}
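
Note that the Hits class and the String-path IndexSearcher constructor used above belong to very old Lucene releases. On current versions the sorted search would look roughly like the following hedged sketch; the filter handling is omitted, and path, searching, comboQuery and MAX_RESULT_COUNT are the variables and constant from the example above.

    // Modern-API sketch: open the reader explicitly and page through TopFieldDocs.
    DirectoryReader reader = DirectoryReader.open(FSDirectory.open(Paths.get(path.toString())));
    IndexSearcher searcher = new IndexSearcher(reader);
    Sort sort = new Sort(SortField.FIELD_SCORE,
            new SortField(searching.getKeywordField(), SortField.Type.INT, true));
    TopFieldDocs topDocs = searcher.search(comboQuery, MAX_RESULT_COUNT, sort);
    for (ScoreDoc sd : topDocs.scoreDocs) {
        Document doc = searcher.doc(sd.doc);
        // Map the stored fields back onto the result object as in the example above.
    }
    reader.close();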

From source file:com.lorelib.analyzer.sample.LuceneIndexAndSearchDemo.java

License:Apache License

/**
 * Indexing and search demo using IKAnalyzer.
 * @param args
 */
public static void main(String[] args) {
    //Lucene Document field name
    String fieldName = "text";
    //Text to index
    String text = "IK Analyzer???????";

    //Instantiate the IKAnalyzer analyzer (smart mode)
    Analyzer analyzer = new IKAnalyzer(true);

    Directory directory = null;
    IndexWriter iwriter = null;
    IndexReader ireader = null;
    IndexSearcher isearcher = null;
    try {
        //Build an in-memory index
        directory = new RAMDirectory();

        //Configure the IndexWriterConfig
        IndexWriterConfig iwConfig = new IndexWriterConfig(analyzer);
        iwConfig.setOpenMode(OpenMode.CREATE_OR_APPEND);
        iwriter = new IndexWriter(directory, iwConfig);
        //Write a document
        Document doc = new Document();
        doc.add(new StringField("ID", "10000", Field.Store.YES));
        doc.add(new TextField(fieldName, text, Field.Store.YES));
        iwriter.addDocument(doc);
        iwriter.close();

        //Search phase**********************************
        //Instantiate the searcher
        ireader = DirectoryReader.open(directory);
        isearcher = new IndexSearcher(ireader);

        String keyword = "?";
        //Use QueryParser to build the Query
        QueryParser qp = new QueryParser(fieldName, analyzer);
        qp.setDefaultOperator(QueryParser.AND_OPERATOR);
        Query query = qp.parse(keyword);
        System.out.println("Query = " + query);

        //Search for the top 5 hits
        TopDocs topDocs = isearcher.search(query, 5);
        System.out.println("Total hits: " + topDocs.totalHits);
        //Print the matching documents
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        for (int i = 0; i < scoreDocs.length; i++) {
            Document targetDoc = isearcher.doc(scoreDocs[i].doc);
            System.out.println(targetDoc.toString());
        }

    } catch (CorruptIndexException e) {
        e.printStackTrace();
    } catch (LockObtainFailedException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    } finally {
        if (ireader != null) {
            try {
                ireader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (directory != null) {
            try {
                directory.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:com.luc.searcher.LuceneFileSearcher.java

License:Apache License

public List<String> search(File indexDir, String query) throws IOException, ParseException {

    list = new ArrayList<String>();

    dir = new SimpleFSDirectory(indexDir, null);

    pQuery = new PhraseQuery();

    String[] splittedQuery = query.split(" ");

    /*
     * Build a Phrase Query 
     */

    for (int i = 0; i < splittedQuery.length; i++) {

        pQuery.add(new Term("contents", splittedQuery[i]));

    }

    pQuery.setSlop(5);

    indexSearcher = new IndexSearcher(dir);

    /*
     * Call query parser to generate search query
     */

    qp = new QueryParser(Version.LUCENE_35, "contents", new StandardAnalyzer(Version.LUCENE_35));

    q = qp.parse(query);

    /*
     * create a boolean query using phrase query and output of query parser to get a query for better outputs
     */

    bQuery = new BooleanQuery();
    bQuery.add(q, BooleanClause.Occur.SHOULD);
    bQuery.add(pQuery, BooleanClause.Occur.SHOULD);

    /*
     * Retrieve files matching the query
     */

    TopDocs topDocs = indexSearcher.search(bQuery, 10);

    for (int i = 0; i < topDocs.scoreDocs.length; i++) {

        ScoreDoc match = topDocs.scoreDocs[i];

        Document doc = indexSearcher.doc(match.doc);

        list.add(doc.get("filename"));

        /*
                
        System.out.println(doc.get("filename"));
                
        */

    }

    return list;

}
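
This searcher targets Lucene 3.5 (Version.LUCENE_35), where PhraseQuery and BooleanQuery are mutable. On Lucene 5.x and later they are assembled through builders, roughly as in this hedged sketch; query is the method's String parameter, the "contents" field is the one used above, and the newer QueryParser/StandardAnalyzer imports are assumed.

    // Builder-based construction for newer Lucene versions.
    PhraseQuery.Builder phraseBuilder = new PhraseQuery.Builder();
    for (String token : query.split(" ")) {
        phraseBuilder.add(new Term("contents", token));
    }
    phraseBuilder.setSlop(5);

    BooleanQuery booleanQuery = new BooleanQuery.Builder()
            .add(new QueryParser("contents", new StandardAnalyzer()).parse(query), BooleanClause.Occur.SHOULD)
            .add(phraseBuilder.build(), BooleanClause.Occur.SHOULD)
            .build();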

From source file:com.lucene.basic.provider.SearcherProvider.java

License:Apache License

@Override
public IndexSearcher get() throws Exception {
    IndexReader indexReader = readerProvider.get();
    return new IndexSearcher(indexReader);
}
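
Where the underlying index is updated concurrently, Lucene's SearcherManager is the usual way to hand out and refresh searchers. Below is a minimal hedged sketch built on it; directory stands for whatever Directory the application already has open, and the query is only a stand-in.

    // Sketch of a provider backed by SearcherManager (acquire/release pattern).
    SearcherManager manager = new SearcherManager(directory, new SearcherFactory());

    IndexSearcher searcher = manager.acquire();
    try {
        TopDocs hits = searcher.search(new MatchAllDocsQuery(), 10);
        System.out.println("hits: " + hits.totalHits);
    } finally {
        // Searchers obtained from a SearcherManager must be released, never closed directly.
        manager.release(searcher);
    }

    // Call maybeRefresh() periodically (or after commits) to pick up index changes.
    manager.maybeRefresh();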

From source file:com.lucene.index.test.IKAnalyzerdemo.java

License:Apache License

/**
 * Indexing and search demo using IKAnalyzer over two indexes combined with a MultiReader.
 * @param args
 */
public static void main(String[] args) {
    //Lucene Document field name
    String fieldName = "text";
    //Texts to index
    String text1 = "oracle,?";
    String text2 = "?";
    String text3 = "?";

    //Instantiate the IKAnalyzer analyzer
    Analyzer analyzer = new IKAnalyzer();

    Directory directory1 = null;
    Directory directory2 = null;
    IndexWriter iwriter1 = null;
    IndexWriter iwriter2 = null;
    IndexReader ireader1 = null;
    IndexReader ireader2 = null;
    IndexSearcher isearcher = null;
    try {
        //Build two in-memory indexes
        directory1 = new RAMDirectory();
        directory2 = new RAMDirectory();

        //Configure the IndexWriterConfigs

        IndexWriterConfig iwConfig1 = new IndexWriterConfig(analyzer);
        iwConfig1.setOpenMode(OpenMode.CREATE);

        IndexWriterConfig iwConfig2 = new IndexWriterConfig(analyzer);
        iwConfig2.setOpenMode(OpenMode.CREATE);
        iwriter1 = new IndexWriter(directory1, iwConfig1);
        iwriter2 = new IndexWriter(directory2, iwConfig2);

        //Write one document to each index
        Document doc1 = new Document();
        doc1.add(new StringField("ID", "10000", Field.Store.YES));
        doc1.add(new TextField("text1", text1, Field.Store.YES));
        iwriter1.addDocument(doc1);

        Document doc2 = new Document();
        doc2.add(new StringField("ID", "10001", Field.Store.YES));
        doc2.add(new TextField("text2", text2, Field.Store.YES));
        iwriter2.addDocument(doc2);

        iwriter1.close();
        iwriter2.close();

        //Search phase**********************************
        //Open readers for both indexes and combine them with a MultiReader
        ireader1 = DirectoryReader.open(directory1);
        ireader2 = DirectoryReader.open(directory2);

        IndexReader[] mreader = { ireader1, ireader2 };

        MultiReader multiReader = new MultiReader(mreader);

        isearcher = new IndexSearcher(multiReader);

        String keyword = "?";
        //Build the Query across the two fields
        String[] fields = { "text1", "text2" };

        Map<String, Float> boosts = new HashMap<String, Float>();
        boosts.put("text1", 5.0f);
        boosts.put("text2", 2.0f);
        /* Use MultiFieldQueryParser to query multiple fields with per-field boosts */
        MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, analyzer, boosts);
        Query query = parser.parse(keyword);

        System.out.println("Query = " + query);

        //Search for the top 5 hits
        TopDocs topDocs = isearcher.search(query, 5);
        System.out.println("Total hits: " + topDocs.totalHits);
        //Print the matching documents
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        for (int i = 0; i < scoreDocs.length; i++) {
            Document targetDoc = isearcher.doc(scoreDocs[i].doc);
            System.out.println(targetDoc.toString());
        }

    } catch (CorruptIndexException e) {
        e.printStackTrace();
    } catch (LockObtainFailedException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    } finally {
        if (ireader1 != null) {
            try {
                ireader1.close();
                ireader2.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (directory1 != null) {
            try {
                directory1.close();
                directory2.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:com.lucene.index.test.IKAnalyzerdemoMutilField.java

License:Apache License

/**
 * Indexing and search demo using IKAnalyzer with several documents in one field.
 * @param args
 */
public static void main(String[] args) {
    //Lucene Document field name
    String fieldName = "text";
    //Texts to index
    String text1 = "oracle?";
    String text2 = "?";
    String text3 = "?";

    //Instantiate the IKAnalyzer analyzer
    Analyzer analyzer = new IKAnalyzer();

    Directory directory = null;
    IndexWriter iwriter = null;
    IndexReader ireader = null;
    IndexSearcher isearcher = null;
    try {
        //Build an in-memory index
        directory = new RAMDirectory();

        //Configure the IndexWriterConfig

        IndexWriterConfig iwConfig = new IndexWriterConfig(analyzer);
        iwConfig.setOpenMode(OpenMode.CREATE_OR_APPEND);
        iwriter = new IndexWriter(directory, iwConfig);
        //Write the documents
        Document doc1 = new Document();
        doc1.add(new StringField("ID", "10000", Field.Store.YES));
        doc1.add(new TextField(fieldName, text1, Field.Store.YES));
        iwriter.addDocument(doc1);

        Document doc2 = new Document();
        doc2.add(new StringField("ID", "10000", Field.Store.YES));
        doc2.add(new TextField(fieldName, text2, Field.Store.YES));
        iwriter.addDocument(doc2);

        Document doc3 = new Document();
        doc3.add(new StringField("ID", "10000", Field.Store.YES));
        doc3.add(new TextField(fieldName, text3, Field.Store.YES));
        iwriter.addDocument(doc3);
        iwriter.close();

        //Search phase**********************************
        //Instantiate the searcher
        ireader = DirectoryReader.open(directory);
        isearcher = new IndexSearcher(ireader);

        String keyword = "?";
        //Use QueryParser to build the Query
        QueryParser qp = new QueryParser(fieldName, analyzer);
        qp.setDefaultOperator(QueryParser.AND_OPERATOR);
        Query query = qp.parse(keyword);
        System.out.println("Query = " + query);

        //Search for the top 5 hits
        TopDocs topDocs = isearcher.search(query, 5);
        System.out.println("Total hits: " + topDocs.totalHits);
        //Print the matching documents
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;
        for (int i = 0; i < scoreDocs.length; i++) {
            Document targetDoc = isearcher.doc(scoreDocs[i].doc);
            System.out.println(targetDoc.toString());
        }

    } catch (CorruptIndexException e) {
        e.printStackTrace();
    } catch (LockObtainFailedException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (ParseException e) {
        e.printStackTrace();
    } finally {
        if (ireader != null) {
            try {
                ireader.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        if (directory != null) {
            try {
                directory.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

From source file:com.m3958.apps.pcms.lucene.facet.MultiCategoryListsFacetsExample.java

License:Apache License

/** User runs a query and counts facets. */
private List<FacetResult> search() throws IOException {
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);

    // Count both "Publish Date" and "Author" dimensions
    FacetSearchParams fsp = new FacetSearchParams(indexingParams,
            new CountFacetRequest(new CategoryPath("Publish Date"), 10),
            new CountFacetRequest(new CategoryPath("Author"), 10));

    // Aggregates the facet counts
    FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);

    // MatchAllDocsQuery is for "browsing" (counts facets
    // for all non-deleted docs in the index); normally
    // you'd use a "normal" query, and use MultiCollector to
    // wrap collecting the "normal" hits and also facets:
    searcher.search(new MatchAllDocsQuery(), fc);

    // Retrieve results
    List<FacetResult> facetResults = fc.getFacetResults();

    indexReader.close();
    taxoReader.close();

    return facetResults;
}

From source file:com.m3958.apps.pcms.lucene.facet.SimpleFacetsExample.java

License:Apache License

/** User runs a query and counts facets. */
private List<FacetResult> search() throws IOException {
    DirectoryReader indexReader = DirectoryReader.open(indexDir);
    IndexSearcher searcher = new IndexSearcher(indexReader);
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);

    // Count both "Publish Date" and "Author" dimensions
    FacetSearchParams fsp = new FacetSearchParams(new CountFacetRequest(new CategoryPath("Publish Date"), 10),
            new CountFacetRequest(new CategoryPath("Author"), 10));

    // Aggregates the facet counts
    FacetsCollector fc = FacetsCollector.create(fsp, searcher.getIndexReader(), taxoReader);

    // MatchAllDocsQuery is for "browsing" (counts facets
    // for all non-deleted docs in the index); normally
    // you'd use a "normal" query, and use MultiCollector to
    // wrap collecting the "normal" hits and also facets:
    searcher.search(new MatchAllDocsQuery(), fc);

    // Retrieve results
    List<FacetResult> facetResults = fc.getFacetResults();

    indexReader.close();
    taxoReader.close();

    return facetResults;
}