Example usage for org.apache.lucene.search Query rewrite

List of usage examples for org.apache.lucene.search Query rewrite

Introduction

On this page you can find example usages of the org.apache.lucene.search Query rewrite method.

Prototype

public Query rewrite(IndexReader reader) throws IOException 

Source Link

Document

Expert: called to re-write queries into primitive queries.

Usage

From source file:GUIFrame.java

/**
 * Parses the given query string, rewrites it against the index, and adds the
 * resulting primitive terms to {@code TermListModel}.
 *
 * @param Index       directory containing the Lucene index
 * @param Analyzer    analyzer used to parse the query string
 * @param field       default field to search
 * @param queryString raw user query
 * @throws IOException    if the index cannot be read
 * @throws ParseException if the query string cannot be parsed
 */
protected void GetTerms(Directory Index, PorterStemAnalyzer Analyzer, String field, String queryString)
        throws IOException, ParseException {
    IndexReader indoReader = DirectoryReader.open(Index);
    try {
        QueryParser qp = new QueryParser(Version.LUCENE_30, field, Analyzer);
        Query query = qp.parse(queryString);

        // Query.rewrite() returns the rewritten query and leaves the receiver
        // untouched; the original discarded the result, so extractTerms() ran
        // on the unrewritten query.
        query = query.rewrite(indoReader);

        Set<Term> results = new HashSet<>();
        query.extractTerms(results);

        for (Term t : results) {
            // Term.toString() is "field:text"; substring(8) strips the field
            // prefix — assumes field name + ':' is 8 characters, TODO confirm.
            TermListModel.addElement(t.toString().substring(8));
        }
    } finally {
        // The original never closed the reader, leaking one per call.
        indoReader.close();
    }
}

From source file:ExpansionFrame.java

/**
 * Builds the expansion vocabulary from the summaries of the chosen documents
 * and computes a per-term, per-document frequency table.
 *
 * <p>For each entry of {@code ChoicesList} that also appears in
 * {@code ResultsList}, the corresponding summary is parsed and rewritten
 * against the index and its terms are appended to a serialized vocabulary
 * ("term;term;...!" per document). The vocabulary is then scanned twice: once
 * to collect the distinct terms, once to fill the frequency matrix.
 *
 * @param evt the button event (unused)
 */
private void ExpansionBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_ExpansionBtnActionPerformed
    String vocabulary = "";
    DefaultListModel<String> TermsListModel = new DefaultListModel<>();

    for (int docIndex = 0; docIndex < ChoicesList.getModel().getSize(); docIndex++) {
        int currentIndex = 0;
        boolean exists = false;

        // Find the chosen document's position in the results list.
        for (int resultDocIndex = 0; resultDocIndex < ResultsList.getModel().getSize(); resultDocIndex++) {
            if (ResultsList.getModel().getElementAt(resultDocIndex)
                    .equals(ChoicesList.getModel().getElementAt(docIndex))) {
                currentIndex = resultDocIndex;
                exists = true;
                break;
            }
        }

        if (exists) {
            try {
                IndexReader indoReader = DirectoryReader.open(myIndex);
                try {
                    QueryParser qp = new QueryParser(Version.LUCENE_30, "summary", myAnalyzer);
                    Query query = qp.parse(SummaryListModel.elementAt(currentIndex));

                    // Query.rewrite() returns the rewritten query and leaves the
                    // receiver untouched; the original discarded the result, so
                    // extractTerms() ran on the unrewritten query.
                    query = query.rewrite(indoReader);
                    Set<Term> results = new HashSet<>();
                    query.extractTerms(results);

                    for (Term t : results) {
                        // Term.toString() is "field:text"; substring(8) strips
                        // the "summary:" prefix (8 characters).
                        vocabulary += t.toString().substring(8) + ";";
                    }

                    // '!' terminates one document's term list.
                    vocabulary += "!";
                } finally {
                    // The original never closed the reader, leaking one per document.
                    indoReader.close();
                }
            } catch (IOException | ParseException ex) {
                Logger.getLogger(ExpansionFrame.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else {
            System.out.println("Error!");
        }
    }

    // Pass 1: collect the distinct terms of the vocabulary.
    String term = "";

    for (int i = 0; i < vocabulary.length(); i++) {
        if (vocabulary.charAt(i) != ';' && vocabulary.charAt(i) != '!') {
            term += vocabulary.charAt(i);
        } else {
            if (term.length() != 0) {
                boolean known = false;
                for (int k = 0; k < TermsListModel.size(); k++) {
                    if (term.equals(TermsListModel.elementAt(k))) {
                        known = true;
                        break;
                    }
                }

                if (!known) {
                    TermsListModel.addElement(term);
                }
                // Reset unconditionally: the original reset only for new terms,
                // so a repeated term leaked its characters into the next term.
                term = "";
            }
        }
    }

    term = "";

    // Pass 2: per-term, per-document frequency counts.
    // (Java zero-initializes int arrays, so no explicit fill is needed.)
    int[][] termsFreqArray = new int[TermsListModel.size()][ChoicesList.getModel().getSize()];

    int docIndex = 0;
    for (int i = 0; i < vocabulary.length(); i++) {
        if (vocabulary.charAt(i) != ';' && vocabulary.charAt(i) != '!') {
            term += vocabulary.charAt(i);
        } else {
            if (vocabulary.charAt(i) == '!') {
                // '!' marks the end of a document; later terms belong to the next one.
                docIndex++;
            }

            if (term.length() != 0) {
                int index = 0;
                for (int k = 0; k < TermsListModel.size(); k++) {
                    if (term.equals(TermsListModel.elementAt(k))) {
                        index = k;
                        break;
                    }
                }

                termsFreqArray[index][docIndex]++;

                term = "";
            }
        }
    }
}

From source file:ch.systemsx.cisd.openbis.generic.server.dataaccess.db.HibernateSearchDAO.java

License:Apache License

/**
 * Rewrites {@code query} against {@code indexReader}, falling back to the
 * unrewritten query (after logging) if the index cannot be read.
 */
private static Query rewriteQuery(IndexReader indexReader, Query query) {
    Query result;
    try {
        result = query.rewrite(indexReader);
    } catch (IOException ex) {
        logSearchHighlightingError(ex);
        result = query;
    }
    return result;
}

From source file:ci6226.eval_index_reader.java

/**
 * Runs each query string against the index, prints the best highlighted
 * fragments of the top {@code _topn} hits, and writes one comma-separated
 * line per query ("j,query,totalHits[,fragment,score]...") to {@code writer}.
 *
 * @param reader      reader used to rewrite queries for highlighting
 * @param searcher    searcher executing the queries
 * @param _analyzer   analyzer for query parsing and token streams
 * @param field       field to search and highlight
 * @param _searchList query strings to evaluate
 * @param _topn       maximum number of hits to retrieve per query
 * @param writer      destination for the per-query CSV lines
 */
public static void Searchit(IndexReader reader, IndexSearcher searcher, Analyzer _analyzer, String field,
        String[] _searchList, int _topn, PrintWriter writer)
        throws org.apache.lucene.queryparser.classic.ParseException, IOException, InvalidTokenOffsetsException {
    Analyzer analyzer = _analyzer;

    QueryParser parser = new QueryParser(Version.LUCENE_47, field, analyzer);

    String[] testString = _searchList;

    for (int j = 0; j < testString.length; j++) {
        // Accumulate the CSV line in a StringBuilder instead of repeated
        // String concatenation inside the loop.
        StringBuilder lstr = new StringBuilder();
        lstr.append(j).append(',').append(testString[j]);

        Query query = parser.parse(testString[j]);
        System.out.println("Searching for: " + query.toString(field));

        TopDocs topdocs = searcher.search(query, _topn);
        lstr.append(',').append(topdocs.totalHits);

        ScoreDoc[] scoreDocs = topdocs.scoreDocs;
        SimpleHTMLFormatter htmlFormatter = new SimpleHTMLFormatter();
        // Highlight against the rewritten query so multi-term queries
        // (prefix, fuzzy, ...) are expanded into highlightable terms.
        Highlighter highlighter = new Highlighter(htmlFormatter, new QueryScorer(query.rewrite(reader)));

        for (int i = 0; i < scoreDocs.length; i++) {
            int doc = scoreDocs[i].doc;
            Document document = searcher.doc(doc);
            System.out.println(i);
            String text = document.get(field);
            TokenStream tokenStream = TokenSources.getAnyTokenStream(searcher.getIndexReader(), doc, field,
                    analyzer);
            TextFragment[] frag = highlighter.getBestTextFragments(tokenStream, text, false, 10);
            // Keep the last scoring fragment, stripped of characters that
            // would break the CSV line.
            String line = "";
            for (int m = 0; m < frag.length; m++) {
                if ((frag[m] != null) && (frag[m].getScore() > 0)) {
                    System.out.println(frag[m].toString());
                    line = frag[m].toString();
                    line = line.replaceAll("\n", "");
                    line = line.replaceAll("\r", "");
                    line = line.replaceAll("\"", "");
                    line = line.replaceAll(",", " ");
                }
            }
            lstr.append(',').append(line);
            lstr.append(',').append(scoreDocs[i].score);
        }
        writer.write(lstr.append('\n').toString());
        // NOTE(review): scoreDocs.length is the number of returned docs
        // (capped at _topn), not the total hit count (topdocs.totalHits).
        System.out.println("Search for:" + testString[j] + " Total hits=" + scoreDocs.length);
        System.out.println("////////////////////////////////////////////////////");
    }

}

From source file:com.appeligo.search.actions.SearchResults.java

License:Apache License

/**
 * Runs the current query against the index and builds the page of search
 * results starting at {@code startIndex}, along with facet counts (genre,
 * what-matched, program) and a "did you mean" suggestion when the original
 * query returns too few or too poorly scored hits.
 *
 * @param startIndex index of the first hit to include in the returned page
 * @return the populated result list (also stored in {@code results});
 *         possibly empty or partial when an index error was logged
 */
public List<SearchResult> getSearchResults(int startIndex) {

    initializeStatics();

    hasMoreResults = false;
    try {
        IndexSearcher searcher = null;

        try {
            searcher = newIndexSearcher();
            IndexReader reader = searcher.getIndexReader();

            Query luceneQuery = generateLuceneQuery(searcher);
            luceneQuery = luceneQuery.rewrite(reader);
            Hits hits = searcher.search(luceneQuery);

            usingSuggestedQuery = false;
            suggestedQuery = null;
            // Try suggestions when the result set is too small or scores too low.
            if ((didYouMeanParser != null)
                    && ((hits.length() < minimumHits) || (calcScore(searcher, getQuery()) < minimumScore))) {
                if (log.isDebugEnabled()) {
                    log.debug("Need to suggest because either num hits " + hits.length() + " < " + minimumHits
                            + "\n or top hit score " + (hits.length() > 0 ? hits.score(0) : "[NO HITS]") + " < "
                            + minimumScore);
                }
                IndexSearcher compositeSearcher = new IndexSearcher(compositeIndexLocation);
                try {
                    log.debug("calling suggest() with query=" + getQuery() + " and composite index from "
                            + compositeIndexLocation);
                    Query suggestedQueries[] = didYouMeanParser.getSuggestions(getQuery(),
                            compositeSearcher.getIndexReader());
                    TreeSet<Suggestion> suggestions = new TreeSet<Suggestion>();

                    if (suggestedQueries != null) {
                        for (int i = 0; i < suggestedQueries.length; i++) {
                            log.debug("trying suggested query: " + suggestedQueries[i].toString(defaultField));
                            String suggestedQueryString = suggestedQueries[i].toString(defaultField);
                            String constrainedQueryString = suggestedQueryString;
                            if (constrainedQueryString.indexOf('"') < 0
                                    && constrainedQueryString.indexOf('\'') < 0) {
                                // proximity/distance query (within 5 words of each other)
                                constrainedQueryString = "\"" + constrainedQueryString + "\"~5";
                            }
                            Query suggestedLuceneQuery = generateLuceneQuery(constrainedQueryString, searcher);
                            suggestedLuceneQuery = suggestedLuceneQuery.rewrite(reader);
                            Hits suggestedHits = searcher.search(suggestedLuceneQuery);

                            float score = calcScore(suggestedQueryString, suggestedHits);

                            log.debug("=========================================");
                            log.debug("SCORE = " + score);
                            log.debug("=========================================");

                            suggestions.add(
                                    new Suggestion(suggestedQueryString, suggestedLuceneQuery, suggestedHits,
                                            score, ((i == 0) ? didYouMeanParser.includesOriginal() : false)));
                            log.debug("hits=" + suggestedHits.length() + ", score=" + score);
                        }
                    }

                    // The TreeSet orders suggestions; the best one sorts last.
                    Suggestion best = null;
                    if (suggestions.size() > 0) {
                        best = suggestions.last();
                    }

                    if (best != null && !best.isOriginal()) {
                        suggestedQuery = best.getQueryString();
                        if (suggestedQuery != null && suggestedQuery.indexOf('+') >= 0
                                && getQuery().indexOf('+') < 0) {
                            suggestedQuery = suggestedQuery.replaceAll("\\+", "");
                        }
                        if (hits.length() == 0) {
                            if (best.getHits().length() > 0) {
                                // Requery probably required because we added proximity before
                                String suggestedQueryString = best.getQueryString();
                                luceneQuery = generateLuceneQuery(suggestedQueryString, searcher);
                                luceneQuery = luceneQuery.rewrite(reader);
                                hits = searcher.search(luceneQuery);
                                usingSuggestedQuery = true;
                            }
                        }
                        log.debug("DidYouMeanParser suggested " + suggestedQuery);
                    } else {
                        if (best != null && best.isOriginal()) {
                            log.debug("The suggestion was the original query after all");
                        }
                        log.debug("DidYouMeanParser did not suggest anything");
                    }
                } finally {
                    compositeSearcher.close();
                }
            }
            totalHits = hits.length();
            // Get the genre matches:
            try {
                BitSetFacetHitCounter facetHitCounter = new BitSetFacetHitCounter();
                facetHitCounter.setSearcher(searcher);
                String baseQueryString = (isUsingSuggestedQuery() ? suggestedQuery : query);
                String quotedQueryString = baseQueryString;
                if (quotedQueryString.indexOf('"') == -1 && quotedQueryString.indexOf(' ') > -1) {
                    quotedQueryString = "\"" + quotedQueryString + "\"";
                }
                facetHitCounter.setBaseQuery(luceneQuery, baseQueryString);

                List<HitCount> subQueries = new ArrayList<HitCount>();
                for (Map.Entry<String, Query> entry : genreQueries.entrySet()) {
                    subQueries.add(
                            new HitCount(entry.getKey(), entry.getValue(), entry.getValue().toString(), 0));
                }
                facetHitCounter.setSubQueries(subQueries);
                genreCounts = facetHitCounter.getFacetHitCounts(true);

                whatMatchedCounts = new ArrayList<HitCount>();
                whatMatchedCounts
                        .add(new HitCount("Title", getFieldQuery(baseQueryString, "programTitle", searcher),
                                "programTitle:" + quotedQueryString, 0));
                whatMatchedCounts.add(
                        new HitCount("Episode Title", getFieldQuery(baseQueryString, "episodeTitle", searcher),
                                "episodeTitle:" + quotedQueryString, 0));
                whatMatchedCounts.add(
                        new HitCount("Description", getFieldQuery(baseQueryString, "description", searcher),
                                "description:" + quotedQueryString, 0));
                whatMatchedCounts.add(new HitCount("Content", getFieldQuery(baseQueryString, "text", searcher),
                        "text:" + quotedQueryString, 0));
                whatMatchedCounts
                        .add(new HitCount("Credits", getFieldQuery(baseQueryString, "credits", searcher),
                                "credits:" + quotedQueryString, 0));
                facetHitCounter.setSubQueries(whatMatchedCounts);
                whatMatchedCounts = facetHitCounter.getFacetHitCounts(true);

                // Program Count -- Not sure if there is a better way to do this.
                HashSet<String> programTitles = new HashSet<String>();
                programCounts = new ArrayList<HitCount>();
                for (int i = 0; i < hits.length() && programCounts.size() < 5; i++) {
                    String title = hits.doc(i).get("programTitle");
                    if (!programTitles.contains(title)) {
                        // QueryParser.escape() already escapes embedded quotes; the
                        // original additionally called queryTitle.replace("\"", "\\\"")
                        // and discarded the result (String.replace returns a new
                        // string), which was a no-op and has been removed.
                        String queryTitle = QueryParser.escape(title);
                        if (queryTitle.indexOf(' ') > -1) {
                            queryTitle = "\"" + queryTitle + "\"";
                        }

                        programCounts
                                .add(new HitCount(title, getFieldQuery(queryTitle, "programTitle", searcher),
                                        "programTitle:" + queryTitle, 0));
                        programTitles.add(title);
                    }
                }
                facetHitCounter.setSubQueries(programCounts);
                programCounts = facetHitCounter.getFacetHitCounts(false);
            } catch (Exception e) {
                // Facet counting is best-effort; log and continue with the results.
                log.error("Error computing facet hit counts", e);
            }

            results = new ArrayList<SearchResult>();
            programToSearchResult.clear();
            Query userQuery = getContentQuery(query, searcher);
            // Query.rewrite() returns the rewritten query and leaves the receiver
            // untouched; the original discarded the result, so the highlighter was
            // built from the unrewritten query (unlike luceneQuery above).
            userQuery = userQuery.rewrite(reader);
            Highlighter highlighter = new Highlighter(new TermFormatter(), new QueryScorer(userQuery, "text"));

            log.debug("#hits=" + hits.length());

            EPGProvider epgProvider = DefaultEpg.getInstance();

            boolean missingWebPaths = false; // We added this to the index midstream, so some do and some don't.
            // Next index rebuild, and they'll all have it.
            for (int i = 0; i < pageSize && i + startIndex < hits.length(); i++) {
                if (hits.doc(i + startIndex).get("webPath") == null) {
                    missingWebPaths = true;
                    break;
                }
            }
            Program[] programs = null;
            if (missingWebPaths) {
                List<String> programIds = new ArrayList<String>(pageSize);
                for (int i = 0; i < pageSize && i + startIndex < hits.length(); i++) {
                    programIds.add(hits.doc(i + startIndex).get("programID"));
                }
                programs = DefaultEpg.getInstance().getProgramList(programIds);
            }
            for (int i = 0; i < pageSize && i + startIndex < hits.length(); i++) {
                addDocument(hits.doc(i + startIndex), hits.score(i + startIndex), epgProvider, highlighter,
                        analyzer, null, null, (programs == null ? null : programs[i]));
            }
            if (results.size() + startIndex < hits.length()) {
                hasMoreResults = true;
            }
        } finally {
            if (searcher != null) {
                searcher.close();
            }
        }
    } catch (IOException e) {
        log.error("Error searching index", e);
    } catch (ParseException e) {
        log.error("Error searching index", e);
    }
    return results;
}

From source file:com.appeligo.search.actions.SearchResults.java

License:Apache License

/**
 * Scores a suggested query: constrains it to a proximity phrase (within 5
 * words) unless it already contains quotes, runs the search, and delegates
 * to {@code calcScore(String, Hits)}.
 */
private float calcScore(IndexSearcher searcher, String suggestedQueryString)
        throws IOException, ParseException {
    String constrained = suggestedQueryString;
    boolean alreadyQuoted = constrained.indexOf('"') >= 0 || constrained.indexOf('\'') >= 0;
    if (!alreadyQuoted) {
        // proximity/distance query (within 5 words of each other)
        constrained = "\"" + constrained + "\"~5";
    }
    Query luceneQuery = generateLuceneQuery(constrained, searcher);
    luceneQuery = luceneQuery.rewrite(searcher.getIndexReader());
    Hits hits = searcher.search(luceneQuery);
    return calcScore(suggestedQueryString, hits);
}

From source file:com.doculibre.constellio.lucene.BaseLuceneIndexHelper.java

License:Open Source License

/**
 * Returns {@code strToHighlight} with the portions matching
 * {@code luceneQuery} wrapped in {@code <span class="hit">} tags. Inputs
 * longer than {@code TAILLE_CHAINE_NON_FRAGMENTEE} are reduced to the best
 * fragments; shorter inputs are highlighted whole. For the "titre" field the
 * raw text is returned when no fragment matched.
 *
 * @param strToHighlight text to highlight
 * @param fieldName      field the text belongs to
 * @param luceneQuery    query whose terms should be highlighted
 * @return the highlighted text (may be null when no fragment matched and the
 *         field is not "titre")
 */
public String highlight(String strToHighlight, String fieldName, Query luceneQuery) {
    String highlightedText;
    Analyzer analyzer = analyzerProvider.getAnalyzer(Locale.FRENCH);
    Directory directory = null;
    IndexReader indexReader = null;
    try {
        directory = FSDirectory.open(indexDir);
        indexReader = DirectoryReader.open(directory);
        // Rewrite so multi-term queries are expanded into highlightable terms.
        Query rewrittenLuceneQuery = luceneQuery.rewrite(indexReader);
        QueryScorer luceneScorer = new QueryScorer(rewrittenLuceneQuery);
        SimpleHTMLFormatter luceneFormatter = new SimpleHTMLFormatter("<span class=\"hit\">", "</span>");
        Highlighter luceneHighlighter = new Highlighter(luceneFormatter, luceneScorer);

        Fragmenter luceneFragmenter;
        // If the string to highlight is longer than the threshold, produce
        // best fragments of TAILLE_FRAGMENT characters each; otherwise
        // highlight the whole string.
        if (strToHighlight.length() > TAILLE_CHAINE_NON_FRAGMENTEE) {
            luceneFragmenter = new SimpleFragmenter(TAILLE_FRAGMENT);
        } else {
            luceneFragmenter = new SimpleFragmenter(Integer.MAX_VALUE);
        }
        luceneHighlighter.setTextFragmenter(luceneFragmenter);

        TokenStream luceneTokenStream = analyzer.tokenStream(fieldName, new StringReader(strToHighlight));
        String fragment = null;
        if (strToHighlight.length() > TAILLE_CHAINE_NON_FRAGMENTEE) {
            fragment = luceneHighlighter.getBestFragments(luceneTokenStream, strToHighlight, NB_BEST_FRAGMENT,
                    FRAGMENT_SEP);
        } else {
            fragment = luceneHighlighter.getBestFragment(luceneTokenStream, strToHighlight);
        }

        // Fall back to the raw text for titles that produced no fragment.
        if (StringUtils.isBlank(fragment) && fieldName.equalsIgnoreCase("titre")) {
            fragment = strToHighlight;
        }

        highlightedText = fragment;
    } catch (IOException e) {
        throw new RuntimeException(e);
    } catch (InvalidTokenOffsetsException e) {
        throw new RuntimeException(e);
    } finally {
        // Close in reverse order of acquisition. The original closed these only
        // on the happy path, leaking both when an exception was thrown first.
        try {
            if (indexReader != null) {
                indexReader.close();
            }
        } catch (IOException e) {
            // best-effort close; the highlighting result is unaffected
        }
        try {
            if (directory != null) {
                directory.close();
            }
        } catch (IOException e) {
            // best-effort close
        }
    }
    return highlightedText;
}

From source file:com.github.rnewson.couchdb.lucene.DatabaseIndexer.java

License:Apache License

/**
 * Handles a search request. For each query string in the request, either
 * returns the rewritten query with per-term document frequencies (when the
 * "rewrite" parameter is true) or executes the search and returns matching
 * rows with optional highlights, sort order and fetched documents. The
 * result is serialized as JSON, optionally wrapped in a JSONP callback.
 *
 * @param req  servlet request carrying query strings and search options
 * @param resp servlet response that receives the JSON body
 * @throws IOException   on index or response I/O failure
 * @throws JSONException if the JSON result cannot be built
 */
public void search(final HttpServletRequest req, final HttpServletResponse resp)
        throws IOException, JSONException {
    final IndexState state = getState(req, resp);
    if (state == null)
        return;
    // Borrow a searcher from the state; returned in the finally block below.
    final IndexSearcher searcher = state.borrowSearcher(isStaleOk(req));
    final String etag = state.getEtag();
    final FastVectorHighlighter fvh = new FastVectorHighlighter(true, true);
    final JSONArray result = new JSONArray();
    try {
        if (state.notModified(req)) {
            resp.setStatus(304);
            return;
        }
        for (final String queryString : getQueryStrings(req)) {
            final Analyzer analyzer = state.analyzer(req.getParameter("analyzer"));
            final Operator operator = "and".equalsIgnoreCase(req.getParameter("default_operator"))
                    ? Operator.AND
                    : Operator.OR;
            final Query q = state.parse(queryString, operator, analyzer);

            final JSONObject queryRow = new JSONObject();
            queryRow.put("q", q.toString());
            if (getBooleanParameter(req, "debug")) {
                queryRow.put("plan", QueryPlan.toPlan(q));
                queryRow.put("analyzer", analyzer.getClass());
            }
            queryRow.put("etag", etag);
            if (getBooleanParameter(req, "rewrite")) {
                // Debug mode: report the rewritten query and the document
                // frequency of each of its terms instead of searching.
                final Query rewritten_q = q.rewrite(searcher.getIndexReader());
                queryRow.put("rewritten_q", rewritten_q.toString());

                final JSONObject freqs = new JSONObject();

                final Set<Term> terms = new HashSet<Term>();
                rewritten_q.extractTerms(terms);
                for (final Object term : terms) {
                    final int freq = searcher.getIndexReader().docFreq((Term) term);
                    freqs.put(term.toString(), freq);
                }
                queryRow.put("freqs", freqs);
            } else {
                // Perform the search.
                final TopDocs td;
                final StopWatch stopWatch = new StopWatch();

                final boolean include_docs = getBooleanParameter(req, "include_docs");
                final int highlights = getIntParameter(req, "highlights", 0);
                final int highlight_length = max(getIntParameter(req, "highlight_length", 18), 18); // min for fast term vector highlighter is 18
                final boolean include_termvectors = getBooleanParameter(req, "include_termvectors");
                final int limit = getIntParameter(req, "limit", ini.getInt("lucene.limit", 25));
                final Sort sort = CustomQueryParser.toSort(req.getParameter("sort"));
                final int skip = getIntParameter(req, "skip", 0);

                // Optional projection: only load the requested stored fields.
                final Set<String> fieldsToLoad;
                if (req.getParameter("include_fields") == null) {
                    fieldsToLoad = null;
                } else {
                    final String[] fields = Utils.splitOnCommas(req.getParameter("include_fields"));
                    final List<String> list = Arrays.asList(fields);
                    fieldsToLoad = new HashSet<String>(list);
                }

                if (sort == null) {
                    td = searcher.search(q, null, skip + limit);
                } else {
                    td = searcher.search(q, null, skip + limit, sort);
                }
                stopWatch.lap("search");

                // Fetch matches (if any).
                final int max = Math.max(0, Math.min(td.totalHits - skip, limit));
                final JSONArray rows = new JSONArray();
                final String[] fetch_ids = new String[max];
                for (int i = skip; i < skip + max; i++) {
                    final Document doc;
                    if (fieldsToLoad == null) {
                        doc = searcher.doc(td.scoreDocs[i].doc);
                    } else {
                        doc = searcher.doc(td.scoreDocs[i].doc, fieldsToLoad);
                    }

                    final JSONObject row = new JSONObject();
                    final JSONObject fields = new JSONObject();
                    final JSONObject highlight_rows = new JSONObject();

                    // Include stored fields.
                    for (final IndexableField f : doc.getFields()) {
                        if (!f.fieldType().stored()) {
                            continue;
                        }
                        final String name = f.name();
                        final Object value;
                        if (f.numericValue() != null) {
                            value = f.numericValue();
                        } else {
                            value = f.stringValue();
                        }
                        if (value != null) {
                            if ("_id".equals(name)) {
                                row.put("id", value);
                            } else {
                                // Repeated fields accumulate into a JSON array.
                                if (!fields.has(name)) {
                                    fields.put(name, value);
                                } else {
                                    final Object obj = fields.get(name);
                                    if (obj instanceof String || obj instanceof Number) {
                                        final JSONArray arr = new JSONArray();
                                        arr.put(obj);
                                        arr.put(value);
                                        fields.put(name, arr);
                                    } else {
                                        assert obj instanceof JSONArray;
                                        ((JSONArray) obj).put(value);
                                    }
                                }

                                if (highlights > 0) {
                                    String[] frags = fvh.getBestFragments(fvh.getFieldQuery(q),
                                            searcher.getIndexReader(), td.scoreDocs[i].doc, name,
                                            highlight_length, highlights);
                                    highlight_rows.put(name, frags);
                                }
                            }
                        }
                    }

                    if (!Float.isNaN(td.scoreDocs[i].score)) {
                        row.put("score", td.scoreDocs[i].score);
                    } // Include sort order (if any).
                    if (td instanceof TopFieldDocs) {
                        final FieldDoc fd = (FieldDoc) ((TopFieldDocs) td).scoreDocs[i];
                        row.put("sort_order", fd.fields);
                    }
                    // Fetch document (if requested).
                    if (include_docs) {
                        fetch_ids[i - skip] = doc.get("_id");
                    }
                    if (fields.length() > 0) {
                        row.put("fields", fields);
                    }
                    if (highlight_rows.length() > 0) {
                        row.put("highlights", highlight_rows);
                    }

                    rows.put(row);
                }
                // Fetch documents (if requested).
                if (include_docs && fetch_ids.length > 0) {
                    final List<CouchDocument> fetched_docs = database.getDocuments(fetch_ids);
                    for (int j = 0; j < max; j++) {
                        final CouchDocument doc = fetched_docs.get(j);
                        final JSONObject row = doc == null ? new JSONObject("{\"error\":\"not_found\"}")
                                : doc.asJson();
                        rows.getJSONObject(j).put("doc", row);
                    }
                }
                stopWatch.lap("fetch");

                queryRow.put("skip", skip);
                queryRow.put("limit", limit);
                queryRow.put("total_rows", td.totalHits);
                queryRow.put("search_duration", stopWatch.getElapsed("search"));
                queryRow.put("fetch_duration", stopWatch.getElapsed("fetch"));
                // Include sort info (if requested).
                if (td instanceof TopFieldDocs) {
                    queryRow.put("sort_order", CustomQueryParser.toJSON(((TopFieldDocs) td).fields));
                }
                queryRow.put("rows", rows);
            }
            result.put(queryRow);
        }
    } catch (final ParseException e) {
        ServletUtils.sendJsonError(req, resp, 400, "Bad query syntax: " + e.getMessage());
        return;
    } finally {
        state.returnSearcher(searcher);
    }

    resp.setHeader("ETag", etag);
    resp.setHeader("Cache-Control", "must-revalidate");
    ServletUtils.setResponseContentTypeAndEncoding(req, resp);

    // A single query yields a bare object; multiple queries yield an array.
    final Object json = result.length() > 1 ? result : result.getJSONObject(0);
    final String callback = req.getParameter("callback");
    final String body;
    if (callback != null) {
        body = String.format("%s(%s)", callback, json);
    } else {
        if (json instanceof JSONObject) {
            final JSONObject obj = (JSONObject) json;
            body = getBooleanParameter(req, "debug") ? obj.toString(2) : obj.toString();
        } else {
            final JSONArray arr = (JSONArray) json;
            body = getBooleanParameter(req, "debug") ? arr.toString(2) : arr.toString();
        }
    }

    final Writer writer = resp.getWriter();
    try {
        writer.write(body);
    } finally {
        writer.close();
    }
}

From source file:com.jaeksoft.searchlib.index.ReaderLocal.java

License:Open Source License

/**
 * Rewrites the query against this reader's underlying {@code IndexReader},
 * wrapping any I/O failure in a {@code SearchLibException}.
 */
@Override
public Query rewrite(Query query) throws SearchLibException {
    final Query rewritten;
    try {
        rewritten = query.rewrite(indexReader);
    } catch (IOException e) {
        throw new SearchLibException(e);
    }
    return rewritten;
}

From source file:com.joliciel.jochre.search.highlight.LuceneQueryHighlighter.java

License:Open Source License

/**
 * Rewrites {@code original} against an empty IndexReader until the query
 * reaches a fixed point. Rewriting against an empty reader avoids expansions
 * (such as range queries) that depend on the documents of a real index and
 * would not summarize the document.
 */
private static Query rewrite(Query original) {
    try {
        Query current = original;
        Query next = current.rewrite(EMPTY_INDEXREADER);
        // rewrite() returns its receiver once nothing more can be rewritten.
        while (next != current) {
            current = next;
            next = current.rewrite(EMPTY_INDEXREADER);
        }
        return current;
    } catch (IOException e) {
        LogUtils.logError(LOG, e);
        throw new RuntimeException(e);
    }
}