List of usage examples for the org.apache.lucene.search.BooleanQuery constructor (BooleanQuery())
From source file:com.querydsl.lucene5.AbstractLuceneQuery.java
License:Apache License
protected Query createQuery() {
    Query returnedQuery = null;
    Query originalQuery = null;
    if (queryMixin.getMetadata().getWhere() == null) {
        originalQuery = new MatchAllDocsQuery();
    } else {
        originalQuery = serializer.toQuery(queryMixin.getMetadata().getWhere(), queryMixin.getMetadata());
    }
    Filter filter = getFilter();
    if (filter != null) {
        BooleanQuery booleanQuery = new BooleanQuery();
        booleanQuery.add(originalQuery, Occur.MUST);
        booleanQuery.add(filter, Occur.FILTER);
        returnedQuery = booleanQuery;
    } else {
        returnedQuery = originalQuery;
    }
    return returnedQuery;
}
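Note that the no-argument BooleanQuery constructor used throughout these examples was deprecated in Lucene 5.3 and removed in 6.0 in favor of BooleanQuery.Builder. A minimal sketch of the same MUST + FILTER combination with the builder API (assuming Lucene 5.3 or later; the main query and filter clause here are placeholders):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class BuilderSketch {
    public static Query mainAndFilter() {
        Query main = new MatchAllDocsQuery();                       // stands in for the serialized where-clause
        Query filter = new TermQuery(new Term("status", "active")); // hypothetical filter clause
        return new BooleanQuery.Builder()
                .add(main, Occur.MUST)      // scored, required
                .add(filter, Occur.FILTER)  // required, but contributes nothing to the score
                .build();
    }
}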
From source file:com.querydsl.lucene5.AbstractLuceneQuery.java
License:Apache License
private Filter getFilter() {
    if (filter == null && !filters.isEmpty()) {
        BooleanQuery filterQuery = new BooleanQuery();
        for (Filter filter : filters) {
            filterQuery.add(filter, Occur.SHOULD);
        }
        filter = new QueryWrapperFilter(filterQuery);
    }
    return filter;
}
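With only SHOULD clauses and no required clause, a BooleanQuery matches a document when at least one clause matches, so this method builds the union of the registered filters. A minimal sketch of the same union with the builder API (the field values are made up for illustration):

import java.util.Arrays;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class UnionSketch {
    public static Query anyOf() {
        List<Query> alternatives = Arrays.asList(
                new TermQuery(new Term("type", "pdf")),   // hypothetical alternative
                new TermQuery(new Term("type", "html"))); // hypothetical alternative
        BooleanQuery.Builder union = new BooleanQuery.Builder();
        for (Query q : alternatives) {
            union.add(q, Occur.SHOULD); // any one matching clause is enough
        }
        return union.build();
    }
}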
From source file:com.querydsl.lucene5.LuceneSerializer.java
License:Apache License
private Query toQuery(Operation<?> operation, QueryMetadata metadata) {
    Operator op = operation.getOperator();
    if (op == Ops.OR) {
        return toTwoHandSidedQuery(operation, Occur.SHOULD, metadata);
    } else if (op == Ops.AND) {
        return toTwoHandSidedQuery(operation, Occur.MUST, metadata);
    } else if (op == Ops.NOT) {
        BooleanQuery bq = new BooleanQuery();
        bq.add(new BooleanClause(toQuery(operation.getArg(0), metadata), Occur.MUST_NOT));
        bq.add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST));
        return bq;
    } else if (op == Ops.LIKE) {
        return like(operation, metadata);
    } else if (op == Ops.EQ) {
        return eq(operation, metadata, false);
    } else if (op == Ops.EQ_IGNORE_CASE) {
        throw new IgnoreCaseUnsupportedException();
    } else if (op == Ops.NE) {
        return ne(operation, metadata, false);
    } else if (op == Ops.STARTS_WITH) {
        return startsWith(metadata, operation, false);
    } else if (op == Ops.STARTS_WITH_IC) {
        throw new IgnoreCaseUnsupportedException();
    } else if (op == Ops.ENDS_WITH) {
        return endsWith(operation, metadata, false);
    } else if (op == Ops.ENDS_WITH_IC) {
        throw new IgnoreCaseUnsupportedException();
    } else if (op == Ops.STRING_CONTAINS) {
        return stringContains(operation, metadata, false);
    } else if (op == Ops.STRING_CONTAINS_IC) {
        throw new IgnoreCaseUnsupportedException();
    } else if (op == Ops.BETWEEN) {
        return between(operation, metadata);
    } else if (op == Ops.IN) {
        return in(operation, metadata, false);
    } else if (op == Ops.NOT_IN) {
        return notIn(operation, metadata, false);
    } else if (op == Ops.LT) {
        return lt(operation, metadata);
    } else if (op == Ops.GT) {
        return gt(operation, metadata);
    } else if (op == Ops.LOE) {
        return le(operation, metadata);
    } else if (op == Ops.GOE) {
        return ge(operation, metadata);
    } else if (op == LuceneOps.LUCENE_QUERY) {
        @SuppressWarnings("unchecked") // this is the expected type
        Constant<Query> expectedConstant = (Constant<Query>) operation.getArg(0);
        return expectedConstant.getConstant();
    }
    throw new UnsupportedOperationException("Illegal operation " + operation);
}
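The Ops.NOT branch shows a standard Lucene idiom: a BooleanQuery containing only MUST_NOT clauses matches nothing, because there is no positive clause to select documents from, so the negation is paired with a MatchAllDocsQuery MUST clause to produce the complement. A minimal sketch of that pattern in isolation (the negated term is a placeholder):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class NotSketch {
    public static Query not(Query negated) {
        return new BooleanQuery.Builder()
                .add(new MatchAllDocsQuery(), Occur.MUST) // positive clause: start from all documents
                .add(negated, Occur.MUST_NOT)             // then subtract the negated set
                .build();
    }

    public static void main(String[] args) {
        Query q = not(new TermQuery(new Term("status", "archived"))); // hypothetical field and value
        System.out.println(q);
    }
}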
From source file:com.querydsl.lucene5.LuceneSerializer.java
License:Apache License
protected Query in(Operation<?> operation, QueryMetadata metadata, boolean ignoreCase) {
    Path<?> path = getPath(operation.getArg(0));
    String field = toField(path);
    @SuppressWarnings("unchecked") // this is the expected type
    Constant<Collection<?>> expectedConstant = (Constant<Collection<?>>) operation.getArg(1);
    Collection<?> values = expectedConstant.getConstant();
    BooleanQuery bq = new BooleanQuery();
    if (Number.class.isAssignableFrom(path.getType())) {
        for (Object value : values) {
            TermQuery eq = new TermQuery(new Term(field, convertNumber((Number) value)));
            bq.add(eq, Occur.SHOULD);
        }
    } else {
        for (Object value : values) {
            String[] str = convert(path, value);
            bq.add(eq(field, str, ignoreCase), Occur.SHOULD);
        }
    }
    return bq;
}
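Each IN value becomes one SHOULD clause, so a very large collection can exceed BooleanQuery's clause limit (1024 by default) and raise BooleanQuery.TooManyClauses. A minimal sketch that guards a value list against the limit (the field name is illustrative; the alternatives named in the comment depend on your Lucene version):

import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class InSketch {
    public static Query in(String field, List<String> values) {
        if (values.size() > BooleanQuery.getMaxClauseCount()) {
            // alternatives: raise the limit via BooleanQuery.setMaxClauseCount,
            // or use a set-oriented query (TermsQuery / TermInSetQuery) where available
            throw new IllegalArgumentException("too many IN values: " + values.size());
        }
        BooleanQuery.Builder bq = new BooleanQuery.Builder();
        for (String value : values) {
            bq.add(new TermQuery(new Term(field, value)), Occur.SHOULD);
        }
        return bq.build();
    }
}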
From source file:com.recomdata.search.Finder.java
License:Open Source License
private Query buildQuery(LinkedHashMap<String, ArrayList<String>> searchTerms) {
    BooleanQuery andQuery = new BooleanQuery();
    for (String key : searchTerms.keySet()) {
        ArrayList<String> list = searchTerms.get(key);
        ArrayList<Query> queries = new ArrayList<Query>();
        for (String value : list) {
            if (value.indexOf(" ") == -1) {
                Term term = new Term("contents", value.toLowerCase());
                TermQuery termQuery = new TermQuery(term);
                queries.add(termQuery);
            } else {
                String[] values = value.split(" ");
                PhraseQuery phraseQuery = new PhraseQuery();
                for (String v : values) {
                    Term term = new Term("contents", v.toLowerCase());
                    phraseQuery.add(term);
                }
                queries.add(phraseQuery);
            }
        }
        addQueries(andQuery, queries);
    }
    return andQuery;
}
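The one-word/multi-word split here (TermQuery for a single token, PhraseQuery for several) is a common pattern. A minimal sketch of the same decision with the builder APIs available from Lucene 5.3 on (the field name is an assumption carried over from the example):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class TermOrPhraseSketch {
    public static Query termOrPhrase(String field, String value) {
        String lowered = value.toLowerCase();
        if (!lowered.contains(" ")) {
            return new TermQuery(new Term(field, lowered)); // one token: exact term match
        }
        PhraseQuery.Builder phrase = new PhraseQuery.Builder();
        for (String word : lowered.split(" ")) {
            phrase.add(new Term(field, word)); // terms must appear adjacent, in this order
        }
        return phrase.build();
    }
}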
From source file:com.recomdata.search.Finder.java
License:Open Source License
private Filter buildFilter(LinkedHashMap<String, ArrayList<String>> filterTerms) {
    BooleanQuery andQuery = new BooleanQuery();
    if (filterTerms.containsKey("REPOSITORY")) {
        ArrayList<String> list = filterTerms.get("REPOSITORY");
        ArrayList<Query> queries = new ArrayList<Query>();
        for (String value : list) {
            Term term = new Term("repository", value);
            TermQuery termQuery = new TermQuery(term);
            queries.add(termQuery);
        }
        addQueries(andQuery, queries);
    }
    if (filterTerms.containsKey("PATH")) {
        try {
            ArrayList<String> list = filterTerms.get("PATH");
            if (list.size() > 0) {
                StringReader reader = new StringReader(list.get(0));
                StandardAnalyzer analyzer = new StandardAnalyzer();
                TokenStream tokenizer = analyzer.tokenStream("path", reader);
                PhraseQuery phraseQuery = new PhraseQuery();
                Token token = new Token();
                for (token = tokenizer.next(token); token != null; token = tokenizer.next(token)) {
                    Term term = new Term("path", token.term());
                    phraseQuery.add(term);
                }
                andQuery.add(phraseQuery, BooleanClause.Occur.MUST);
            }
        } catch (IOException ex) {
            // do nothing
        }
    }
    if (filterTerms.containsKey("EXTENSION")) {
        ArrayList<String> list = filterTerms.get("EXTENSION");
        ArrayList<Query> queries = new ArrayList<Query>();
        for (String value : list) {
            Term term = new Term("extension", value.toLowerCase());
            TermQuery termQuery = new TermQuery(term);
            queries.add(termQuery);
        }
        addQueries(andQuery, queries);
    }
    if (filterTerms.containsKey("NOTEXTENSION")) {
        ArrayList<String> list = filterTerms.get("NOTEXTENSION");
        for (String value : list) {
            Term term = new Term("extension", value.toLowerCase());
            TermQuery termQuery = new TermQuery(term);
            andQuery.add(termQuery, BooleanClause.Occur.MUST_NOT);
        }
    }
    if (andQuery.clauses().size() > 0) {
        return new QueryWrapperFilter(andQuery);
    }
    return null;
}
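The PATH branch iterates tokens with the pre-Lucene-3 Token/next(Token) API. On current Lucene versions the equivalent loop uses incrementToken() with a CharTermAttribute; a minimal sketch of building the same phrase from analyzed tokens, assuming Lucene 5.3+ (field name kept from the example):

import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.PhraseQuery;

public class PathPhraseSketch {
    public static PhraseQuery pathPhrase(String path) throws IOException {
        PhraseQuery.Builder phrase = new PhraseQuery.Builder();
        try (StandardAnalyzer analyzer = new StandardAnalyzer();
             TokenStream ts = analyzer.tokenStream("path", new StringReader(path))) {
            CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
            ts.reset(); // required before the first incrementToken()
            while (ts.incrementToken()) {
                phrase.add(new Term("path", termAtt.toString()));
            }
            ts.end(); // consume end-of-stream state
        }
        return phrase.build();
    }
}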
From source file:com.recomdata.search.Finder.java
License:Open Source License
private void addQueries(BooleanQuery andQuery, ArrayList<Query> queries) {
    if (queries.size() == 1) {
        andQuery.add(queries.get(0), BooleanClause.Occur.MUST);
    } else if (queries.size() > 1) {
        BooleanQuery orQuery = new BooleanQuery();
        for (Query query : queries) {
            orQuery.add(query, BooleanClause.Occur.SHOULD);
        }
        andQuery.add(orQuery, BooleanClause.Occur.MUST);
    }
}
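Wrapping a SHOULD-group inside a MUST clause is how Lucene expresses (a OR b) AND (c OR d). A minimal sketch of the nesting with the builder API (the terms are placeholders):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

public class AndOfOrsSketch {
    public static Query build() {
        Query group1 = new BooleanQuery.Builder()
                .add(new TermQuery(new Term("contents", "cat")), Occur.SHOULD)
                .add(new TermQuery(new Term("contents", "dog")), Occur.SHOULD)
                .build(); // cat OR dog
        Query group2 = new BooleanQuery.Builder()
                .add(new TermQuery(new Term("contents", "food")), Occur.SHOULD)
                .add(new TermQuery(new Term("contents", "toy")), Occur.SHOULD)
                .build(); // food OR toy
        return new BooleanQuery.Builder()
                .add(group1, Occur.MUST)
                .add(group2, Occur.MUST)
                .build(); // (cat OR dog) AND (food OR toy)
    }
}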
From source file:com.search.lucene.demo.facet.DistanceFacetsExample.java
License:Apache License
/**
 * Given a latitude and longitude (in degrees) and the
 * maximum great circle (surface of the earth) distance,
 * returns a simple Filter bounding box to "fast match"
 * candidates.
 */
public static Filter getBoundingBoxFilter(double originLat, double originLng, double maxDistanceKM) {

    // Basic bounding box geo math from
    // http://JanMatuschek.de/LatitudeLongitudeBoundingCoordinates,
    // licensed under creative commons 3.0:
    // http://creativecommons.org/licenses/by/3.0

    // TODO: maybe switch to recursive prefix tree instead
    // (in lucene/spatial)?  It should be more efficient
    // since it's a 2D trie...

    // Degrees -> Radians:
    double originLatRadians = Math.toRadians(originLat);
    double originLngRadians = Math.toRadians(originLng);

    double angle = maxDistanceKM / (SloppyMath.earthDiameter(originLat) / 2.0);

    double minLat = originLatRadians - angle;
    double maxLat = originLatRadians + angle;

    double minLng;
    double maxLng;
    if (minLat > Math.toRadians(-90) && maxLat < Math.toRadians(90)) {
        double delta = Math.asin(Math.sin(angle) / Math.cos(originLatRadians));
        minLng = originLngRadians - delta;
        if (minLng < Math.toRadians(-180)) {
            minLng += 2 * Math.PI;
        }
        maxLng = originLngRadians + delta;
        if (maxLng > Math.toRadians(180)) {
            maxLng -= 2 * Math.PI;
        }
    } else {
        // The query includes a pole!
        minLat = Math.max(minLat, Math.toRadians(-90));
        maxLat = Math.min(maxLat, Math.toRadians(90));
        minLng = Math.toRadians(-180);
        maxLng = Math.toRadians(180);
    }

    BooleanQuery f = new BooleanQuery();

    // Add latitude range filter:
    f.add(NumericRangeQuery.newDoubleRange("latitude", Math.toDegrees(minLat), Math.toDegrees(maxLat), true, true),
            BooleanClause.Occur.FILTER);

    // Add longitude range filter:
    if (minLng > maxLng) {
        // The bounding box crosses the international date line:
        BooleanQuery lonF = new BooleanQuery();
        lonF.add(NumericRangeQuery.newDoubleRange("longitude", Math.toDegrees(minLng), null, true, true),
                BooleanClause.Occur.SHOULD);
        lonF.add(NumericRangeQuery.newDoubleRange("longitude", null, Math.toDegrees(maxLng), true, true),
                BooleanClause.Occur.SHOULD);
        f.add(lonF, BooleanClause.Occur.MUST);
    } else {
        f.add(NumericRangeQuery.newDoubleRange("longitude", Math.toDegrees(minLng), Math.toDegrees(maxLng), true, true),
                BooleanClause.Occur.FILTER);
    }

    return new QueryWrapperFilter(f);
}
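When the bounding box wraps past +/-180 degrees (minLng ends up greater than maxLng, e.g. a box from 170 to -170), no single range can cover it, so the example unions two open-ended ranges with SHOULD. A minimal standalone sketch of that longitude handling, using the same Lucene 5.x NumericRangeQuery API as the example (null bounds are open-ended):

import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;

public class DateLineSketch {
    // Longitude range in degrees that may wrap at +/-180, e.g. minLng=170, maxLng=-170.
    public static Query longitudeRange(double minLng, double maxLng) {
        if (minLng > maxLng) {
            // Wrapped box: match [minLng, 180] OR [-180, maxLng].
            return new BooleanQuery.Builder()
                    .add(NumericRangeQuery.newDoubleRange("longitude", minLng, null, true, true), Occur.SHOULD)
                    .add(NumericRangeQuery.newDoubleRange("longitude", null, maxLng, true, true), Occur.SHOULD)
                    .build();
        }
        return NumericRangeQuery.newDoubleRange("longitude", minLng, maxLng, true, true);
    }
}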
From source file:com.searchbox.solr.CategoryLikeThis.java
License:Apache License
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    numRequests++;
    long startTime = System.currentTimeMillis();
    if (!keystate) {
        LOGGER.error("License key failure, not performing clt query. Please email contact@searchbox.com for more information.");
        return;
    }
    try {
        SolrParams params = req.getParams();
        String senseField = params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD);
        BooleanQuery catfilter = new BooleanQuery();

        // Set field flags
        ReturnFields returnFields = new SolrReturnFields(req);
        rsp.setReturnFields(returnFields);
        int flags = 0;
        if (returnFields.wantsScore()) {
            flags |= SolrIndexSearcher.GET_SCORES;
        }

        String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
        String q = params.get(CommonParams.Q);
        Query query = null;
        SortSpec sortSpec = null;
        List<Query> filters = new LinkedList<Query>();
        List<RealTermFreqVector> prototypetfs = new LinkedList<RealTermFreqVector>();

        try {
            if (q != null) {
                QParser parser = QParser.getParser(q, defType, req);
                query = parser.getQuery();
                sortSpec = parser.getSort(true);
            }
            String[] fqs = req.getParams().getParams(CommonParams.FQ);
            if (fqs != null && fqs.length != 0) {
                for (String fq : fqs) {
                    if (fq != null && fq.trim().length() != 0) {
                        QParser fqp = QParser.getParser(fq, null, req);
                        filters.add(fqp.getQuery());
                    }
                }
            }
        } catch (Exception e) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }

        SolrIndexSearcher searcher = req.getSearcher();
        DocListAndSet cltDocs = null;

        // Parse Required Params
        // This will either have a single Reader or valid query
        Reader reader = null;
        try {
            if (q == null || q.trim().length() < 1) {
                Iterable<ContentStream> streams = req.getContentStreams();
                if (streams != null) {
                    Iterator<ContentStream> iter = streams.iterator();
                    if (iter.hasNext()) {
                        reader = iter.next().getReader();
                    }
                    if (iter.hasNext()) {
                        numErrors++;
                        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                                "SenseLikeThis does not support multiple ContentStreams");
                    }
                }
            }

            int start = params.getInt(CommonParams.START, 0);
            int rows = params.getInt(CommonParams.ROWS, 10);

            // Find documents SenseLikeThis - either with a reader or a query
            // --------------------------------------------------------------------------------
            if (reader != null) {
                numErrors++;
                throw new RuntimeException("SLT based on a reader is not yet implemented");
            } else if (q != null) {
                LOGGER.debug("Query for category:\t" + query);
                DocList match = searcher.getDocList(query, null, null, 0, 10, flags); // get first 10
                if (match.size() == 0) { // no docs to make prototype!
                    LOGGER.info("No documents found for prototype!");
                    rsp.add("response", new DocListAndSet());
                    return;
                }

                HashMap<String, Float> overallFreqMap = new HashMap<String, Float>();
                // Create the TF of blah blah blah
                DocIterator iterator = match.iterator();
                while (iterator.hasNext()) {
                    // do a MoreLikeThis query for each document in results
                    int id = iterator.nextDoc();
                    LOGGER.trace("Working on doc:\t" + id);
                    RealTermFreqVector rtv = new RealTermFreqVector(id, searcher.getIndexReader(), senseField);
                    for (int zz = 0; zz < rtv.getSize(); zz++) {
                        Float prev = overallFreqMap.get(rtv.getTerms()[zz]);
                        if (prev == null) {
                            prev = 0f;
                        }
                        overallFreqMap.put(rtv.getTerms()[zz], rtv.getFreqs()[zz] + prev);
                    }
                    prototypetfs.add(rtv);
                }

                List<String> sortedKeys = Ordering.natural().onResultOf(Functions.forMap(overallFreqMap))
                        .immutableSortedCopy(overallFreqMap.keySet());
                int keyiter = Math.min(sortedKeys.size() - 1, BooleanQuery.getMaxClauseCount() - 1);
                LOGGER.debug("I have this many terms:\t" + sortedKeys.size());
                LOGGER.debug("And i'm going to use this many:\t" + keyiter);
                for (; keyiter >= 0; keyiter--) {
                    TermQuery tq = new TermQuery(new Term(senseField, sortedKeys.get(keyiter)));
                    catfilter.add(tq, BooleanClause.Occur.SHOULD);
                }
            } else {
                numErrors++;
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                        "CategoryLikeThis requires either a query (?q=) or text to find similar documents.");
            }

            LOGGER.debug("document filter is: \t" + catfilter);
            CategorizationBase model = new CategorizationBase(prototypetfs);
            CategoryQuery clt = CategoryQuery.CategoryQueryForDocument(catfilter, model, searcher.getIndexReader(), senseField);
            DocSet filtered = searcher.getDocSet(filters);
            cltDocs = searcher.getDocListAndSet(clt, filtered, Sort.RELEVANCE, start, rows, flags);
        } finally {
            if (reader != null) {
                reader.close();
            }
        }

        if (cltDocs == null) {
            numEmpty++;
            cltDocs = new DocListAndSet(); // avoid NPE
        }
        rsp.add("response", cltDocs.docList);

        // maybe facet the results
        if (params.getBool(FacetParams.FACET, false)) {
            if (cltDocs.docSet == null) {
                rsp.add("facet_counts", null);
            } else {
                SimpleFacets f = new SimpleFacets(req, cltDocs.docSet, params);
                rsp.add("facet_counts", f.getFacetCounts());
            }
        }
    } catch (Exception e) {
        numErrors++;
    } finally {
        totalTime += System.currentTimeMillis() - startTime;
    }
}
From source file:com.searchbox.solr.SenseLikeThisHandler.java
License:Apache License
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    NamedList<Object> timinginfo = new NamedList<Object>();
    numRequests++;
    long startTime = System.currentTimeMillis();
    long lstartTime = System.currentTimeMillis();
    if (!keystate) {
        LOGGER.error("License key failure, not performing sense query. Please email contact@searchbox.com for more information.");
        return;
    }
    boolean fromcache = false;
    try {
        SolrParams params = req.getParams();
        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        HashSet<String> toIgnore = new HashSet<String>();
        toIgnore.add("start");
        toIgnore.add("rows");
        toIgnore.add("fl");
        toIgnore.add("wt");
        toIgnore.add("indent");
        SolrCacheKey key = new SolrCacheKey(params, toIgnore);

        // Set field flags
        ReturnFields returnFields = new SolrReturnFields(req);
        rsp.setReturnFields(returnFields);
        int flags = 0;
        if (returnFields.wantsScore()) {
            flags |= SolrIndexSearcher.GET_SCORES;
        }

        String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
        String q = params.get(CommonParams.Q);
        Query query = null;
        QueryReductionFilter qr = null;
        SortSpec sortSpec = null;
        List<Query> filters = new ArrayList<Query>();

        try {
            if (q != null) {
                QParser parser = QParser.getParser(q, defType, req);
                query = parser.getQuery();
                sortSpec = parser.getSort(true);
            }
            String[] fqs = req.getParams().getParams(CommonParams.FQ);
            if (fqs != null && fqs.length != 0) {
                for (String fq : fqs) {
                    if (fq != null && fq.trim().length() != 0) {
                        QParser fqp = QParser.getParser(fq, null, req);
                        filters.add(fqp.getQuery());
                    }
                }
            }
        } catch (Exception e) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }
        timinginfo.add("Parse Query time", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Parsed Query Time:\t" + (System.currentTimeMillis() - lstartTime));
        lstartTime = System.currentTimeMillis();

        SolrIndexSearcher searcher = req.getSearcher();
        SchemaField uniqueKeyField = searcher.getSchema().getUniqueKeyField();

        // Find documents SenseLikeThis - either with a reader or a query
        // --------------------------------------------------------------------------------
        SenseQuery slt = null;
        if (q == null) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis requires either a query (?q=) or text to find similar documents.");
        }

        // Matching options
        boolean includeMatch = params.getBool(MoreLikeThisParams.MATCH_INCLUDE, true);
        int matchOffset = params.getInt(MoreLikeThisParams.MATCH_OFFSET, 0);
        // Find the base match
        DocList match = searcher.getDocList(query, null, null, matchOffset, 1, flags); // only get the first one...
        if (includeMatch) {
            rsp.add("match", match);
        }

        DocIterator iterator = match.iterator();
        if (!iterator.hasNext()) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis no document found matching request.");
        }
        int id = iterator.nextDoc();
        timinginfo.add("Find Query Doc", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Find Query Doc:\t" + (System.currentTimeMillis() - lstartTime));
        lstartTime = System.currentTimeMillis();

        SolrCache sc = searcher.getCache("com.searchbox.sltcache");
        DocListAndSet sltDocs = null;
        if (sc != null) {
            // try to get from cache
            sltDocs = (DocListAndSet) sc.get(key.getSet());
        } else {
            LOGGER.error("com.searchbox.sltcache not defined, can't cache slt queries");
        }
        if (start + rows > 1000 || sltDocs == null || !params.getBool(CommonParams.CACHE, true)) {
            // not in cache, need to do search
            BooleanQuery bq = new BooleanQuery();
            Document doc = searcher.getIndexReader().document(id);
            bq.add(new TermQuery(new Term(uniqueKeyField.getName(),
                    uniqueKeyField.getType().storedToIndexed(doc.getField(uniqueKeyField.getName())))),
                    BooleanClause.Occur.MUST_NOT);
            filters.add(bq);

            String[] senseFields = splitList.split(params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD));
            String senseField = (senseFields[0] != null) ? senseFields[0] : SenseParams.DEFAULT_SENSE_FIELD;
            // TODO more intelligent handling of multiple fields; can probably do a boolean junction
            // of multiple sense queries, but this will be slow
            long maxlength = -1;
            for (String possibleField : senseFields) {
                try {
                    long flength = doc.getField(possibleField).stringValue().length();
                    if (flength > maxlength) {
                        senseField = possibleField;
                        maxlength = flength;
                    }
                } catch (Exception e) {
                    System.out.println("Error: " + e.getMessage());
                }
            }
            LOGGER.debug("Using sense field :\t" + senseField);

            String CKBid = params.get(SenseParams.SENSE_CKB, SenseParams.SENSE_CKB_DEFAULT);
            RealTermFreqVector rtv = new RealTermFreqVector(id, searcher.getIndexReader(), senseField);
            timinginfo.add("Make real term freq vector", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            qr = new QueryReductionFilter(rtv, CKBid, searcher, senseField);
            qr.setNumtermstouse(params.getInt(SenseParams.SENSE_QR_NTU, SenseParams.SENSE_QR_NTU_DEFAULT));
            qr.setThreshold(params.getInt(SenseParams.SENSE_QR_THRESH, SenseParams.SENSE_QR_THRESH_DEFAULT));
            qr.setMaxDocSubSet(params.getInt(SenseParams.SENSE_QR_MAXDOC, SenseParams.SENSE_QR_MAXDOC_DEFAULT));
            qr.setMinDocSetSizeForFilter(params.getInt(SenseParams.SENSE_MINDOC4QR, SenseParams.SENSE_MINDOC4QR_DEFAULT));
            numTermsUsed += qr.getNumtermstouse();
            numTermsConsidered += rtv.getSize();
            timinginfo.add("Setup SLT query", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Setup SLT query:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            DocList subFiltered = qr.getSubSetToSearchIn(filters);
            timinginfo.add("Do Query Redux", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Do query redux:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            numFiltered += qr.getFiltered().docList.size();
            numSubset += subFiltered.size();
            LOGGER.info("Number of documents to search:\t" + subFiltered.size());

            slt = new SenseQuery(rtv, senseField, CKBid,
                    params.getFloat(SenseParams.SENSE_WEIGHT, SenseParams.DEFAULT_SENSE_WEIGHT), null);
            LOGGER.debug("Setup sense query:\t" + (System.currentTimeMillis() - lstartTime));
            timinginfo.add("Setup sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            sltDocs = searcher.getDocListAndSet(slt, subFiltered, Sort.RELEVANCE, 0, 1000, flags);
            timinginfo.add("Do sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            LOGGER.debug("Adding this key to cache:\t" + key.getSet().toString());
            searcher.getCache("com.searchbox.sltcache").put(key.getSet(), sltDocs);
        } else {
            fromcache = true;
            timinginfo.add("Getting from cache", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Got result from cache");
            lstartTime = System.currentTimeMillis();
        }

        if (sltDocs == null) {
            numEmpty++;
            sltDocs = new DocListAndSet(); // avoid NPE
        }
        rsp.add("response", sltDocs.docList.subset(start, rows));

        // maybe facet the results
        if (params.getBool(FacetParams.FACET, false)) {
            if (sltDocs.docSet == null) {
                rsp.add("facet_counts", null);
            } else {
                SimpleFacets f = new SimpleFacets(req, sltDocs.docSet, params);
                rsp.add("facet_counts", f.getFacetCounts());
            }
        }
        timinginfo.add("Facet parts", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Facet parts:\t" + (System.currentTimeMillis() - lstartTime));

        // Debug info, not doing it for the moment.
        boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);
        boolean dbgQuery = false, dbgResults = false;
        if (dbg == false) { // if it's true, we are doing everything anyway.
            String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
            if (dbgParams != null) {
                for (int i = 0; i < dbgParams.length; i++) {
                    if (dbgParams[i].equals(CommonParams.QUERY)) {
                        dbgQuery = true;
                    } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                        dbgResults = true;
                    }
                }
            }
        } else {
            dbgQuery = true;
            dbgResults = true;
        }

        // Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
        if (dbg == true) {
            try {
                lstartTime = System.currentTimeMillis();
                NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, slt,
                        sltDocs.docList.subset(start, rows), dbgQuery, dbgResults);
                dbgInfo.add("Query freqs", slt.getAllTermsasString());
                if (null != dbgInfo) {
                    if (null != filters) {
                        dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                        List<String> fqs = new ArrayList<String>(filters.size());
                        for (Query fq : filters) {
                            fqs.add(QueryParsing.toString(fq, req.getSchema()));
                        }
                        dbgInfo.add("parsed_filter_queries", fqs);
                    }
                    if (null != qr) {
                        dbgInfo.add("QueryReduction", qr.getDbgInfo());
                    }
                    if (null != slt) {
                        dbgInfo.add("SLT", slt.getDbgInfo());
                    }
                    dbgInfo.add("fromcache", fromcache);
                    rsp.add("debug", dbgInfo);
                    timinginfo.add("Debugging parts", System.currentTimeMillis() - lstartTime);
                    dbgInfo.add("timings", timinginfo);
                }
            } catch (Exception e) {
                SolrException.log(SolrCore.log, "Exception during debug", e);
                rsp.add("exception_during_debug", SolrException.toStr(e));
            }
        }
    } catch (Exception e) {
        numErrors++;
        e.printStackTrace();
    } finally {
        totalTime += System.currentTimeMillis() - startTime;
    }
}