Example usage for org.apache.solr.common.params.CommonParams.ROWS

List of usage examples for org.apache.solr.common.params.CommonParams.ROWS

Introduction

This page lists example usages of org.apache.solr.common.params.CommonParams.ROWS collected from open-source projects.

Prototype

String ROWS

Document

number of documents to return starting at "start"
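
A minimal sketch of the two typical sides of this parameter, assuming a request handler that reads paging parameters and a SolrJ client that sets them; the class and method names below are illustrative, while the CommonParams constants and the SolrParams/SolrQuery calls are standard Solr API:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams;

public class RowsParamSketch {

    // Handler side: read paging parameters with the usual defaults.
    static int[] readPaging(SolrParams params) {
        int start = params.getInt(CommonParams.START, 0); // offset into the result set
        int rows = params.getInt(CommonParams.ROWS, 10);  // page size; Solr's default is 10
        return new int[] { start, rows };
    }

    // Client side: request the third page of 20 results.
    static SolrQuery thirdPageOfTwenty(String queryString) {
        SolrQuery q = new SolrQuery(queryString);
        q.set(CommonParams.START, 40); // (3 - 1) * 20
        q.set(CommonParams.ROWS, 20);
        return q;
    }
}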

Usage

From source file:com.doculibre.constellio.opensearch.OpenSearchSolrServer.java

License:Open Source License

@Override
public NamedList<Object> request(SolrRequest request) throws SolrServerException, IOException {
    SolrParams params = request.getParams();
    if (params == null) {
        params = new ModifiableSolrParams();
    }

    String openSearchURL = params.get("openSearchURL");
    if (openSearchURL == null) {
        throw new SolrServerException("openSearchURL param is missing");
    }
    String query = params.get(CommonParams.Q);
    int start = params.getInt(CommonParams.START, 0);
    int hitsPerPage = params.getInt(CommonParams.ROWS, 10);
    String lang = params.get("lang");

    Map<String, String> paramsMap = new HashMap<String, String>();
    if (SimpleSearch.SEARCH_ALL.equals(query)) {
        query = "url:http";
    }
    paramsMap.put("query", query);
    if (StringUtils.isNotBlank(lang)) {
        paramsMap.put("lang", "" + lang);
    }
    paramsMap.put("start", "" + start);
    paramsMap.put("hitsPerPage", "" + hitsPerPage);
    // FIXME
    paramsMap.put("hitsPerDup", "" + Integer.MAX_VALUE);
    Element rootElement = sendGet(openSearchURL, paramsMap);

    SolrDocumentList solrDocumentList = parse(rootElement);
    SimpleOrderedMap<Object> result = new SimpleOrderedMap<Object>();
    result.add("response", solrDocumentList);
    return result;
}

From source file:com.doculibre.constellio.opensearch.OpenSearchSolrServer.java

License:Open Source License

public static void main(String[] args) throws Exception {
    SolrDocumentList solrDocumentList;
    //        
    // InputStream inputXML = OpenSearchSolrServer.class.getResourceAsStream("opensearch_example.xml");
    // SAXReader saxReader = new SAXReader();
    // Document doc = saxReader.read(inputXML);
    // IOUtils.closeQuietly(inputXML);
    //
    // System.out.println("Mock request");
    // solrDocumentList = parse(doc.getRootElement());
    // printResults(solrDocumentList);

    System.out.println("Real request");
    OpenSearchSolrServer solrServer = new OpenSearchSolrServer();
    ModifiableSolrParams params = new ModifiableSolrParams();
    params.add("openSearchURL", "http://recherched.gouv.qc.ca/internet/opensearch");
    params.add(CommonParams.Q, "doculibre");
    params.add(CommonParams.START, "5");
    params.add(CommonParams.ROWS, "10");
    params.add("lang", "en");
    NamedList<Object> results = solrServer.request(new QueryRequest(params));
    solrDocumentList = (SolrDocumentList) results.get("response");
    printResults(solrDocumentList);

    QueryResponse queryResponse = solrServer.query(params);
    solrDocumentList = queryResponse.getResults();
    printResults(solrDocumentList);
}

From source file:com.frank.search.common.PagerComponent.java

License:GNU General Public License

@Override
@SuppressWarnings("unchecked")
public void process(ResponseBuilder rb) throws IOException {
    /* get request params */
    SolrParams par = rb.req.getParams();
    int rows = par.getInt(CommonParams.ROWS, 0);
    int start = par.getInt(CommonParams.START, 0);
    int pages = par.getInt(PARAM_PAGER, 0);
    int pages_pre = par.getInt(PARAM_PAGER_PRE, 2);

    /* need to work? */
    if (pages == 0 || rows == 0)
        return;

    /* pager list */
    NamedList lst = new SimpleOrderedMap<Object>();
    NamedList lst2 = new SimpleOrderedMap<Object>();

    /* paging pages */
    int doc_count = rb.getResults().docSet.size();
    int page_count = doc_count / rows;
    int page_actual = start / rows;
    int page_pre = pages_pre;
    int page_post = pages - page_pre - 1;

    /* page range */
    if (page_actual - page_pre < 0) {
        page_post += -(page_actual - page_pre);
        page_pre -= -(page_actual - page_pre);
    } else if (page_actual + page_post > page_count) {
        page_post = pages - page_pre;
        page_pre = page_actual + pages - page_count;
    }

    /* sanity */
    if (page_pre < 0)
        page_pre = 0;
    if (page_post < 0)
        page_post = 0;

    /* next pages list */
    int i = (page_actual - page_pre);
    for (i = (i <= 0 ? 0 : i); i <= page_count && i <= (page_actual + page_post); i++)
        lst2.add(Integer.toString(i + 1), i * rows);
    lst.add("pages", lst2);

    /* navi */
    if (page_actual > 0)
        lst.add("prev", (page_actual - 1) * rows);
    if (page_actual - page_pre > 0)
        lst.add("first", 0);
    if (page_actual < page_count)
        lst.add("next", (page_actual + 1) * rows);
    if (page_actual + page_post < page_count)
        lst.add("last", page_count * rows);
    lst.add("actual", page_actual + 1);
    lst.add("count", page_count);

    /* finish */
    rb.rsp.add("pager", lst);
}
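
The paging arithmetic in process() above is easier to follow with concrete numbers. The values below are illustrative and not taken from the source project:

public class PagerMathExample {
    public static void main(String[] args) {
        // Hypothetical request: rows=10, start=40, pager=5, pager.pre=2, 95 matching documents.
        int rows = 10, start = 40, pages = 5, pagesPre = 2, docCount = 95;

        int pageCount = docCount / rows;      // 9 (integer division)
        int pageActual = start / rows;        // 4, displayed as page 5
        int pagePre = pagesPre;               // 2 pages listed before the current one
        int pagePost = pages - pagePre - 1;   // 2 pages listed after it

        // No boundary adjustment is needed for these values, so the component would
        // emit pages "3".."7" with start offsets 20..60, plus prev=30, first=0,
        // next=50, last=90, actual=5 and count=9.
        System.out.printf("pageCount=%d pageActual=%d pre=%d post=%d%n",
                pageCount, pageActual, pagePre, pagePost);
    }
}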

From source file:com.searchbox.solr.CategoryLikeThis.java

License:Apache License

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    numRequests++;
    long startTime = System.currentTimeMillis();
    if (!keystate) {
        LOGGER.error(
                "License key failure, not performing clt query. Please email contact@searchbox.com for more information.");
        return;
    }

    try {
        SolrParams params = req.getParams();
        String senseField = params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD);
        BooleanQuery catfilter = new BooleanQuery();
        // Set field flags
        ReturnFields returnFields = new SolrReturnFields(req);
        rsp.setReturnFields(returnFields);
        int flags = 0;
        if (returnFields.wantsScore()) {
            flags |= SolrIndexSearcher.GET_SCORES;
        }

        String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
        String q = params.get(CommonParams.Q);
        Query query = null;
        SortSpec sortSpec = null;
        List<Query> filters = new LinkedList<Query>();
        List<RealTermFreqVector> prototypetfs = new LinkedList<RealTermFreqVector>();

        try {
            if (q != null) {
                QParser parser = QParser.getParser(q, defType, req);
                query = parser.getQuery();
                sortSpec = parser.getSort(true);
            }

            String[] fqs = req.getParams().getParams(CommonParams.FQ);
            if (fqs != null && fqs.length != 0) {
                for (String fq : fqs) {
                    if (fq != null && fq.trim().length() != 0) {
                        QParser fqp = QParser.getParser(fq, null, req);
                        filters.add(fqp.getQuery());
                    }
                }
            }
        } catch (Exception e) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }

        SolrIndexSearcher searcher = req.getSearcher();
        DocListAndSet cltDocs = null;

        // Parse Required Params
        // This will either have a single Reader or valid query
        Reader reader = null;
        try {
            if (q == null || q.trim().length() < 1) {
                Iterable<ContentStream> streams = req.getContentStreams();
                if (streams != null) {
                    Iterator<ContentStream> iter = streams.iterator();
                    if (iter.hasNext()) {
                        reader = iter.next().getReader();
                    }
                    if (iter.hasNext()) {
                        numErrors++;
                        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                                "SenseLikeThis does not support multiple ContentStreams");
                    }
                }
            }

            int start = params.getInt(CommonParams.START, 0);
            int rows = params.getInt(CommonParams.ROWS, 10);

            // Find documents SenseLikeThis - either with a reader or a query
            // --------------------------------------------------------------------------------
            if (reader != null) {
                numErrors++;
                throw new RuntimeException("SLT based on a reader is not yet implemented");
            } else if (q != null) {

                LOGGER.debug("Query for category:\t" + query);
                DocList match = searcher.getDocList(query, null, null, 0, 10, flags); // get first 10
                if (match.size() == 0) { // no docs to make prototype!
                    LOGGER.info("No documents found for prototype!");
                    rsp.add("response", new DocListAndSet());
                    return;
                }

                HashMap<String, Float> overallFreqMap = new HashMap<String, Float>();
                // Create the TF of blah blah blah
                DocIterator iterator = match.iterator();
                while (iterator.hasNext()) {
                    // do a MoreLikeThis query for each document in results
                    int id = iterator.nextDoc();
                    LOGGER.trace("Working on doc:\t" + id);
                    RealTermFreqVector rtv = new RealTermFreqVector(id, searcher.getIndexReader(), senseField);
                    for (int zz = 0; zz < rtv.getSize(); zz++) {
                        Float prev = overallFreqMap.get(rtv.getTerms()[zz]);
                        if (prev == null) {
                            prev = 0f;
                        }
                        overallFreqMap.put(rtv.getTerms()[zz], rtv.getFreqs()[zz] + prev);
                    }
                    prototypetfs.add(rtv);
                }

                List<String> sortedKeys = Ordering.natural().onResultOf(Functions.forMap(overallFreqMap))
                        .immutableSortedCopy(overallFreqMap.keySet());
                int keyiter = Math.min(sortedKeys.size() - 1, BooleanQuery.getMaxClauseCount() - 1);
                LOGGER.debug("I have this many terms:\t" + sortedKeys.size());
                LOGGER.debug("And i'm going to use this many:\t" + keyiter);
                for (; keyiter >= 0; keyiter--) {
                    TermQuery tq = new TermQuery(new Term(senseField, sortedKeys.get(keyiter)));
                    catfilter.add(tq, BooleanClause.Occur.SHOULD);
                }

            } else {
                numErrors++;
                throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                        "CategoryLikeThis requires either a query (?q=) or text to find similar documents.");
            }

            LOGGER.debug("document filter is: \t" + catfilter);
            CategorizationBase model = new CategorizationBase(prototypetfs);
            CategoryQuery clt = CategoryQuery.CategoryQueryForDocument(catfilter, model,
                    searcher.getIndexReader(), senseField);
            DocSet filtered = searcher.getDocSet(filters);
            cltDocs = searcher.getDocListAndSet(clt, filtered, Sort.RELEVANCE, start, rows, flags);
        } finally {
            if (reader != null) {
                reader.close();
            }
        }

        if (cltDocs == null) {
            numEmpty++;
            cltDocs = new DocListAndSet(); // avoid NPE
        }
        rsp.add("response", cltDocs.docList);

        // maybe facet the results
        if (params.getBool(FacetParams.FACET, false)) {
            if (cltDocs.docSet == null) {
                rsp.add("facet_counts", null);
            } else {
                SimpleFacets f = new SimpleFacets(req, cltDocs.docSet, params);
                rsp.add("facet_counts", f.getFacetCounts());
            }
        }
    } catch (Exception e) {
        numErrors++;
    } finally {
        totalTime += System.currentTimeMillis() - startTime;
    }

}

From source file:com.searchbox.solr.SenseLikeThisHandler.java

License:Apache License

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    NamedList<Object> timinginfo = new NamedList<Object>();
    numRequests++;
    long startTime = System.currentTimeMillis();
    long lstartTime = System.currentTimeMillis();
    if (!keystate) {
        LOGGER.error(
                "License key failure, not performing sense query. Please email contact@searchbox.com for more information.");
        return;
    }

    boolean fromcache = false;

    try {
        SolrParams params = req.getParams();
        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        HashSet<String> toIgnore = (new HashSet<String>());
        toIgnore.add("start");
        toIgnore.add("rows");
        toIgnore.add("fl");
        toIgnore.add("wt");
        toIgnore.add("indent");

        SolrCacheKey key = new SolrCacheKey(params, toIgnore);

        // Set field flags
        ReturnFields returnFields = new SolrReturnFields(req);
        rsp.setReturnFields(returnFields);
        int flags = 0;
        if (returnFields.wantsScore()) {
            flags |= SolrIndexSearcher.GET_SCORES;
        }

        String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
        String q = params.get(CommonParams.Q);
        Query query = null;
        QueryReductionFilter qr = null;
        SortSpec sortSpec = null;
        List<Query> filters = new ArrayList<Query>();

        try {
            if (q != null) {
                QParser parser = QParser.getParser(q, defType, req);
                query = parser.getQuery();
                sortSpec = parser.getSort(true);
            }

            String[] fqs = req.getParams().getParams(CommonParams.FQ);
            if (fqs != null && fqs.length != 0) {
                for (String fq : fqs) {
                    if (fq != null && fq.trim().length() != 0) {
                        QParser fqp = QParser.getParser(fq, null, req);
                        filters.add(fqp.getQuery());
                    }
                }
            }
        } catch (Exception e) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }

        timinginfo.add("Parse Query time", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Parsed Query Time:\t" + (System.currentTimeMillis() - lstartTime));
        lstartTime = System.currentTimeMillis();

        SolrIndexSearcher searcher = req.getSearcher();
        SchemaField uniqueKeyField = searcher.getSchema().getUniqueKeyField();

        // Parse Required Params
        // This will either have a single Reader or valid query

        // Find documents SenseLikeThis - either with a reader or a query
        // --------------------------------------------------------------------------------
        SenseQuery slt = null;
        if (q == null) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis requires either a query (?q=) or text to find similar documents.");

        }
        // Matching options
        boolean includeMatch = params.getBool(MoreLikeThisParams.MATCH_INCLUDE, true);
        int matchOffset = params.getInt(MoreLikeThisParams.MATCH_OFFSET, 0);
        // Find the base match

        DocList match = searcher.getDocList(query, null, null, matchOffset, 1, flags); // only get the first one...
        if (includeMatch) {
            rsp.add("match", match);
        }

        DocIterator iterator = match.iterator();
        if (!iterator.hasNext()) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis no document found matching request.");
        }
        int id = iterator.nextDoc();

        timinginfo.add("Find Query Doc", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Find Query Doc:\t" + (System.currentTimeMillis() - lstartTime));
        lstartTime = System.currentTimeMillis();

        SolrCache sc = searcher.getCache("com.searchbox.sltcache");
        DocListAndSet sltDocs = null;
        if (sc != null) {
            //try to get from cache
            sltDocs = (DocListAndSet) sc.get(key.getSet());
        } else {
            LOGGER.error("com.searchbox.sltcache not defined, can't cache slt queries");
        }

        if (start + rows > 1000 || sltDocs == null || !params.getBool(CommonParams.CACHE, true)) { //not in cache, need to do search
            BooleanQuery bq = new BooleanQuery();
            Document doc = searcher.getIndexReader().document(id);
            bq.add(new TermQuery(new Term(uniqueKeyField.getName(),
                    uniqueKeyField.getType().storedToIndexed(doc.getField(uniqueKeyField.getName())))),
                    BooleanClause.Occur.MUST_NOT);
            filters.add(bq);

            String[] senseFields = splitList
                    .split(params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD));
            String senseField = (senseFields[0] != null) ? senseFields[0] : SenseParams.DEFAULT_SENSE_FIELD;

            //TODO more intelligent handling of multiple fields , can probably do a boolean junction of multiple sensequeries, but this will be slow
            long maxlength = -1;
            for (String possibleField : senseFields) {
                try {
                    long flength = doc.getField(possibleField).stringValue().length();
                    if (flength > maxlength) {
                        senseField = possibleField;
                        maxlength = flength;
                    }
                } catch (Exception e) {
                    System.out.println("Error: " + e.getMessage());
                }
            }

            LOGGER.debug("Using sense field :\t" + (senseField));

            String CKBid = params.get(SenseParams.SENSE_CKB, SenseParams.SENSE_CKB_DEFAULT);

            RealTermFreqVector rtv = new RealTermFreqVector(id, searcher.getIndexReader(), senseField);
            timinginfo.add("Make real term freq vector", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            qr = new QueryReductionFilter(rtv, CKBid, searcher, senseField);
            qr.setNumtermstouse(params.getInt(SenseParams.SENSE_QR_NTU, SenseParams.SENSE_QR_NTU_DEFAULT));
            qr.setThreshold(params.getInt(SenseParams.SENSE_QR_THRESH, SenseParams.SENSE_QR_THRESH_DEFAULT));
            qr.setMaxDocSubSet(params.getInt(SenseParams.SENSE_QR_MAXDOC, SenseParams.SENSE_QR_MAXDOC_DEFAULT));
            qr.setMinDocSetSizeForFilter(
                    params.getInt(SenseParams.SENSE_MINDOC4QR, SenseParams.SENSE_MINDOC4QR_DEFAULT));

            numTermsUsed += qr.getNumtermstouse();
            numTermsConsidered += rtv.getSize();

            timinginfo.add("Setup SLT query", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Setup SLT query:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            DocList subFiltered = qr.getSubSetToSearchIn(filters);
            timinginfo.add("Do Query Redux", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Do query redux:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            numFiltered += qr.getFiltered().docList.size();
            numSubset += subFiltered.size();
            LOGGER.info("Number of documents to search:\t" + subFiltered.size());

            slt = new SenseQuery(rtv, senseField, CKBid,
                    params.getFloat(SenseParams.SENSE_WEIGHT, SenseParams.DEFAULT_SENSE_WEIGHT), null);
            LOGGER.debug("Setup sense query:\t" + (System.currentTimeMillis() - lstartTime));
            timinginfo.add("Setup sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            sltDocs = searcher.getDocListAndSet(slt, subFiltered, Sort.RELEVANCE, 0, 1000, flags);
            timinginfo.add("Do sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            LOGGER.debug("Adding this keyto cache:\t" + key.getSet().toString());
            searcher.getCache("com.searchbox.sltcache").put(key.getSet(), sltDocs);

        } else {
            fromcache = true;
            timinginfo.add("Getting from cache", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Got result from cache");
            lstartTime = System.currentTimeMillis();
        }

        if (sltDocs == null) {
            numEmpty++;
            sltDocs = new DocListAndSet(); // avoid NPE
        }
        rsp.add("response", sltDocs.docList.subset(start, rows));

        // maybe facet the results
        if (params.getBool(FacetParams.FACET, false)) {
            if (sltDocs.docSet == null) {
                rsp.add("facet_counts", null);
            } else {
                SimpleFacets f = new SimpleFacets(req, sltDocs.docSet, params);
                rsp.add("facet_counts", f.getFacetCounts());
            }
        }
        timinginfo.add("Facet parts", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Facet parts:\t" + (System.currentTimeMillis() - lstartTime));

        // Debug info, not doing it for the moment.
        boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);

        boolean dbgQuery = false, dbgResults = false;
        if (dbg == false) {//if it's true, we are doing everything anyway.
            String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
            if (dbgParams != null) {
                for (int i = 0; i < dbgParams.length; i++) {
                    if (dbgParams[i].equals(CommonParams.QUERY)) {
                        dbgQuery = true;
                    } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                        dbgResults = true;
                    }
                }
            }
        } else {
            dbgQuery = true;
            dbgResults = true;
        }
        // Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
        if (dbg == true) {
            try {
                lstartTime = System.currentTimeMillis();
                NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, slt,
                        sltDocs.docList.subset(start, rows), dbgQuery, dbgResults);
                dbgInfo.add("Query freqs", slt.getAllTermsasString());
                if (null != dbgInfo) {
                    if (null != filters) {
                        dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                        List<String> fqs = new ArrayList<String>(filters.size());
                        for (Query fq : filters) {
                            fqs.add(QueryParsing.toString(fq, req.getSchema()));
                        }
                        dbgInfo.add("parsed_filter_queries", fqs);
                    }
                    if (null != qr) {
                        dbgInfo.add("QueryReduction", qr.getDbgInfo());
                    }
                    if (null != slt) {
                        dbgInfo.add("SLT", slt.getDbgInfo());

                    }

                    dbgInfo.add("fromcache", fromcache);
                    rsp.add("debug", dbgInfo);
                    timinginfo.add("Debugging parts", System.currentTimeMillis() - lstartTime);
                    dbgInfo.add("timings", timinginfo);
                }
            } catch (Exception e) {
                SolrException.log(SolrCore.log, "Exception during debug", e);
                rsp.add("exception_during_debug", SolrException.toStr(e));
            }
        }
    } catch (Exception e) {
        numErrors++;
        e.printStackTrace();
    } finally {
        totalTime += System.currentTimeMillis() - startTime;
    }

}
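
SenseLikeThisHandler pages through results with a cache-then-subset pattern: the SLT query is run once for the first 1000 documents, stored under a key built from the request parameters minus start/rows/fl/wt/indent, and each page is sliced out with DocList.subset(start, rows). The sketch below compresses just that pattern for illustration; the cache name and the 1000-document window come from the code above, while SltCachePagingSketch, page() and freshResult are hypothetical stand-ins:

import org.apache.solr.search.DocList;
import org.apache.solr.search.DocListAndSet;
import org.apache.solr.search.SolrCache;
import org.apache.solr.search.SolrIndexSearcher;

public class SltCachePagingSketch {

    static final int WINDOW = 1000; // the handler always fetches the first 1000 documents

    static DocList page(SolrIndexSearcher searcher, Object cacheKey,
            DocListAndSet freshResult, int start, int rows) {
        // freshResult stands in for the result of the actual SLT search
        // (searcher.getDocListAndSet(slt, subFiltered, Sort.RELEVANCE, 0, WINDOW, flags)).
        SolrCache cache = searcher.getCache("com.searchbox.sltcache"); // null unless configured in solrconfig.xml
        DocListAndSet window = (cache != null) ? (DocListAndSet) cache.get(cacheKey) : null;

        if (window == null || start + rows > WINDOW) {
            // Cache miss, or the page reaches past the cached window: use the fresh result and remember it.
            window = freshResult;
            if (cache != null) {
                cache.put(cacheKey, window);
            }
        }
        // Every page served from the window is just a slice of it.
        return window.docList.subset(start, rows);
    }
}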

From source file:com.searchbox.solr.SenseLikeThisHandlerNoReduction.java

License:Apache License

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrParams params = req.getParams();

    if (!keystate) {
        LOGGER.error(
                "License key failure, not performing sense query. Please email contact@searchbox.com for more information.");
        return;
    }

    int docID;
    // Set field flags
    ReturnFields returnFields = new SolrReturnFields(req);
    rsp.setReturnFields(returnFields);
    int flags = 0;
    if (returnFields.wantsScore()) {
        flags |= SolrIndexSearcher.GET_SCORES;
    }

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
    String q = params.get(CommonParams.Q);
    Query query = null;
    SortSpec sortSpec = null;
    List<Query> filters = new ArrayList<Query>();

    try {
        if (q != null) {
            QParser parser = QParser.getParser(q, defType, req);
            query = parser.getQuery();
            sortSpec = parser.getSort(true);
        }

        String[] fqs = req.getParams().getParams(CommonParams.FQ);
        if (fqs != null && fqs.length != 0) {
            for (String fq : fqs) {
                if (fq != null && fq.trim().length() != 0) {
                    QParser fqp = QParser.getParser(fq, null, req);
                    filters.add(fqp.getQuery());
                }
            }
        }
    } catch (Exception e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    SolrIndexSearcher searcher = req.getSearcher();
    SchemaField uniqueKeyField = searcher.getSchema().getUniqueKeyField();

    DocListAndSet sltDocs = null;

    // Parse Required Params
    // This will either have a single Reader or valid query
    Reader reader = null;
    try {
        if (q == null || q.trim().length() < 1) {
            Iterable<ContentStream> streams = req.getContentStreams();
            if (streams != null) {
                Iterator<ContentStream> iter = streams.iterator();
                if (iter.hasNext()) {
                    reader = iter.next().getReader();
                }
                if (iter.hasNext()) {
                    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                            "SenseLikeThis does not support multiple ContentStreams");
                }
            }
        }

        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        // Find documents SenseLikeThis - either with a reader or a query
        // --------------------------------------------------------------------------------
        SenseQuery slt = null;
        if (reader != null) {
            throw new RuntimeException("SLT based on a reader is not yet implemented");
        } else if (q != null) {
            // Matching options
            boolean includeMatch = params.getBool(MoreLikeThisParams.MATCH_INCLUDE, true);
            int matchOffset = params.getInt(MoreLikeThisParams.MATCH_OFFSET, 0);
            // Find the base match

            DocList match = searcher.getDocList(query, null, null, matchOffset, 1, flags); // only get the first one...
            if (includeMatch) {
                rsp.add("match", match);
            }

            // Get docID
            DocIterator iterator = match.iterator();
            docID = iterator.nextDoc();

            BooleanQuery bq = new BooleanQuery();
            Document doc = searcher.getIndexReader().document(docID);
            bq.add(new TermQuery(new Term(uniqueKeyField.getName(),
                    uniqueKeyField.getType().storedToIndexed(doc.getField(uniqueKeyField.getName())))),
                    BooleanClause.Occur.MUST_NOT);
            filters.add(bq);

        } else {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis requires either a query (?q=) or text to find similar documents.");
        }

        String CKBid = params.get(SenseParams.SENSE_CKB, SenseParams.SENSE_CKB_DEFAULT);

        String senseField = params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD);
        slt = new SenseQuery(new RealTermFreqVector(docID, searcher.getIndexReader(), senseField), senseField,
                CKBid, params.getFloat(SenseParams.SENSE_WEIGHT, SenseParams.DEFAULT_SENSE_WEIGHT), null);

        //Execute the SLT query
        //DocSet filtered = searcher.getDocSet(filters);
        //System.out.println("Number of documents to search:\t" + filtered.size());
        //sltDocs = searcher.getDocListAndSet(slt, filtered, Sort.RELEVANCE, start, rows, flags);
        sltDocs = searcher.getDocListAndSet(slt, filters, Sort.RELEVANCE, start, rows, flags);

    } finally {
        if (reader != null) {
            reader.close();
        }
    }

    if (sltDocs == null) {
        sltDocs = new DocListAndSet(); // avoid NPE
    }
    rsp.add("response", sltDocs.docList);

    // maybe facet the results
    if (params.getBool(FacetParams.FACET, false)) {
        if (sltDocs.docSet == null) {
            rsp.add("facet_counts", null);
        } else {
            SimpleFacets f = new SimpleFacets(req, sltDocs.docSet, params);
            rsp.add("facet_counts", f.getFacetCounts());
        }
    }

    // Debug info, not doing it for the moment.
    boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);

    boolean dbgQuery = false, dbgResults = false;
    if (dbg == false) {//if it's true, we are doing everything anyway.
        String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
        if (dbgParams != null) {
            for (int i = 0; i < dbgParams.length; i++) {
                if (dbgParams[i].equals(CommonParams.QUERY)) {
                    dbgQuery = true;
                } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                    dbgResults = true;
                }
            }
        }
    } else {
        dbgQuery = true;
        dbgResults = true;
    }
    // Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
    if (dbg == true) {
        try {

            NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, query, sltDocs.docList,
                    dbgQuery, dbgResults);
            if (null != dbgInfo) {
                if (null != filters) {
                    dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                    List<String> fqs = new ArrayList<String>(filters.size());
                    for (Query fq : filters) {
                        fqs.add(QueryParsing.toString(fq, req.getSchema()));
                    }
                    dbgInfo.add("parsed_filter_queries", fqs);
                }
                rsp.add("debug", dbgInfo);
            }
        } catch (Exception e) {
            SolrException.log(SolrCore.log, "Exception during debug", e);
            rsp.add("exception_during_debug", SolrException.toStr(e));
        }
    }
}

From source file:com.searchbox.solr.SenseQueryHandler.java

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    NamedList<Object> timinginfo = new NamedList<Object>();
    numRequests++;
    long startTime = System.currentTimeMillis();
    long lstartTime = System.currentTimeMillis();

    if (!keystate) {
        LOGGER.error(
                "License key failure, not performing sense query. Please email contact@searchbox.com for more information.");
        return;
    }

    boolean fromcache = false;

    try {
        SolrParams params = req.getParams();
        HashSet<String> toIgnore = new HashSet<String>();
        toIgnore.add("start");
        toIgnore.add("rows");
        toIgnore.add("fl");
        toIgnore.add("wt");
        toIgnore.add("indent");

        SolrCacheKey key = new SolrCacheKey(params, toIgnore);

        // Set field flags
        ReturnFields returnFields = new SolrReturnFields(req);
        rsp.setReturnFields(returnFields);
        int flags = 0;
        if (returnFields.wantsScore()) {
            flags |= SolrIndexSearcher.GET_SCORES;
        }

        String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
        String q = params.get(CommonParams.Q);
        Query query = null;
        QueryReductionFilter qr = null;
        List<Query> filters = new ArrayList<Query>();

        try {
            if (q != null) {
                QParser parser = QParser.getParser(q, defType, req);
                query = parser.getQuery();

            }

            String[] fqs = req.getParams().getParams(CommonParams.FQ);
            if (fqs != null && fqs.length != 0) {
                for (String fq : fqs) {
                    if (fq != null && fq.trim().length() != 0) {
                        QParser fqp = QParser.getParser(fq, null, req);
                        filters.add(fqp.getQuery());
                    }
                }
            }
        } catch (Exception e) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }

        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        SenseQuery slt = null;
        if (q == null) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis requires either a query (?q=) or text to find similar documents.");
        }

        timinginfo.add("Parse Query time", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Parsed Query Time:\t" + (System.currentTimeMillis() - lstartTime));
        lstartTime = System.currentTimeMillis();

        SolrIndexSearcher searcher = req.getSearcher();
        SolrCache sc = searcher.getCache("com.searchbox.sltcache");
        DocListAndSet sltDocs = null;
        if (sc != null) {
            //try to get from cache
            sltDocs = (DocListAndSet) sc.get(key.getSet());
        } else {
            LOGGER.error("com.searchbox.sltcache not defined, can't cache slt queries");
        }

        if (start + rows > 1000 || sltDocs == null || !params.getBool(CommonParams.CACHE, true)) { //not in cache, need to do search
            String CKBid = params.get(SenseParams.SENSE_CKB, SenseParams.SENSE_CKB_DEFAULT);
            String senseField = params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD);
            RealTermFreqVector rtv = new RealTermFreqVector(q,
                    SenseQuery.getAnalyzerForField(req.getSchema(), senseField));
            timinginfo.add("Make real term freq vector", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            qr = new QueryReductionFilter(rtv, CKBid, searcher, senseField);
            qr.setNumtermstouse(params.getInt(SenseParams.SENSE_QR_NTU, SenseParams.SENSE_QR_NTU_DEFAULT));

            numTermsUsed += qr.getNumtermstouse();
            numTermsConsidered += rtv.getSize();

            qr.setThreshold(params.getInt(SenseParams.SENSE_QR_THRESH, SenseParams.SENSE_QR_THRESH_DEFAULT));
            qr.setMaxDocSubSet(params.getInt(SenseParams.SENSE_QR_MAXDOC, SenseParams.SENSE_QR_MAXDOC_DEFAULT));
            qr.setMinDocSetSizeForFilter(
                    params.getInt(SenseParams.SENSE_MINDOC4QR, SenseParams.SENSE_MINDOC4QR_DEFAULT));

            timinginfo.add("Setup Sense query", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Setup Sense query:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            DocList subFiltered = qr.getSubSetToSearchIn(filters);
            timinginfo.add("Do Query Redux", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Do query redux:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            numFiltered += qr.getFiltered().docList.size();
            numSubset += subFiltered.size();
            LOGGER.info("Number of documents to search:\t" + subFiltered.size());

            slt = new SenseQuery(rtv, senseField, CKBid,
                    params.getFloat(SenseParams.SENSE_WEIGHT, SenseParams.DEFAULT_SENSE_WEIGHT), null);
            LOGGER.debug("Setup sense query:\t" + (System.currentTimeMillis() - lstartTime));
            timinginfo.add("Setup sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            sltDocs = searcher.getDocListAndSet(slt, subFiltered, Sort.RELEVANCE, 0, 1000, flags);
            timinginfo.add("Do sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            LOGGER.debug("Adding this keyto cache:\t" + key.getSet().toString());
            searcher.getCache("com.searchbox.sltcache").put(key.getSet(), sltDocs);

        } else {
            fromcache = true;
            timinginfo.add("Getting from cache", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Got result from cache");
            lstartTime = System.currentTimeMillis();
        }

        if (sltDocs == null) {
            numEmpty++;
            sltDocs = new DocListAndSet(); // avoid NPE
        }
        rsp.add("response", sltDocs.docList.subset(start, rows));

        // --------- OLD CODE BELOW
        // maybe facet the results
        if (params.getBool(FacetParams.FACET, false)) {
            if (sltDocs.docSet == null) {
                rsp.add("facet_counts", null);
            } else {
                SimpleFacets f = new SimpleFacets(req, sltDocs.docSet, params);
                rsp.add("facet_counts", f.getFacetCounts());
            }
        }

        // Debug info, not doing it for the moment.
        boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);

        boolean dbgQuery = false, dbgResults = false;
        if (dbg == false) {//if it's true, we are doing everything anyway.
            String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
            if (dbgParams != null) {
                for (int i = 0; i < dbgParams.length; i++) {
                    if (dbgParams[i].equals(CommonParams.QUERY)) {
                        dbgQuery = true;
                    } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                        dbgResults = true;
                    }
                }
            }
        } else {
            dbgQuery = true;
            dbgResults = true;
        }
        if (dbg == true) {
            try {
                NamedList dbgInfo = new SimpleOrderedMap();
                dbgInfo.add("Query freqs", slt.getAllTermsasString());
                dbgInfo.addAll(
                        getExplanations(slt, sltDocs.docList.subset(start, rows), searcher, req.getSchema()));
                if (null != filters) {
                    dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                    List<String> fqs = new ArrayList<String>(filters.size());
                    for (Query fq : filters) {
                        fqs.add(QueryParsing.toString(fq, req.getSchema()));
                    }
                    dbgInfo.add("parsed_filter_queries", fqs);
                }
                if (null != qr) {
                    dbgInfo.add("QueryReduction", qr.getDbgInfo());
                }
                if (null != slt) {
                    dbgInfo.add("SLT", slt.getDbgInfo());

                }
                dbgInfo.add("fromcache", fromcache);
                rsp.add("debug", dbgInfo);
                timinginfo.add("Debugging parts", System.currentTimeMillis() - lstartTime);
                dbgInfo.add("timings", timinginfo);

            } catch (Exception e) {
                SolrException.log(SolrCore.log, "Exception during debug", e);
                rsp.add("exception_during_debug", SolrException.toStr(e));
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        numErrors++;
    } finally {
        totalTime += System.currentTimeMillis() - startTime;
    }
}

From source file:com.sn.solr.plugin.rank.RankEngine.java

License:Apache License

/**
 * Provides the dense ranking ("1223") implementation identified by
 * {@link RankStrategy#LEGACY_DENSE}. Unlike {@link RankStrategy#DENSE},
 * this variant is computed without facet results, so it is noticeably
 * slower than facet-based ranking and may also cause many cache
 * evictions, putting pressure on memory.
 *
 * @see #computeDenseRank(List)
 *
 * @param rb the response builder for the current request
 * @param idField name of the unique id field
 * @param rankField name of the field whose values are ranked
 */
@Deprecated
public static Map<String, Number> computeLegacyDenseRank(ResponseBuilder rb, String idField, String rankField)
        throws IOException {
    SolrIndexSearcher searcher = rb.req.getSearcher();
    SolrParams params = rb.req.getParams();// .getParams(FacetParams.FACET_FIELD);

    String _start = params.get(CommonParams.START);
    String _rows = params.get(CommonParams.ROWS);
    int start = 0;
    int rows = 10;

    if (_start != null && AppHelper.isInteger(_start))
        start = Integer.parseInt(_start);
    if (_rows != null && AppHelper.isInteger(_rows))
        rows = Integer.parseInt(_rows);

    LOG.info("Computing rank using strategy: {}", RankStrategy.ORDINAL.getDescription());
    FieldSelector fs = new MapFieldSelector(new String[] { idField, rankField });
    Map<String, Number> rankMap = new HashMap<String, Number>();
    DocList docs = searcher.getDocList(rb.getQuery(), rb.getFilters(), rb.getSortSpec().getSort(), 0,
            start + rows, 0);
    int denseRank = 1;
    int _CurrScore = 0;
    int _PrevScore = 0;
    int i = 0;
    for (DocIterator it = docs.iterator(); it.hasNext();) {
        Document doc = searcher.doc(it.nextDoc(), fs);
        _CurrScore = new Integer(doc.get(rankField));
        if (i == 0) {
            _PrevScore = _CurrScore;
        }
        if (_PrevScore != _CurrScore) {
            _PrevScore = _CurrScore;
            denseRank++;
        }
        if (i >= start) {
            rankMap.put(doc.get(idField), denseRank);
        }
        i++;
    }

    return rankMap;
}
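
The javadoc's "1223" notation refers to dense ranking: tied values share a rank and the next distinct value gets the next consecutive rank, so scores 50, 40, 40, 30 rank as 1, 2, 2, 3. A small, self-contained sketch of that rule, independent of Solr and of this class:

import java.util.LinkedHashMap;
import java.util.Map;

public class DenseRankExample {

    /** Dense ("1223") ranking over scores already sorted in descending order. */
    static Map<String, Integer> denseRank(Map<String, Integer> idToScoreSortedDesc) {
        Map<String, Integer> ranks = new LinkedHashMap<>();
        Integer prevScore = null;
        int rank = 0;
        for (Map.Entry<String, Integer> e : idToScoreSortedDesc.entrySet()) {
            if (prevScore == null || !prevScore.equals(e.getValue())) {
                rank++;                    // only bump the rank when the score changes
                prevScore = e.getValue();
            }
            ranks.put(e.getKey(), rank);
        }
        return ranks;
    }

    public static void main(String[] args) {
        Map<String, Integer> scores = new LinkedHashMap<>();
        scores.put("a", 50);
        scores.put("b", 40);
        scores.put("c", 40);
        scores.put("d", 30);
        System.out.println(denseRank(scores)); // {a=1, b=2, c=2, d=3}
    }
}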

From source file:com.tsgrp.solr.handler.NYPhilGetTagsHandler.java

License:Mozilla Public License

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse res)
        throws Exception, ParseException, InstantiationException, IllegalAccessException {
    NamedList<Object> params = req.getParams().toNamedList();

    String assetId = (String) params.get(PARAM_ASSET_ID);
    if (assetId == null || assetId.trim().length() < 1) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Asset Id required to retrieve tags.");
    }

    boolean allTags = (params.get(PARAM_ALL_TAGS) != null
            && Boolean.parseBoolean((String) params.get(PARAM_ALL_TAGS)));

    StringBuffer q = new StringBuffer("+").append(NYPhilSolrConstants.NPT_ASSET_ID_ESC).append(":")
            .append(QueryParser.escape(assetId));

    if (!allTags) {
        q.append(" +").append(NYPhilSolrConstants.NPT_STATUS_ESC).append(":")
                .append(NYPhilSolrConstants.STATUS_APPROVED);
    }

    String cb = (String) params.get(PARAM_CALLBACK);
    if (cb != null && cb.length() > 0) {
        params.add("json.wrf", cb);
    }

    params.add(CommonParams.HEADER_ECHO_PARAMS, "explicit");
    params.add(CommonParams.WT, "json");
    params.add("json.nl", "map");

    params.add(CommonParams.Q, q.toString());
    params.add(CommonParams.ROWS, 1000);

    req.setParams(SolrParams.toSolrParams(params));

    super.handleRequestBody(req, res);
}

From source file:com.tsgrp.solr.handler.NYPhilSearchHandler.java

License:Mozilla Public License

/**
 * @see org.apache.solr.handler.component.SearchHandler#handleRequestBody(org.apache.solr.request.SolrQueryRequest,
 *      org.apache.solr.request.SolrQueryResponse)
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp)
        throws Exception, ParseException, InstantiationException, IllegalAccessException {
    SolrParams requestParams = req.getParams();

    NamedList<Object> params = requestParams.toNamedList();

    //for now lets echo the handler and all the params for debugging purposes
    //params.add( CommonParams.HEADER_ECHO_HANDLER, Boolean.TRUE );
    //params.add( CommonParams.HEADER_ECHO_PARAMS, CommonParams.EchoParamStyle.ALL );

    String rows = (String) params.get(CommonParams.ROWS);
    if (rows == null || rows.trim().length() < 1) {
        //setup items per page, default to 10 items
        params.add(CommonParams.ROWS, 10);
        rows = "10";
    }

    //enable faceting and only return facets with at least 1 item
    params.add(FacetParams.FACET, Boolean.TRUE);
    params.add(FacetParams.FACET_MINCOUNT, 1);

    //default ordering to index order (alphabetical)
    params.add(FacetParams.FACET_SORT, FacetParams.FACET_SORT_INDEX);

    //only return a maximum of 10 facet values
    params.add(FacetParams.FACET_LIMIT, 10);

    //always add facets unless they are explicitly not requested
    String addFacets = (String) params.get(PARAM_GENERATE_FACETS);
    boolean generateFacets = (addFacets == null) ? true : addFacets.equalsIgnoreCase("true");

    //enable highlighting
    params.add(HighlightParams.HIGHLIGHT, Boolean.TRUE);

    //remove the query if provided, always want to use our translated query
    String originalQuery = (String) params.remove(CommonParams.Q);

    if (logger.isDebugEnabled()) {
        logger.debug("Original query: " + originalQuery);
    }

    //setup sorting params
    String sortColumn = (String) params.get(PARAM_SORT_COLUMN);
    String sortOrder = (String) params.get(PARAM_SORT_ORDER);
    if (sortColumn != null && sortOrder != null) {
        params.add(CommonParams.SORT, sortColumn + " " + sortOrder);
    }

    //get date fields
    String sDateFrom = (String) params.get(PARAM_DATE_FROM);
    String sDateTo = (String) params.get(PARAM_DATE_TO);
    Date dateFrom = null;
    Date dateTo = null;
    if (sDateFrom != null && sDateTo != null) {
        dateFrom = DATE_FORMAT_PARAM.parse(sDateFrom);
        dateTo = DATE_FORMAT_PARAM.parse(sDateTo);
    }

    String keywords = (String) params.get(PARAM_KEYWORDS);

    if (keywords == null || keywords.trim().length() < 1) {
        //query everything since nothing was provided
        keywords = "*";
    }

    //always use the extended dismax parser
    params.add("defType", ExtendedDismaxQParserPlugin.NAME);

    //the keywords sent in are the query since we're in DISMAX mode
    params.add(CommonParams.Q, keywords);

    String pageIndex = (String) params.get(PARAM_PAGE_INDEX);
    String resultsPerPage = (String) params.get(PARAM_RESULTS_PER_PAGE);
    if (resultsPerPage == null || resultsPerPage.trim().length() < 1) {
        resultsPerPage = "10";
    }

    if (pageIndex == null || pageIndex.trim().length() < 1) {
        pageIndex = "1";
    }

    //figure out the skip count, use the (pageIndex - 1) * resultsPerPage
    //ie pageIndex = 3, 10 results per page, we'll set start to (3-1)*10 = 20
    int start = (Integer.parseInt(pageIndex) - 1) * Integer.parseInt(resultsPerPage);
    params.add(CommonParams.START, start);

    String doctype = (String) params.get(PARAM_DOCTYPE);
    if (doctype == null || doctype.trim().length() < 1) {
        doctype = "";
    }

    String facetQuery = (String) params.get(PARAM_FACET_QUERY);
    if (facetQuery != null && facetQuery.trim().length() > 0) {
        //facetQuery is pre formatted and correct, just add it as a filter query
        params.add(CommonParams.FQ, facetQuery);
    }

    String suggestedQuery = (String) params.get(PARAM_SUGGESTED_QUERY);
    if (suggestedQuery != null && suggestedQuery.trim().length() > 0) {
        //suggestedQuery is pre formatted and correct, just add it as a filter query
        params.add(CommonParams.FQ, suggestedQuery);
    }

    //base fields
    params.add(DisMaxParams.QF, "nyp:DocumentType");
    params.add(DisMaxParams.QF, "nyp:Notes");

    //program fields
    params.add(DisMaxParams.QF, "npp:ProgramID");
    params.add(DisMaxParams.QF, "npp:Season");
    params.add(DisMaxParams.QF, "npp:OrchestraCode");
    params.add(DisMaxParams.QF, "npp:OrchestraName");
    params.add(DisMaxParams.QF, "npp:LocationName");
    params.add(DisMaxParams.QF, "npp:VenueName");
    params.add(DisMaxParams.QF, "npp:EventTypeName");
    params.add(DisMaxParams.QF, "npp:SubEventName");
    params.add(DisMaxParams.QF, "npp:ConductorName");
    params.add(DisMaxParams.QF, "npp:SoloistsNames");
    params.add(DisMaxParams.QF, "npp:SoloistsInstrumentName");
    params.add(DisMaxParams.QF, "npp:WorksComposerNames");
    params.add(DisMaxParams.QF, "npp:WorksTitle");
    params.add(DisMaxParams.QF, "npp:WorksShortTitle");
    params.add(DisMaxParams.QF, "npp:WorksConductorNames");

    //printedMusic fields
    params.add(DisMaxParams.QF, "npm:LibraryID");
    params.add(DisMaxParams.QF, "npm:ShortTitle");
    params.add(DisMaxParams.QF, "npm:ComposerName");
    params.add(DisMaxParams.QF, "npm:PublisherName");
    params.add(DisMaxParams.QF, "npm:ComposerNameTitle");
    params.add(DisMaxParams.QF, "npm:ScoreMarkingArtist");
    params.add(DisMaxParams.QF, "npm:ScoreEditionTypeDesc");
    params.add(DisMaxParams.QF, "npm:ScoreNotes");

    //part fields
    params.add(DisMaxParams.QF, "npm:PartTypeDesc");
    params.add(DisMaxParams.QF, "npm:PartMarkingArtist");
    params.add(DisMaxParams.QF, "npm:UsedByArtistName");

    //businessRecord fields
    params.add(DisMaxParams.QF, "npb:BoxNumber");
    params.add(DisMaxParams.QF, "npb:RecordGroup");
    params.add(DisMaxParams.QF, "npb:Series");
    params.add(DisMaxParams.QF, "npb:SubSeries");
    params.add(DisMaxParams.QF, "npb:Folder");
    params.add(DisMaxParams.QF, "npb:Names");
    params.add(DisMaxParams.QF, "npb:Subject");
    params.add(DisMaxParams.QF, "npb:Abstract");

    //visual fields
    params.add(DisMaxParams.QF, "npv:ID");
    params.add(DisMaxParams.QF, "npv:BoxNumber");
    params.add(DisMaxParams.QF, "npv:PhilharmonicSource");
    params.add(DisMaxParams.QF, "npv:OutsideSource");
    params.add(DisMaxParams.QF, "npv:Photographer");
    params.add(DisMaxParams.QF, "npv:CopyrightHolder");
    params.add(DisMaxParams.QF, "npv:PlaceOfImage");
    params.add(DisMaxParams.QF, "npv:PersonalNames");
    params.add(DisMaxParams.QF, "npv:CorporateNames");
    params.add(DisMaxParams.QF, "npv:Event");
    params.add(DisMaxParams.QF, "npv:ImageType");
    params.add(DisMaxParams.QF, "npv:LocationName");
    params.add(DisMaxParams.QF, "npv:VenueName");

    //audio fields
    params.add(DisMaxParams.QF, "npa:ProgramID");
    params.add(DisMaxParams.QF, "npa:Location");
    params.add(DisMaxParams.QF, "npa:EventTypeName");
    params.add(DisMaxParams.QF, "npa:ConductorName");
    params.add(DisMaxParams.QF, "npa:SoloistsAndInstruments");
    params.add(DisMaxParams.QF, "npa:ComposerWork");
    params.add(DisMaxParams.QF, "npa:OrchestraName");
    params.add(DisMaxParams.QF, "npa:IntermissionFeature");
    params.add(DisMaxParams.QF, "npa:LocationName");
    params.add(DisMaxParams.QF, "npa:VenueName");
    params.add(DisMaxParams.QF, "npa:SubEventName");
    params.add(DisMaxParams.QF, "npa:URLLocation");
    params.add(DisMaxParams.QF, "npa:IntermissionGuests");
    params.add(DisMaxParams.QF, "npa:Announcer");

    //video fields
    params.add(DisMaxParams.QF, "npx:ProgramID");
    params.add(DisMaxParams.QF, "npx:Location");
    params.add(DisMaxParams.QF, "npx:EventTypeName");
    params.add(DisMaxParams.QF, "npx:ConductorName");
    params.add(DisMaxParams.QF, "npx:SoloistsAndInstruments");
    params.add(DisMaxParams.QF, "npx:ComposerNameWork");
    params.add(DisMaxParams.QF, "npx:OrchestraName");
    params.add(DisMaxParams.QF, "npx:IntermissionFeature");
    params.add(DisMaxParams.QF, "npx:LocationName");
    params.add(DisMaxParams.QF, "npx:VenueName");
    params.add(DisMaxParams.QF, "npx:SubEventName");
    params.add(DisMaxParams.QF, "npx:IntermissionGuests");
    params.add(DisMaxParams.QF, "npx:Announcer");

    params.add(DisMaxParams.QF, "npt:tagged");

    //add in all the restrictions that can be applied to a document type count in the same filter query

    //this includes all DATE range queries
    //if a date query does not exist, the TYPE is queried so all results are shown
    //restriction is applied to business records so only the web publishable items are shown
    StringBuffer filterTypesCountsQuery = new StringBuffer();

    if (dateFrom != null && dateTo != null) {
        //program date query
        filterTypesCountsQuery.append("(npp\\:Date:[" + solrField.toExternal(dateFrom) + " TO "
                + solrField.toExternal(dateTo) + "])");

        //printedMusic (scores) and parts have no date range, so just or in the type so those results come back
        filterTypesCountsQuery.append(" OR (nyp\\:DocumentType:Printed Music)");
        filterTypesCountsQuery.append(" OR (nyp\\:DocumentType:Part)");

        //business record range AND web publishable restriction
        filterTypesCountsQuery
                .append(" OR (nyp\\:WebPublishable:true AND ((npb\\:DateFrom:[" + solrField.toExternal(dateFrom)
                        + " TO *] AND npb\\:DateTo:[* TO " + solrField.toExternal(dateTo) + "])");
        filterTypesCountsQuery.append(" OR (npb\\:DateFrom:[* TO " + solrField.toExternal(dateFrom)
                + "] AND npb\\:DateTo:[* TO " + solrField.toExternal(dateTo) + "] AND npb\\:DateTo:["
                + solrField.toExternal(dateFrom) + " TO *])");
        filterTypesCountsQuery.append(" OR (npb\\:DateFrom:[" + solrField.toExternal(dateFrom)
                + " TO *] AND npb\\:DateTo:[" + solrField.toExternal(dateTo)
                + " TO *] AND npb\\:DateFrom:[* TO " + solrField.toExternal(dateFrom) + "])))");

        //visual
        filterTypesCountsQuery.append(" OR ((npv\\:DateFrom:[" + solrField.toExternal(dateFrom)
                + " TO *] AND npv\\:DateTo:[* TO " + solrField.toExternal(dateTo) + "])");
        filterTypesCountsQuery.append(" OR (npv\\:DateFrom:[* TO " + solrField.toExternal(dateFrom)
                + "] AND npv\\:DateTo:[* TO " + solrField.toExternal(dateTo) + "] AND npv\\:DateTo:["
                + solrField.toExternal(dateFrom) + " TO *])");
        filterTypesCountsQuery.append(" OR (npv\\:DateFrom:[" + solrField.toExternal(dateFrom)
                + " TO *] AND npv\\:DateTo:[" + solrField.toExternal(dateTo)
                + " TO *] AND npv\\:DateFrom:[* TO " + solrField.toExternal(dateFrom) + "]))");

        //audio
        filterTypesCountsQuery.append(" OR (npa\\:Date:[" + solrField.toExternal(dateFrom) + " TO "
                + solrField.toExternal(dateTo) + "])");

        //video
        filterTypesCountsQuery.append(" OR (npx\\:Date:[" + solrField.toExternal(dateFrom) + " TO "
                + solrField.toExternal(dateTo) + "])");
    } else {
        //no date queries, just perform query based on type restrictions            
        filterTypesCountsQuery.append(
                "(nyp\\:DocumentType:Program) OR (nyp\\:DocumentType:Printed Music)  OR (nyp\\:DocumentType:Part) OR (nyp\\:DocumentType:Business Record AND nyp\\:WebPublishable:true)");
        filterTypesCountsQuery.append(
                " OR (nyp\\:DocumentType:Visual) OR (nyp\\:DocumentType:Audio) OR (nyp\\:DocumentType:Video)");
    }

    params.add(CommonParams.FQ, filterTypesCountsQuery);

    //default facet for document type that will always return the counts regardless of filter queries applied
    params.add(FacetParams.FACET_FIELD, "{!ex=test}nyp:DocumentType_facet");
    //allow this facet to always display all values, even when there are 0 results for the type
    params.add("f.nyp:DocumentType_facet.facet.mincount", 0);

    if (doctype.equalsIgnoreCase("program")) {
        //set the document type restriction
        params.add(CommonParams.FQ, "{!tag=test}nyp\\:DocumentType:Program");

        if (generateFacets) {
            params.add(FacetParams.FACET_FIELD, "npp:ConductorName_facet");
            params.add(FacetParams.FACET_FIELD, "npp:SoloistsNames_facet");
            params.add(FacetParams.FACET_FIELD, "npp:WorksComposerNames_facet");
            params.add(FacetParams.FACET_FIELD, "npp:LocationName_facet");
            params.add(FacetParams.FACET_FIELD, "npp:VenueName_facet");
            params.add(FacetParams.FACET_FIELD, "npp:EventTypeName_facet");
            params.add(FacetParams.FACET_FIELD, "npp:Season_facet");
        }
    } else if (doctype.equalsIgnoreCase("printedMusic")) {
        //set the document type restriction
        params.add(CommonParams.FQ, "{!tag=test}nyp\\:DocumentType:Printed Music");

        if (generateFacets) {
            params.add(FacetParams.FACET_FIELD, "npm:ScoreMarkingArtist_facet");
            params.add(FacetParams.FACET_FIELD, "npm:ComposerName_facet");
        }

    } else if (doctype.equalsIgnoreCase("part")) {
        //set the document type restriction
        params.add(CommonParams.FQ, "{!tag=test}nyp\\:DocumentType:Part");

        if (generateFacets) {
            params.add(FacetParams.FACET_FIELD, "npm:ComposerName_facet");
            params.add(FacetParams.FACET_FIELD, "npm:UsedByArtistName_facet");
            params.add(FacetParams.FACET_FIELD, "npm:PartMarkingArtist_facet");
            params.add(FacetParams.FACET_FIELD, "npm:PartTypeDesc_facet");
        }

        // if sort parameter has been supplied (e.g. non-facet search) - apply additional part sorting
        if (sortColumn != null && sortOrder != null) {
            logger.debug("Addition additional sort parameter for PART type...");
            String prevParams = (String) params.get(CommonParams.SORT);
            String newParams = prevParams + ", npm:PartID ASC";
            params.add(CommonParams.SORT, newParams);
        }

    } else if (doctype.equalsIgnoreCase("businessRecord")) {
        //set the document type restriction
        params.add(CommonParams.FQ, "{!tag=test}nyp\\:DocumentType:Business Record");

        if (generateFacets) {
            params.add(FacetParams.FACET_FIELD, "npb:Names_facet");
            params.add(FacetParams.FACET_FIELD, "npb:Subject_facet");
            params.add(FacetParams.FACET_FIELD, "npb:RecordGroup_facet");
            params.add(FacetParams.FACET_FIELD, "npb:Series_facet");
            params.add(FacetParams.FACET_FIELD, "npb:SubSeries_facet");
        }

    } else if (doctype.equalsIgnoreCase("visual")) {
        //set the document type restriction
        params.add(CommonParams.FQ, "{!tag=test}nyp\\:DocumentType:Visual");

        if (generateFacets) {
            params.add(FacetParams.FACET_FIELD, "npv:Photographer_facet");
            params.add(FacetParams.FACET_FIELD, "npv:CopyrightHolder_facet");
            params.add(FacetParams.FACET_FIELD, "npv:ImageType_facet");
            params.add(FacetParams.FACET_FIELD, "npv:PlaceOfImage_facet");
            params.add(FacetParams.FACET_FIELD, "npv:Event_facet");
            params.add(FacetParams.FACET_FIELD, "npv:PersonalNames_facet");
            params.add(FacetParams.FACET_FIELD, "npv:LocationName_facet");
            params.add(FacetParams.FACET_FIELD, "npv:VenueName_facet");
        }

    } else if (doctype.equalsIgnoreCase("audio")) {
        //set the document type restriction
        params.add(CommonParams.FQ, "{!tag=test}nyp\\:DocumentType:Audio");

        //no facets being generated
    } else if (doctype.equalsIgnoreCase("video")) {
        //set the document type restriction
        params.add(CommonParams.FQ, "{!tag=test}nyp\\:DocumentType:Video");

        //no facets being generated
    } else {
        logger.error("Invalid document type: " + doctype);
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid document type: " + doctype);
    }

    if (logger.isDebugEnabled()) {
        logger.debug("Params: " + params);
    }

    //set the new request parameters, then call the default handler behavior
    req.setParams(SolrParams.toSolrParams(params));

    super.handleRequestBody(req, rsp);
}