Example usage for org.apache.solr.common.util SimpleOrderedMap SimpleOrderedMap

List of usage examples for org.apache.solr.common.util SimpleOrderedMap SimpleOrderedMap

Introduction

On this page you can find example usages of org.apache.solr.common.util SimpleOrderedMap's SimpleOrderedMap constructor.

Prototype

public SimpleOrderedMap() 

Source Link

Document

Creates an empty SimpleOrderedMap instance.

Usage

From source file:alba.components.FilteredShowFileRequestHandler.java

License:Apache License

/**
 * Serves the admin "show file" request from ZooKeeper. If the requested node
 * has children, a directory listing is added to the response under "files";
 * otherwise the node's data is rendered through Velocity and returned as a
 * raw content stream (forcing the "raw" response writer).
 *
 * @param req           the current Solr request; supplies the "file" param
 * @param rsp           the response to populate
 * @param coreContainer used to obtain the ZooKeeper client
 * @throws KeeperException              on ZooKeeper access errors
 * @throws InterruptedException         if a ZooKeeper call is interrupted
 * @throws UnsupportedEncodingException declared, though no encoding
 *         conversion is visible in this method — possibly for callers/overrides
 */
private void showFromZooKeeper(SolrQueryRequest req, SolrQueryResponse rsp, CoreContainer coreContainer)
        throws KeeperException, InterruptedException, UnsupportedEncodingException {

    SolrZkClient zkClient = coreContainer.getZkController().getZkClient();

    String adminFile = getAdminFileFromZooKeeper(req, rsp, zkClient, hiddenFiles);

    if (adminFile == null) {
        // getAdminFileFromZooKeeper has already recorded the error on the response
        return;
    }

    // Show a directory listing when the node has children
    List<String> children = zkClient.getChildren(adminFile, null, true);
    if (children.size() > 0) {

        NamedList<SimpleOrderedMap<Object>> files = new SimpleOrderedMap<>();
        for (String f : children) {
            if (isHiddenFile(req, rsp, f, false, hiddenFiles)) {
                continue;
            }

            SimpleOrderedMap<Object> fileInfo = new SimpleOrderedMap<>();
            files.add(f, fileInfo);
            List<String> fchildren = zkClient.getChildren(adminFile + "/" + f, null, true);
            if (fchildren.size() > 0) {
                fileInfo.add("directory", true);
            } else {
                // TODO? content type
                // NOTE(review): f is the child node NAME (a String), so f.length()
                // reports the length of the name, not the node's data size — likely
                // a bug; compare showFromFileSystem, which uses File.length().
                fileInfo.add("size", f.length());
            }
            // TODO: ?
            // fileInfo.add( "modified", new Date( f.lastModified() ) );
        }
        rsp.add("files", files);
    } else {
        // Include the file contents
        // The file logic depends on RawResponseWriter, so force its use.
        ModifiableSolrParams params = new ModifiableSolrParams(req.getParams());
        params.set(CommonParams.WT, "raw");
        req.setParams(params);

        ContentStreamBase content = new ContentStreamBase.ByteArrayStream(
                zkClient.getData(adminFile, null, null, true), adminFile);
        content.setContentType(req.getParams().get(USE_CONTENT_TYPE));

        // Render the node content through Velocity.
        // NOTE(review): Velocity.init() runs on every request; engine
        // initialization is normally done once — confirm this is intentional.
        // http://velocity.apache.org/engine/devel/developer-guide.html#The_Context
        Velocity.init();

        VelocityContext context = new VelocityContext();

        // Expose every entry already placed on the response as a template variable.
        for (int i = 0; i < rsp.getValues().size(); i++) {
            context.put(rsp.getValues().getName(i), rsp.getValues().getVal(i));
        }

        Template template = null;

        String fname = req.getParams().get("file", null);

        try {
            //TODO what if fname is null?
            template = Velocity.getTemplate(fname);
        } catch (ResourceNotFoundException rnfe) {
            // couldn't find the template, try to build it from the node content

            // TODO it should be fired only for SOME mimetypes (..through an annotation??)
            StringBuilder sb = this.getTemplate(content);

            RuntimeServices runtimeServices = RuntimeSingleton.getRuntimeServices();
            StringReader reader = new StringReader(sb.toString());
            SimpleNode node = null;
            try {
                node = runtimeServices.parse(reader, fname);
            } catch (ParseException e) {
                logger.error("error while parsing new template", e);
            }

            template = new Template();

            template.setRuntimeServices(runtimeServices);

            if (node != null) {
                template.setData(node);
            } else {
                logger.error("node null, can't set on template");
            }

            template.initDocument();

        } catch (ParseErrorException pee) {
            // syntax error: problem parsing the template
            logger.error("error while parsing template: ", pee);

        } catch (MethodInvocationException mie) {
            // something invoked in the template
            // threw an exception
            logger.error("error while parsing temaplate: ", mie);
        } catch (Exception e) {
            logger.error("error while parsing temaplate: ", e);
        }

        StringWriter sw = new StringWriter();

        // NOTE(review): if Velocity.getTemplate threw ParseErrorException,
        // MethodInvocationException, or another Exception, template is still
        // null here and merge() throws a NullPointerException — confirm whether
        // that failure mode is acceptable.
        template.merge(context, sw);

        // http://stackoverflow.com/questions/18571223/how-to-convert-java-string-into-byte
        content = new ContentStreamBase.ByteArrayStream(
                sw.getBuffer().toString().getBytes(Charset.forName("UTF-8")), adminFile);
        content.setContentType(req.getParams().get(USE_CONTENT_TYPE));

        rsp.add(RawResponseWriter.CONTENT, content);
    }
    rsp.setHttpCaching(false);
}

From source file:alba.components.FilteredShowFileRequestHandler.java

License:Apache License

/**
 * Serves the admin "show file" request from the local file system. Directories
 * produce a "files" listing on the response; regular files are returned as a
 * raw content stream (forcing the "raw" response writer). Missing, unreadable,
 * or hidden files result in a NOT_FOUND SolrException on the response.
 *
 * @param req the current Solr request; identifies the file to show
 * @param rsp the response to populate
 */
private void showFromFileSystem(SolrQueryRequest req, SolrQueryResponse rsp) {
    File adminFile = getAdminFileFromFileSystem(req, rsp, hiddenFiles);
    if (adminFile == null) {
        // an exception has already been recorded on the response
        return;
    }

    // Reject files that do not exist...
    if (!adminFile.exists()) {
        String msg = "Can not find: " + adminFile.getName() + " [" + adminFile.getAbsolutePath() + "]";
        log.error(msg);
        rsp.setException(new SolrException(ErrorCode.NOT_FOUND, msg));
        return;
    }
    // ...or that are unreadable or hidden.
    if (!adminFile.canRead() || adminFile.isHidden()) {
        String msg = "Can not show: " + adminFile.getName() + " [" + adminFile.getAbsolutePath() + "]";
        log.error(msg);
        rsp.setException(new SolrException(ErrorCode.NOT_FOUND, msg));
        return;
    }

    if (adminFile.isDirectory()) {
        // Directory: list its non-hidden children, keyed by path relative to
        // the directory, with slashes normalized for Windows.
        int prefixLen = adminFile.getAbsolutePath().length() + 1;
        NamedList<SimpleOrderedMap<Object>> listing = new SimpleOrderedMap<>();
        for (File child : adminFile.listFiles()) {
            String relPath = child.getAbsolutePath().substring(prefixLen).replace('\\', '/');

            if (isHiddenFile(req, rsp, child.getName().replace('\\', '/'), false, hiddenFiles)) {
                continue;
            }

            SimpleOrderedMap<Object> info = new SimpleOrderedMap<>();
            listing.add(relPath, info);
            if (child.isDirectory()) {
                info.add("directory", true);
            } else {
                // TODO? content type
                info.add("size", child.length());
            }
            info.add("modified", new Date(child.lastModified()));
        }
        rsp.add("files", listing);
    } else {
        // Regular file: include its contents. The file logic depends on
        // RawResponseWriter, so force its use.
        ModifiableSolrParams forced = new ModifiableSolrParams(req.getParams());
        forced.set(CommonParams.WT, "raw");
        req.setParams(forced);

        ContentStreamBase stream = new ContentStreamBase.FileStream(adminFile);
        stream.setContentType(req.getParams().get(USE_CONTENT_TYPE));

        rsp.add(RawResponseWriter.CONTENT, stream);
    }
    rsp.setHttpCaching(false);
}

From source file:com.doculibre.constellio.opensearch.OpenSearchSolrServer.java

License:Open Source License

/**
 * Executes the given Solr request against an OpenSearch endpoint and adapts
 * the result into a Solr-style response with a single "response" entry
 * containing the parsed document list.
 *
 * @param request the Solr request; its params must include "openSearchURL"
 * @return a NamedList whose "response" entry is the parsed SolrDocumentList
 * @throws SolrServerException if the "openSearchURL" param is missing
 * @throws IOException on communication errors with the OpenSearch endpoint
 */
@Override
public NamedList<Object> request(SolrRequest request) throws SolrServerException, IOException {
    SolrParams params = request.getParams();
    if (params == null) {
        params = new ModifiableSolrParams();
    }

    String openSearchURL = params.get("openSearchURL");
    if (openSearchURL == null) {
        throw new SolrServerException("openSearchURL param is missing");
    }

    String query = params.get(CommonParams.Q);
    // "Search all" is translated to a query matching every crawled URL.
    if (SimpleSearch.SEARCH_ALL.equals(query)) {
        query = "url:http";
    }

    int start = params.getInt(CommonParams.START, 0);
    int hitsPerPage = params.getInt(CommonParams.ROWS, 10);
    String lang = params.get("lang");

    Map<String, String> openSearchParams = new HashMap<String, String>();
    openSearchParams.put("query", query);
    if (StringUtils.isNotBlank(lang)) {
        openSearchParams.put("lang", lang);
    }
    openSearchParams.put("start", Integer.toString(start));
    openSearchParams.put("hitsPerPage", Integer.toString(hitsPerPage));
    // FIXME: deduplication is effectively disabled by this huge hitsPerDup
    openSearchParams.put("hitsPerDup", Integer.toString(Integer.MAX_VALUE));

    Element root = sendGet(openSearchURL, openSearchParams);
    SolrDocumentList documents = parse(root);

    SimpleOrderedMap<Object> response = new SimpleOrderedMap<Object>();
    response.add("response", documents);
    return response;
}

From source file:com.francelabs.datafari.statistics.StatsProcessor.java

License:Apache License

/**
 * Post-processes a statistics QueryResponse: converts the facet counts on the
 * "q" field plus the per-query field stats (noHits, QTime, click, numClicks,
 * numFound, positionClickTot) into a synthetic SolrDocumentList — one document
 * per distinct query — and installs it as the new response body.
 *
 * NOTE(review): every stats list below is read with the same index i as
 * QTimeStats, i.e. all stats facets are assumed to come back in identical
 * order — confirm this holds for the Solr version in use.
 *
 * @param queryResponse the stats query response; modified in place
 * @throws Exception propagated from response/stat extraction
 */
public static void processStatsResponse(final QueryResponse queryResponse) throws Exception {
    final NamedList responseHeader = queryResponse.getResponseHeader();
    final FacetField QFacet = queryResponse.getFacetField("q");

    // Total number of recorded query events; used as the frequency denominator.
    final Long numTot = queryResponse.getResults().getNumFound();

    final SolrDocumentList solrDocumentList = new SolrDocumentList();
    solrDocumentList.setNumFound(QFacet.getValueCount());
    solrDocumentList.setStart(0);

    if (numTot != 0) {
        final Map<String, FieldStatsInfo> stats = queryResponse.getFieldStatsInfo();
        final List<FieldStatsInfo> noHitsStats = stats.get("noHits").getFacets().get("q");
        final List<FieldStatsInfo> QTimeStats = stats.get("QTime").getFacets().get("q");
        List<FieldStatsInfo> positionClickTotStats = null;
        try {
            positionClickTotStats = stats.get("positionClickTot").getFacets().get("q");
        } catch (final Exception e) {
            // deliberately ignored: positionClickTot stats may be absent, in
            // which case the list stays null.
            // NOTE(review): if it stays null while click > 0 below, the
            // AVGClickPosition computation throws a NullPointerException —
            // confirm the two always co-occur.
        }
        final List<FieldStatsInfo> clickStats = stats.get("click").getFacets().get("q");
        final List<FieldStatsInfo> numClicksStats = stats.get("numClicks").getFacets().get("q");
        final List<FieldStatsInfo> numFoundStats = stats.get("numFound").getFacets().get("q");

        final List<Count> QFacetValues = QFacet.getValues();

        // First pass: one document per distinct query with count + frequency;
        // kept in a map so the second pass can look them up by query string.
        final Map<String, SolrDocument> mapDocuments = new HashMap<String, SolrDocument>();

        for (int i = 0; i < QFacetValues.size(); i++) {
            final SolrDocument doc = new SolrDocument();
            final String query = QFacetValues.get(i).getName();

            final double count = QFacetValues.get(i).getCount();
            final double frequency = StatsUtils.round(count * 100 / numTot, 2, BigDecimal.ROUND_HALF_UP);

            doc.addField("query", query);

            doc.addField("count", count);
            doc.addField("frequency", frequency);
            mapDocuments.put(query, doc);
            solrDocumentList.add(doc);
        }

        // Second pass: enrich each document with the per-query aggregates.
        for (int i = 0; i < QTimeStats.size(); i++) {
            final String query = QTimeStats.get(i).getName();
            final SolrDocument doc = mapDocuments.get(query);

            final int AVGHits = new Double((Double) numFoundStats.get(i).getMean()).intValue();
            final Double noHits = new Double((Double) noHitsStats.get(i).getSum());
            final int AVGQTime = new Double((Double) QTimeStats.get(i).getMean()).intValue();
            final int MAXQTime = new Double((Double) QTimeStats.get(i).getMax()).intValue();
            final double click = new Double((Double) clickStats.get(i).getSum());
            final double clickRatio = StatsUtils.round(click * 100 / (Double) doc.getFirstValue("count"), 2,
                    BigDecimal.ROUND_HALF_UP);
            if (click > 0) {
                // Average click position = total click position / number of clicks.
                final double AVGClickPosition = new Double((Double) positionClickTotStats.get(i).getSum()
                        / (Double) numClicksStats.get(i).getSum()).intValue();

                doc.addField("AVGClickPosition", AVGClickPosition);

            } else {
                // No clicks recorded for this query: no meaningful position.
                doc.addField("AVGClickPosition", "-");
            }

            doc.addField("withClickRatio", clickRatio);
            doc.addField("AVGHits", AVGHits);
            doc.addField("numNoHits", noHits);
            doc.addField("withClick", click);
            doc.addField("AVGQTime", AVGQTime);
            doc.addField("MaxQTime", MAXQTime);
        }

    }

    // Replace the response body with the synthetic document list, keeping the
    // original response header.
    final NamedList<Object> response = new SimpleOrderedMap<Object>();
    response.add("responseHeader", responseHeader);
    response.add("response", solrDocumentList);
    queryResponse.setResponse(response);
}

From source file:com.frank.search.common.PagerComponent.java

License:GNU General Public License

/**
 * SearchComponent hook that appends a "pager" section to the response:
 * a window of page-number -&gt; start-offset entries plus prev/next/first/last
 * navigation offsets, the current (1-based) page, and the page count.
 * Does nothing unless the pager param is set and rows is non-zero.
 *
 * @param rb the response builder holding request params and the result doc set
 * @throws IOException declared by the SearchComponent contract
 */
@Override
@SuppressWarnings("unchecked")
public void process(ResponseBuilder rb) throws IOException {
    /* get request params */
    SolrParams par = rb.req.getParams();
    int rows = par.getInt(CommonParams.ROWS, 0);
    int start = par.getInt(CommonParams.START, 0);
    int pages = par.getInt(PARAM_PAGER, 0);
    int pages_pre = par.getInt(PARAM_PAGER_PRE, 2);

    /* nothing to do unless paging was requested and rows is non-zero */
    if (pages == 0 || rows == 0)
        return;

    /* pager list: lst is the outer "pager" section, lst2 the page window */
    NamedList lst = new SimpleOrderedMap<Object>();
    NamedList lst2 = new SimpleOrderedMap<Object>();

    /* paging pages — page indexes are 0-based internally, 1-based in output */
    int doc_count = rb.getResults().docSet.size();
    int page_count = doc_count / rows;
    int page_actual = start / rows;
    int page_pre = pages_pre;
    int page_post = pages - page_pre - 1;

    /* shift the window when it would extend past either end of the range */
    if (page_actual - page_pre < 0) {
        page_post += -(page_actual - page_pre);
        page_pre -= -(page_actual - page_pre);
    } else if (page_actual + page_post > page_count) {
        // NOTE(review): page_post is recomputed from the ORIGINAL page_pre and
        // then page_pre is overwritten with `page_actual + pages - page_count`,
        // which grows with page_actual — this looks asymmetric with the branch
        // above; verify the intended window arithmetic near the last page.
        page_post = pages - page_pre;
        page_pre = page_actual + pages - page_count;
    }

    /* sanity: clamp both sides of the window at zero */
    if (page_pre < 0)
        page_pre = 0;
    if (page_post < 0)
        page_post = 0;

    /* page window: maps 1-based page label -> start offset */
    int i = (page_actual - page_pre);
    for (i = (i <= 0 ? 0 : i); i <= page_count && i <= (page_actual + page_post); i++)
        lst2.add(Integer.toString(i + 1), i * rows);
    lst.add("pages", lst2);

    /* navigation offsets, included only when the target page exists */
    if (page_actual > 0)
        lst.add("prev", (page_actual - 1) * rows);
    if (page_actual - page_pre > 0)
        lst.add("first", 0);
    if (page_actual < page_count)
        lst.add("next", (page_actual + 1) * rows);
    if (page_actual + page_post < page_count)
        lst.add("last", page_count * rows);
    lst.add("actual", page_actual + 1);
    lst.add("count", page_count);

    /* finish */
    rb.rsp.add("pager", lst);
}

From source file:com.o19s.solr.swan.highlight.SwanHighlighter.java

License:Apache License

/**
 * Generates a list of highlighted query fragments for each item in a list
 * of documents, or returns null if highlighting is disabled.
 *
 * @param docs query results
 * @param query the query
 * @param req the current request
 * @param defaultFields default list of fields to summarize
 *
 * @return NamedList containing a NamedList for each document, which in
 * turn contains sets of (field, summary) pairs; null if highlighting is off
 */
@Override
@SuppressWarnings("unchecked")
public NamedList<Object> doHighlighting(DocList docs, Query query, SolrQueryRequest req, String[] defaultFields)
        throws IOException {

    SolrParams params = req.getParams();
    if (!isHighlightingEnabled(params))
        return null;

    NamedList fragments = new SimpleOrderedMap();

    SolrIndexSearcher searcher = req.getSearcher();
    IndexSchema schema = searcher.getSchema();
    String[] fieldNames = getHighlightFields(query, req, defaultFields);

    // Fields to pre-fetch through the searcher's doc cache: the highlight
    // fields plus the unique key (if the schema defines one).
    Set<String> fset = new HashSet<String>();
    Collections.addAll(fset, fieldNames);
    SchemaField keyField = schema.getUniqueKeyField();
    if (null != keyField)
        fset.add(keyField.getName());

    // Collect the docIds in sorted order; the set is also used below to
    // filter spans in the span-aware highlighter.
    TreeSet<Integer> docIds = new TreeSet<Integer>();
    DocIterator iterator = docs.iterator();
    for (int i = 0; i < docs.size(); i++) {
        docIds.add(iterator.nextDoc());
    }

    // Choose the fragment list builder.
    // FIX: the original called params.get(...).toLowerCase(), which throws a
    // NullPointerException when hl.fragListBuilder is not set; default to ""
    // so an absent param falls through to the default builder.
    String fragListBuilderString = params.get(HighlightParams.FRAG_LIST_BUILDER, "").toLowerCase();
    FragListBuilder fragListBuilder;
    if (fragListBuilderString.equals("single")) {
        fragListBuilder = new SingleFragListBuilder();
    } else {
        fragListBuilder = new com.o19s.solr.swan.highlight.SimpleFragListBuilder();
    }

    // Build the FastVectorHighlighter instance once, outside the processing loop
    SpanAwareFastVectorHighlighter safvh = new SpanAwareFastVectorHighlighter(
            // FVH cannot process hl.usePhraseHighlighter parameter per-field basis
            params.getBool(HighlightParams.USE_PHRASE_HIGHLIGHTER, true),
            // FVH cannot process hl.requireFieldMatch parameter per-field basis
            params.getBool(HighlightParams.FIELD_MATCH, false), fragListBuilder,
            new WordHashFragmentsBuilder(),
            // List of docIds to filter spans
            docIds);
    safvh.setPhraseLimit(params.getInt(HighlightParams.PHRASE_LIMIT, Integer.MAX_VALUE));
    SpanAwareFieldQuery fieldQuery = safvh.getFieldQuery(query, searcher.getIndexReader(), docIds);

    // Highlight each document, choosing FVH or the plain highlighter per field.
    for (int docId : docIds) {
        Document doc = searcher.doc(docId, fset);
        NamedList docSummaries = new SimpleOrderedMap();
        for (String fieldName : fieldNames) {
            fieldName = fieldName.trim();
            if (useFastVectorHighlighter(params, schema, fieldName))
                doHighlightingByFastVectorHighlighter(safvh, fieldQuery, req, docSummaries, docId, doc,
                        fieldName);
            else
                doHighlightingByHighlighter(query, req, docSummaries, docId, doc, fieldName);
        }
        String printId = schema.printableUniqueKey(doc);
        fragments.add(printId == null ? null : printId, docSummaries);
    }
    return fragments;
}

From source file:com.searchbox.solr.CategoryLikeThis.java

License:Apache License

/**
 * Reports request-handler statistics: raw counters plus derived averages.
 *
 * FIX: averages are now computed in floating point. The original used plain
 * integer division (e.g. numFiltered / numRequests), silently truncating the
 * results; the sibling SenseLikeThisHandler.getStatistics already casts to
 * float, so this also restores consistency between the two handlers.
 *
 * @return a NamedList of counters and derived averages (all rendered as Strings)
 */
@Override
public NamedList<Object> getStatistics() {

    NamedList all = new SimpleOrderedMap<Object>();
    all.add("requests", "" + numRequests);
    all.add("errors", "" + numErrors);
    all.add("totalTime(ms)", "" + totalTime);
    all.add("empty", "" + numEmpty);

    if (numRequests != 0) {
        all.add("averageFiltered", "" + (float) numFiltered / numRequests);
        all.add("averageSubset", "" + (float) numSubset / numRequests);
        all.add("avgTimePerRequest", "" + (float) totalTime / numRequests);
        all.add("avgRequestsPerSecond", "" + (float) numRequests / (totalTime * 0.001));
    } else {
        // No requests yet: report zeros instead of dividing by zero.
        all.add("averageFiltered", "" + 0);
        all.add("averageSubset", "" + 0);
        all.add("avgTimePerRequest", "" + 0);
        all.add("avgRequestsPerSecond", "" + 0);
    }

    return all;
}

From source file:com.searchbox.solr.SenseLikeThisHandler.java

License:Apache License

/**
 * Reports request-handler statistics for SenseLikeThisHandler: raw counters
 * plus derived float averages.
 *
 * FIX: "totalTermsUsed" previously reported numTermsConsidered (copy-paste
 * slip — compare "avgTermsUsed", which correctly uses numTermsUsed); it now
 * reports numTermsUsed.
 *
 * @return a NamedList of counters and derived averages
 */
@Override
public NamedList<Object> getStatistics() {

    NamedList all = new SimpleOrderedMap<Object>();
    all.add("requests", "" + numRequests);
    all.add("errors", "" + numErrors);
    all.add("totalTime(ms)", "" + totalTime);
    all.add("empty", "" + numEmpty);

    if (numRequests != 0) {
        all.add("averageFiltered", "" + (float) numFiltered / numRequests);
        all.add("averageSubset", "" + (float) numSubset / numRequests);

        all.add("totalTermsConsidered", numTermsConsidered);
        all.add("avgTermsConsidered", (float) numTermsConsidered / numRequests);

        all.add("totalTermsUsed", numTermsUsed);
        all.add("avgTermsUsed", (float) numTermsUsed / numRequests);

        all.add("avgTimePerRequest", "" + (float) totalTime / numRequests);
        all.add("avgRequestsPerSecond", "" + (float) numRequests / (totalTime * 0.001));
    } else {
        // No requests yet: report zeros instead of dividing by zero.
        all.add("averageFiltered", "" + 0);
        all.add("averageSubset", "" + 0);
        all.add("avgTimePerRequest", "" + 0);
        all.add("avgRequestsPerSecond", "" + 0);
    }

    return all;
}

From source file:com.searchbox.solr.SenseQueryHandler.java

/**
 * Builds score explanations for every document in the given DocList, keyed
 * by each document's printable unique key.
 *
 * @param query    the scored query to explain
 * @param docs     the documents to explain
 * @param searcher searcher used to fetch documents and compute explanations
 * @param schema   schema used to resolve each document's unique key
 * @return an ordered map of unique-key -&gt; Explanation
 * @throws IOException on index access errors
 */
private static NamedList<Explanation> getExplanations(Query query, DocList docs, SolrIndexSearcher searcher,
        IndexSchema schema) throws IOException {

    NamedList<Explanation> explanations = new SimpleOrderedMap<Explanation>();
    DocIterator docIt = docs.iterator();
    int remaining = docs.size();
    while (remaining-- > 0) {
        int docId = docIt.nextDoc();
        Document doc = searcher.doc(docId);
        String uniqueKey = schema.printableUniqueKey(doc);
        explanations.add(uniqueKey, searcher.explain(query, docId));
    }
    return explanations;
}

From source file:com.searchbox.solr.SenseQueryHandler.java

/**
 * Handles a "sense" query request: parses q and fq, reduces the candidate
 * document set with a QueryReductionFilter, scores the reduced set with a
 * SenseQuery, and caches the first 1000 results in the
 * "com.searchbox.sltcache" cache keyed by the non-presentation params.
 * Optionally facets the results and attaches debug/timing information.
 *
 * @param req the Solr request (q, fq, start, rows, sense.* params)
 * @param rsp the Solr response to populate
 * @throws Exception declared by the handler contract; in practice most errors
 *         are caught, counted, and logged below
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    NamedList<Object> timinginfo = new NamedList<Object>();
    numRequests++;
    long startTime = System.currentTimeMillis();
    long lstartTime = System.currentTimeMillis();

    if (!keystate) {
        LOGGER.error(
                "License key failure, not performing sense query. Please email contact@searchbox.com for more information.");
        // NOTE(review): returns silently — no error is recorded on the response.
        return;
    }

    boolean fromcache = false;

    try {
        SolrParams params = req.getParams();
        // Params excluded from the cache key: they affect presentation only,
        // not the cached result set.
        HashSet<String> toIgnore = new HashSet<String>();
        toIgnore.add("start");
        toIgnore.add("rows");
        toIgnore.add("fl");
        toIgnore.add("wt");
        toIgnore.add("indent");

        SolrCacheKey key = new SolrCacheKey(params, toIgnore);

        // Set field flags
        ReturnFields returnFields = new SolrReturnFields(req);
        rsp.setReturnFields(returnFields);
        int flags = 0;
        if (returnFields.wantsScore()) {
            flags |= SolrIndexSearcher.GET_SCORES;
        }

        String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
        String q = params.get(CommonParams.Q);
        Query query = null;
        QueryReductionFilter qr = null;
        List<Query> filters = new ArrayList<Query>();

        // Parse the main query and any filter queries; a parse failure is a
        // client error (400).
        try {
            if (q != null) {
                QParser parser = QParser.getParser(q, defType, req);
                query = parser.getQuery();

            }

            String[] fqs = req.getParams().getParams(CommonParams.FQ);
            if (fqs != null && fqs.length != 0) {
                for (String fq : fqs) {
                    if (fq != null && fq.trim().length() != 0) {
                        QParser fqp = QParser.getParser(fq, null, req);
                        filters.add(fqp.getQuery());
                    }
                }
            }
        } catch (Exception e) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }

        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        SenseQuery slt = null;
        if (q == null) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis requires either a query (?q=) or text to find similar documents.");
        }

        timinginfo.add("Parse Query time", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Parsed Query Time:\t" + (System.currentTimeMillis() - lstartTime));
        lstartTime = System.currentTimeMillis();

        SolrIndexSearcher searcher = req.getSearcher();
        SolrCache sc = searcher.getCache("com.searchbox.sltcache");
        DocListAndSet sltDocs = null;
        if (sc != null) {
            //try to get from cache
            sltDocs = (DocListAndSet) sc.get(key.getSet());
        } else {
            LOGGER.error("com.searchbox.sltcache not defined, can't cache slt queries");
        }

        // Recompute when the requested page reaches past the cached window
        // (first 1000 docs), on a cache miss, or when caching is disabled.
        if (start + rows > 1000 || sltDocs == null || !params.getBool(CommonParams.CACHE, true)) { //not in cache, need to do search
            String CKBid = params.get(SenseParams.SENSE_CKB, SenseParams.SENSE_CKB_DEFAULT);
            String senseField = params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD);
            RealTermFreqVector rtv = new RealTermFreqVector(q,
                    SenseQuery.getAnalyzerForField(req.getSchema(), senseField));
            timinginfo.add("Make real term freq vector", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            // Query reduction: shrink the candidate doc set before scoring.
            qr = new QueryReductionFilter(rtv, CKBid, searcher, senseField);
            qr.setNumtermstouse(params.getInt(SenseParams.SENSE_QR_NTU, SenseParams.SENSE_QR_NTU_DEFAULT));

            numTermsUsed += qr.getNumtermstouse();
            numTermsConsidered += rtv.getSize();

            qr.setThreshold(params.getInt(SenseParams.SENSE_QR_THRESH, SenseParams.SENSE_QR_THRESH_DEFAULT));
            qr.setMaxDocSubSet(params.getInt(SenseParams.SENSE_QR_MAXDOC, SenseParams.SENSE_QR_MAXDOC_DEFAULT));
            qr.setMinDocSetSizeForFilter(
                    params.getInt(SenseParams.SENSE_MINDOC4QR, SenseParams.SENSE_MINDOC4QR_DEFAULT));

            timinginfo.add("Setup Sense query", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Setup Sense query:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            DocList subFiltered = qr.getSubSetToSearchIn(filters);
            timinginfo.add("Do Query Redux", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Do query redux:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            numFiltered += qr.getFiltered().docList.size();
            numSubset += subFiltered.size();
            LOGGER.info("Number of documents to search:\t" + subFiltered.size());

            slt = new SenseQuery(rtv, senseField, CKBid,
                    params.getFloat(SenseParams.SENSE_WEIGHT, SenseParams.DEFAULT_SENSE_WEIGHT), null);
            LOGGER.debug("Setup sense query:\t" + (System.currentTimeMillis() - lstartTime));
            timinginfo.add("Setup sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            // Always fetch the first 1000 results so the cached entry can serve
            // subsequent pages.
            sltDocs = searcher.getDocListAndSet(slt, subFiltered, Sort.RELEVANCE, 0, 1000, flags);
            timinginfo.add("Do sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            LOGGER.debug("Adding this keyto cache:\t" + key.getSet().toString());
            // NOTE(review): assumes the cache exists here even though sc may
            // have been null above — confirm configuration guarantees it.
            searcher.getCache("com.searchbox.sltcache").put(key.getSet(), sltDocs);

        } else {
            fromcache = true;
            timinginfo.add("Getting from cache", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Got result from cache");
            lstartTime = System.currentTimeMillis();
        }

        if (sltDocs == null) {
            numEmpty++;
            sltDocs = new DocListAndSet(); // avoid NPE
        }
        rsp.add("response", sltDocs.docList.subset(start, rows));

        // --------- OLD CODE BELOW
        // maybe facet the results
        if (params.getBool(FacetParams.FACET, false)) {
            if (sltDocs.docSet == null) {
                rsp.add("facet_counts", null);
            } else {
                SimpleFacets f = new SimpleFacets(req, sltDocs.docSet, params);
                rsp.add("facet_counts", f.getFacetCounts());
            }
        }

        // Debug info: enabled either by debugQuery=true or by individual
        // debug=query / debug=results params.
        boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);

        boolean dbgQuery = false, dbgResults = false;
        if (dbg == false) {//if it's true, we are doing everything anyway.
            String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
            if (dbgParams != null) {
                for (int i = 0; i < dbgParams.length; i++) {
                    if (dbgParams[i].equals(CommonParams.QUERY)) {
                        dbgQuery = true;
                    } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                        dbgResults = true;
                    }
                }
            }
        } else {
            dbgQuery = true;
            dbgResults = true;
        }
        if (dbg == true) {
            try {
                NamedList dbgInfo = new SimpleOrderedMap();
                // NOTE(review): when the result came from the cache, slt is still
                // null here and this line throws a NullPointerException; it is
                // caught below and surfaced as "exception_during_debug".
                dbgInfo.add("Query freqs", slt.getAllTermsasString());
                dbgInfo.addAll(
                        getExplanations(slt, sltDocs.docList.subset(start, rows), searcher, req.getSchema()));
                if (null != filters) {
                    dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                    List<String> fqs = new ArrayList<String>(filters.size());
                    for (Query fq : filters) {
                        fqs.add(QueryParsing.toString(fq, req.getSchema()));
                    }
                    dbgInfo.add("parsed_filter_queries", fqs);
                }
                if (null != qr) {
                    dbgInfo.add("QueryReduction", qr.getDbgInfo());
                }
                if (null != slt) {
                    dbgInfo.add("SLT", slt.getDbgInfo());

                }
                dbgInfo.add("fromcache", fromcache);
                rsp.add("debug", dbgInfo);
                timinginfo.add("Debugging parts", System.currentTimeMillis() - lstartTime);
                dbgInfo.add("timings", timinginfo);

            } catch (Exception e) {
                SolrException.log(SolrCore.log, "Exception during debug", e);
                rsp.add("exception_during_debug", SolrException.toStr(e));
            }
        }
    } catch (Exception e) {
        // NOTE(review): errors are swallowed here — only counted and printed to
        // stderr; the client receives no error status.
        e.printStackTrace();
        numErrors++;
    } finally {
        totalTime += System.currentTimeMillis() - startTime;
    }
}