Example usage for org.apache.solr.common.util NamedList addAll

List of usage examples for org.apache.solr.common.util NamedList addAll

Introduction

On this page you can find example usage of org.apache.solr.common.util NamedList#addAll.

Prototype

public boolean addAll(NamedList<T> nl) 

Source Link

Document

Appends the elements of the given NamedList to this one.

Usage

From source file:com.pjaol.ESB.core.Controller.java

License:Apache License

@SuppressWarnings("rawtypes")
@Override
public NamedList process(NamedList input) throws ModuleRunException {

    // Released once to start all queued ModuleRunners at the same time.
    CountDownLatch start = new CountDownLatch(1);

    // Counted down by each ModuleRunner as it completes. The original code
    // used an anonymous subclass whose await() only called super.await(),
    // so a plain latch is behaviorally identical.
    CountDownLatch stop = new CountDownLatch(pipes.size());

    // Watchdog for the whole controller run. NOTE(review): the
    // RuntimeException is thrown on the timer's own thread, so it does not
    // propagate to the caller of process() -- confirm that is intended.
    TimerThread timer = new TimerThread(getTimeout(), getName(), "controller") {
        @Override
        public void timeout() {
            timeoutCountBean.inc(1);
            super.timeout();
            throw new RuntimeException("Timed out controller: " + getName());
        }
    };

    // Accumulates the merged output of every pipeline run.
    NamedList allOutput = new NamedList();
    List<ModuleRunner> moduleRunners = new ArrayList<ModuleRunner>();

    // Pipes have little to no value
    // TODO: should pipes be converted to run in parallel?

    for (String p : pipes) {

        // run each pipe in parallel
        List<String> pipeLinesToRun = pipelines.get(p);

        for (String pipename : pipeLinesToRun) {
            PipeLine pipeline = core.getPipeLineByName(pipename);

            // all pipelines should have a clean version of the input
            NamedList pipeLineInput = input.clone();

            ModuleRunner runner = new ModuleRunner(start, stop, pipeline, pipeLineInput, errorBean,
                    timeoutCountBean);
            executorService.execute(runner);
            moduleRunners.add(runner);
        }
    }

    if (_logger.isDebugEnabled())
        _logger.debug("******* Starting *******");

    long startT = System.currentTimeMillis();
    start.countDown(); // fire all runners
    timer.start();

    try {
        executorService.shutdown();
        executorService.awaitTermination(getTimeout(), TimeUnit.MILLISECONDS);
        stop.await(getTimeout(), TimeUnit.MILLISECONDS);

    } catch (InterruptedException e) {
        // Restore the interrupt flag so the container/thread pool can
        // observe the interruption (previously it was swallowed).
        Thread.currentThread().interrupt();
        errorBean.inc(1);
        timeoutCountBean.inc(1);
        // TODO(review): pass `e` as the cause if ModuleRunException has a
        // (String, Throwable) constructor -- the stack trace is lost here.
        throw new ModuleRunException(e.getMessage());

    } finally {
        executorService.shutdownNow();
    }

    // Merge each runner's output; runners that produced nothing (e.g. timed
    // out) return null and are skipped.
    for (ModuleRunner runner : moduleRunners) {
        NamedList data = runner.getOutput();
        if (data != null)
            allOutput.addAll(data);
    }

    long endT = System.currentTimeMillis();

    if (_logger.isDebugEnabled())
        _logger.debug("******* Shutting down ******* taken: " + (endT - startT) + " ms");

    // run limiters in serial
    if (limiterName != null) {
        // Input should be cloned and output added from previous pipelines
        NamedList limiterOutput = new NamedList();
        input.addAll(allOutput);
        NamedList limiterInput = input.clone();

        for (String pipeLine : limiterPipeLines) {

            PipeLine p = core.getPipeLineByName(pipeLine);
            try {
                // TODO: do i want to set this exclusively ?
                limiterOutput.addAll(p.process(limiterInput));
            } catch (Exception e) {
                errorBean.inc(1);
                // TODO(review): preserve `e` as the cause if supported.
                throw new ModuleRunException(e.getMessage());
            }
        }

        // Limiter output replaces the raw pipeline output.
        allOutput = limiterOutput;
    }

    long timeTaken = endT - startT;

    timer.halt();
    allOutput.add("QTime", timeTaken);
    performanceBean.inc(Long.valueOf(endT - startT).intValue()); // log the
    // time
    errorBean.incCardinal(); // allow averages be calculated against all
    // requests
    timeoutCountBean.incCardinal();

    return allOutput;
}

From source file:com.pjaol.ESB.web.HTTPAPI.java

License:Apache License

/**
 * Handles GET requests: parses the request parameters into a NamedList,
 * runs the controller mapped to the request path, and writes the result in
 * the requested format -- xml (default), json (with optional JSONP
 * callback), or any other registered formatter.
 */
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    resp.setCharacterEncoding("UTF-8");

    NamedList<String> input = new NamedList<String>();
    String pathInfo = httpParamsParser.getPath(req);

    try {
        input.addAll(httpParamsParser.paramsToNamedList(req));
    } catch (Exception e) {
        throw new ServletException(e);
    }

    Controller controller = core.getControllerByUri(pathInfo);

    // NOTE(review): a fresh thread pool per request is expensive; consider
    // a shared executor managed by the servlet lifecycle.
    ExecutorService executorService = Executors.newFixedThreadPool(threadPoolSize);

    controller.setExecutorService(executorService);
    NamedList results = null;
    try {
        results = controller.process(input);
    } catch (java.util.concurrent.RejectedExecutionException ree) {
        // Pool saturated: tell the client to back off and retry.
        resp.setContentType("text/plain");
        resp.setHeader("Retry-After", "1");
        resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, req.getContextPath() + req.getServletPath()
                + " is currently unavailable.  Header value for 'Retry-After' sent with value '1'.");

        return;
    } catch (ModuleRunException e) {
        throw new ServletException(e);
    }

    String format = req.getParameter("format");

    Writer writer = resp.getWriter();

    if (format == null || format.equals("xml")) {
        resp.setContentType("text/xml");
        writer.write(formatters.get("xml").toOutput(results));
    } else if (format.equals("json")) {
        resp.setContentType("application/json");
        String jsonpCallback = req.getParameter("jsoncallback");
        if (jsonpCallback != null) {
            writer.write(jsonpCallback + "(");
        }

        writer.write(formatters.get("json").toOutput(results));

        if (jsonpCallback != null) {
            writer.write(");");
        }
    } else {
        Formatter formatter = formatters.get(format);
        if (formatter == null) {
            // Fix: an unregistered format previously caused a
            // NullPointerException (HTTP 500); report a client error instead.
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST, "Unknown format: " + format);
            return;
        }
        writer.write(formatter.toOutput(results));
    }

    writer.flush();
    writer.close();
}

From source file:com.searchbox.solr.SenseQueryHandler.java

/**
 * Executes a "sense" similarity query: parses q/fq, optionally serves the
 * doc list from the com.searchbox.sltcache cache, otherwise builds a
 * query-reduction filter plus SenseQuery and searches, then adds the
 * response, optional facets, and optional debug/timing info to rsp.
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    NamedList<Object> timinginfo = new NamedList<Object>();
    numRequests++;
    long startTime = System.currentTimeMillis();
    long lstartTime = System.currentTimeMillis(); // per-phase timer

    if (!keystate) {
        LOGGER.error(
                "License key failure, not performing sense query. Please email contact@searchbox.com for more information.");
        return;
    }

    boolean fromcache = false;

    try {
        SolrParams params = req.getParams();
        // Paging/formatting params must not affect the cache key.
        HashSet<String> toIgnore = new HashSet<String>();
        toIgnore.add("start");
        toIgnore.add("rows");
        toIgnore.add("fl");
        toIgnore.add("wt");
        toIgnore.add("indent");

        SolrCacheKey key = new SolrCacheKey(params, toIgnore);

        // Set field flags
        ReturnFields returnFields = new SolrReturnFields(req);
        rsp.setReturnFields(returnFields);
        int flags = 0;
        if (returnFields.wantsScore()) {
            flags |= SolrIndexSearcher.GET_SCORES;
        }

        String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
        String q = params.get(CommonParams.Q);
        Query query = null;
        QueryReductionFilter qr = null;
        List<Query> filters = new ArrayList<Query>();

        try {
            if (q != null) {
                QParser parser = QParser.getParser(q, defType, req);
                query = parser.getQuery();
            }

            // Parse any fq filters so they can restrict the candidate set.
            String[] fqs = req.getParams().getParams(CommonParams.FQ);
            if (fqs != null && fqs.length != 0) {
                for (String fq : fqs) {
                    if (fq != null && fq.trim().length() != 0) {
                        QParser fqp = QParser.getParser(fq, null, req);
                        filters.add(fqp.getQuery());
                    }
                }
            }
        } catch (Exception e) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
        }

        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        SenseQuery slt = null;
        if (q == null) {
            numErrors++;
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "SenseLikeThis requires either a query (?q=) or text to find similar documents.");
        }

        timinginfo.add("Parse Query time", System.currentTimeMillis() - lstartTime);
        LOGGER.debug("Parsed Query Time:\t" + (System.currentTimeMillis() - lstartTime));
        lstartTime = System.currentTimeMillis();

        SolrIndexSearcher searcher = req.getSearcher();
        SolrCache sc = searcher.getCache("com.searchbox.sltcache");
        DocListAndSet sltDocs = null;
        if (sc != null) {
            //try to get from cache
            sltDocs = (DocListAndSet) sc.get(key.getSet());
        } else {
            LOGGER.error("com.searchbox.sltcache not defined, can't cache slt queries");
        }

        if (start + rows > 1000 || sltDocs == null || !params.getBool(CommonParams.CACHE, true)) { //not in cache, need to do search
            String CKBid = params.get(SenseParams.SENSE_CKB, SenseParams.SENSE_CKB_DEFAULT);
            String senseField = params.get(SenseParams.SENSE_FIELD, SenseParams.DEFAULT_SENSE_FIELD);
            RealTermFreqVector rtv = new RealTermFreqVector(q,
                    SenseQuery.getAnalyzerForField(req.getSchema(), senseField));
            timinginfo.add("Make real term freq vector", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            // Reduce the candidate doc set before the (expensive) sense scoring.
            qr = new QueryReductionFilter(rtv, CKBid, searcher, senseField);
            qr.setNumtermstouse(params.getInt(SenseParams.SENSE_QR_NTU, SenseParams.SENSE_QR_NTU_DEFAULT));

            numTermsUsed += qr.getNumtermstouse();
            numTermsConsidered += rtv.getSize();

            qr.setThreshold(params.getInt(SenseParams.SENSE_QR_THRESH, SenseParams.SENSE_QR_THRESH_DEFAULT));
            qr.setMaxDocSubSet(params.getInt(SenseParams.SENSE_QR_MAXDOC, SenseParams.SENSE_QR_MAXDOC_DEFAULT));
            qr.setMinDocSetSizeForFilter(
                    params.getInt(SenseParams.SENSE_MINDOC4QR, SenseParams.SENSE_MINDOC4QR_DEFAULT));

            timinginfo.add("Setup Sense query", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Setup Sense query:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            DocList subFiltered = qr.getSubSetToSearchIn(filters);
            timinginfo.add("Do Query Redux", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Do query redux:\t" + (System.currentTimeMillis() - lstartTime));
            lstartTime = System.currentTimeMillis();

            numFiltered += qr.getFiltered().docList.size();
            numSubset += subFiltered.size();
            LOGGER.info("Number of documents to search:\t" + subFiltered.size());

            slt = new SenseQuery(rtv, senseField, CKBid,
                    params.getFloat(SenseParams.SENSE_WEIGHT, SenseParams.DEFAULT_SENSE_WEIGHT), null);
            LOGGER.debug("Setup sense query:\t" + (System.currentTimeMillis() - lstartTime));
            timinginfo.add("Setup sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            sltDocs = searcher.getDocListAndSet(slt, subFiltered, Sort.RELEVANCE, 0, 1000, flags);
            timinginfo.add("Do sense query", System.currentTimeMillis() - lstartTime);
            lstartTime = System.currentTimeMillis();

            LOGGER.debug("Adding this keyto cache:\t" + key.getSet().toString());
            // Fix: the original called searcher.getCache(...).put(...)
            // unconditionally, which threw an NPE whenever the cache was not
            // configured (the sc == null case already logged above).
            if (sc != null) {
                sc.put(key.getSet(), sltDocs);
            }

        } else {
            fromcache = true;
            timinginfo.add("Getting from cache", System.currentTimeMillis() - lstartTime);
            LOGGER.debug("Got result from cache");
            lstartTime = System.currentTimeMillis();
        }

        if (sltDocs == null) {
            numEmpty++;
            sltDocs = new DocListAndSet(); // avoid NPE
        }
        rsp.add("response", sltDocs.docList.subset(start, rows));

        // maybe facet the results
        if (params.getBool(FacetParams.FACET, false)) {
            if (sltDocs.docSet == null) {
                rsp.add("facet_counts", null);
            } else {
                SimpleFacets f = new SimpleFacets(req, sltDocs.docSet, params);
                rsp.add("facet_counts", f.getFacetCounts());
            }
        }

        // Work out which debug sections were requested.
        boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);

        boolean dbgQuery = false, dbgResults = false;
        if (dbg == false) {//if it's true, we are doing everything anyway.
            String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
            if (dbgParams != null) {
                for (int i = 0; i < dbgParams.length; i++) {
                    if (dbgParams[i].equals(CommonParams.QUERY)) {
                        dbgQuery = true;
                    } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                        dbgResults = true;
                    }
                }
            }
        } else {
            dbgQuery = true;
            dbgResults = true;
        }
        if (dbg == true) {
            try {
                NamedList dbgInfo = new SimpleOrderedMap();
                // Fix: slt is null when results came from the cache; the
                // unconditional call previously threw an NPE that aborted
                // the entire debug section.
                if (null != slt) {
                    dbgInfo.add("Query freqs", slt.getAllTermsasString());
                }
                // NOTE(review): getExplanations also receives slt -- confirm
                // it tolerates null on the cache-hit path.
                dbgInfo.addAll(
                        getExplanations(slt, sltDocs.docList.subset(start, rows), searcher, req.getSchema()));
                if (null != filters) {
                    dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                    List<String> fqs = new ArrayList<String>(filters.size());
                    for (Query fq : filters) {
                        fqs.add(QueryParsing.toString(fq, req.getSchema()));
                    }
                    dbgInfo.add("parsed_filter_queries", fqs);
                }
                if (null != qr) {
                    dbgInfo.add("QueryReduction", qr.getDbgInfo());
                }
                if (null != slt) {
                    dbgInfo.add("SLT", slt.getDbgInfo());
                }
                dbgInfo.add("fromcache", fromcache);
                rsp.add("debug", dbgInfo);
                timinginfo.add("Debugging parts", System.currentTimeMillis() - lstartTime);
                dbgInfo.add("timings", timinginfo);

            } catch (Exception e) {
                SolrException.log(SolrCore.log, "Exception during debug", e);
                rsp.add("exception_during_debug", SolrException.toStr(e));
            }
        }
    } catch (Exception e) {
        // Fix: use the handler's logger instead of printStackTrace(), so
        // the failure lands in the Solr log with a stack trace.
        LOGGER.error("Exception while handling sense query", e);
        numErrors++;
    } finally {
        totalTime += System.currentTimeMillis() - startTime;
    }
}

From source file:edu.upenn.library.solrplugins.ProofOfConceptPayloadHandler.java

License:Apache License

/**
 * Merges the payload entries of {@code add} into {@code preExisting}:
 * the "count" entry of {@code preExisting} is incremented by
 * {@code addCount} and every remaining entry of {@code add} is appended.
 *
 * @throws IllegalStateException if the "count" entry removed from
 *         {@code add} does not equal {@code addCount}
 */
@Override
public NamedList<Object> mergePayload(NamedList<Object> preExisting, NamedList<Object> add,
        long preExistingCount, long addCount) {
    // The incoming list carries its own count; pull it out and sanity-check
    // it against the externally supplied value before merging.
    Number removedCount = (Number) add.remove("count");
    if (removedCount.longValue() != addCount) {
        throw new IllegalStateException("fieldType-internal and -external counts do not match");
    }
    // Bump the accumulated count in place, then append the rest of `add`.
    int idx = preExisting.indexOf("count", 0);
    long mergedCount = ((Number) preExisting.getVal(idx)).longValue() + addCount;
    preExisting.setVal(idx, mergedCount);
    preExisting.addAll(add);
    return preExisting;
}

From source file:org.alfresco.solr.component.spellcheck.AlfrescoSpellCheckBackCompatComponent.java

License:Open Source License

/**
 * Collects "spellcheck-extras" from every shard response and rewrites the
 * spellcheck section of the merged response so that each collation carries
 * its collationQueryString, keeping the Solr 4 response shape.
 */
public void finishStage(ResponseBuilder rb) {
    if (!rb.req.getParams().getBool(SpellCheckComponent.COMPONENT_NAME, false)
            || rb.stage != ResponseBuilder.STAGE_GET_FIELDS)
        return;

    // collationQuery -> collationQueryString gathered from all shards.
    Map extras = new HashMap();
    for (ShardRequest sreq : rb.finished) {
        for (ShardResponse srsp : sreq.responses) {
            NamedList nl = null;

            try {
                nl = (NamedList) srsp.getSolrResponse().getResponse().get("spellcheck-extras");
            } catch (Exception e) {
                if (rb.req.getParams().getBool(ShardParams.SHARDS_TOLERANT, false)) {
                    continue; // looks like a shard did not return anything
                }
                throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                        "Unable to read spelling info for shard: " + srsp.getShard(), e);
            }

            if (nl != null) {
                collectExtras(nl, extras);
            }
        }
    }

    if (extras.size() == 0) {
        return;
    }

    NamedList response = rb.rsp.getValues();
    NamedList spellcheck = (NamedList) response.get("spellcheck");

    if (spellcheck == null) {
        return;
    }

    NamedList collations = (NamedList) spellcheck.get("collations");
    NamedList suggestions = (NamedList) spellcheck.get("suggestions");

    if (collations != null) {
        //Fix up the collationQueryString in Solr 6
        for (int i = 0; i < collations.size(); i++) {
            if ("collation".equals(collations.getName(i))) {
                NamedList collation = (NamedList) collations.getVal(i);
                String collationQuery = (String) collation.get("collationQuery");
                String collationQueryString = (String) extras.get(collationQuery);
                collation.add("collationQueryString", collationQueryString);
            }
        }
        //Add the collations to the suggestions to support the Solr 4 format.
        // Fix: guard against a response that has collations but no
        // "suggestions" entry, which previously threw an NPE here.
        if (suggestions != null) {
            suggestions.addAll(collations);
        }
    } else {
        //Fix up the collationQueryString Solr4
        for (int i = 0; i < suggestions.size(); i++) {
            if ("collation".equals(suggestions.getName(i))) {
                NamedList collation = (NamedList) suggestions.getVal(i);
                String collationQuery = (String) collation.get("collationQuery");
                String collationQueryString = (String) extras.get(collationQuery);
                collation.add("collationQueryString", collationQueryString);
            }
        }
    }
}

From source file:org.alfresco.solr.HandlerReportBuilder.java

License:Open Source License

/**
 * Adds a summary of the named core's indexing state to the given report.
 * @param trackerRegistry registry used to look up the core's trackers
 * @param cname the core name
 * @param detail whether to include detailed statistics
 * @param hist whether to include histogram data
 * @param values whether to include raw values
 * @param srv the information server providing core stats and tracker stats
 * @param report the report the summary is added to, keyed by core name
 * @throws IOException
 */
public static void addCoreSummary(TrackerRegistry trackerRegistry, String cname, boolean detail, boolean hist,
        boolean values, InformationServer srv, NamedList<Object> report) throws IOException {
    NamedList<Object> coreSummary = new SimpleOrderedMap<Object>();
    coreSummary.addAll((SimpleOrderedMap<Object>) srv.getCoreStats());

    // Transaction (metadata) tracker progress.
    MetadataTracker metaTrkr = trackerRegistry.getTrackerForCore(cname, MetadataTracker.class);
    TrackerState metadataTrkrState = metaTrkr.getTrackerState();
    long lastIndexTxCommitTime = metadataTrkrState.getLastIndexedTxCommitTime();

    long lastIndexedTxId = metadataTrkrState.getLastIndexedTxId();
    long lastTxCommitTimeOnServer = metadataTrkrState.getLastTxCommitTimeOnServer();
    long lastTxIdOnServer = metadataTrkrState.getLastTxIdOnServer();
    Date lastIndexTxCommitDate = new Date(lastIndexTxCommitTime);
    Date lastTxOnServerDate = new Date(lastTxCommitTimeOnServer);
    long transactionsToDo = lastTxIdOnServer - lastIndexedTxId;
    if (transactionsToDo < 0) {
        transactionsToDo = 0;
    }

    // ACL (change set) tracker progress.
    AclTracker aclTrkr = trackerRegistry.getTrackerForCore(cname, AclTracker.class);
    TrackerState aclTrkrState = aclTrkr.getTrackerState();
    long lastIndexChangeSetCommitTime = aclTrkrState.getLastIndexedChangeSetCommitTime();
    long lastIndexedChangeSetId = aclTrkrState.getLastIndexedChangeSetId();
    long lastChangeSetCommitTimeOnServer = aclTrkrState.getLastChangeSetCommitTimeOnServer();
    long lastChangeSetIdOnServer = aclTrkrState.getLastChangeSetIdOnServer();
    Date lastIndexChangeSetCommitDate = new Date(lastIndexChangeSetCommitTime);
    Date lastChangeSetOnServerDate = new Date(lastChangeSetCommitTimeOnServer);
    long changeSetsToDo = lastChangeSetIdOnServer - lastIndexedChangeSetId;
    if (changeSetsToDo < 0) {
        changeSetsToDo = 0;
    }

    // Estimate remaining metadata-indexing time from mean docs/tx and mean
    // per-node elapsed time.
    long nodesToDo = 0;
    long remainingTxTimeMillis = 0;
    if (transactionsToDo > 0) {
        // We now use the elapsed time as seen by the single thread farming out metadata indexing
        double meanDocsPerTx = srv.getTrackerStats().getMeanDocsPerTx();
        double meanNodeElaspedIndexTime = srv.getTrackerStats().getMeanNodeElapsedIndexTime();
        nodesToDo = (long) (transactionsToDo * meanDocsPerTx);
        remainingTxTimeMillis = (long) (nodesToDo * meanNodeElaspedIndexTime);
    }
    Date now = new Date();
    Date end = new Date(now.getTime() + remainingTxTimeMillis);
    Duration remainingTx = new Duration(now, end);

    long remainingChangeSetTimeMillis = 0;
    if (changeSetsToDo > 0) {
        // We now use the elapsed time as seen by the single thread farming out alc indexing
        double meanAclsPerChangeSet = srv.getTrackerStats().getMeanAclsPerChangeSet();
        double meanAclElapsedIndexTime = srv.getTrackerStats().getMeanAclElapsedIndexTime();
        remainingChangeSetTimeMillis = (long) (changeSetsToDo * meanAclsPerChangeSet * meanAclElapsedIndexTime);
    }
    now = new Date();
    end = new Date(now.getTime() + remainingChangeSetTimeMillis);
    Duration remainingChangeSet = new Duration(now, end);

    // Full-text-search status counts and remaining content-indexing time.
    // NOTE(review): the FTSStatus gets below will NPE if addFTSStatusCounts
    // omitted a key -- assumed always present; confirm.
    NamedList<Object> ftsSummary = new SimpleOrderedMap<Object>();
    long remainingContentTimeMillis = 0;
    srv.addFTSStatusCounts(ftsSummary);
    long cleanCount = ((Long) ftsSummary.get("Node count with FTSStatus Clean")).longValue();
    long dirtyCount = ((Long) ftsSummary.get("Node count with FTSStatus Dirty")).longValue();
    long newCount = ((Long) ftsSummary.get("Node count with FTSStatus New")).longValue();
    long nodesInIndex = ((Long) coreSummary.get("Alfresco Nodes in Index"));
    long contentYetToSee = nodesInIndex > 0 ? nodesToDo * (cleanCount + dirtyCount + newCount) / nodesInIndex
            : 0;
    if (dirtyCount + newCount + contentYetToSee > 0) {
        // We now use the elapsed time as seen by the single thread farming out alc indexing
        double meanContentElapsedIndexTime = srv.getTrackerStats().getMeanContentElapsedIndexTime();
        remainingContentTimeMillis = (long) ((dirtyCount + newCount + contentYetToSee)
                * meanContentElapsedIndexTime);
    }
    now = new Date();
    end = new Date(now.getTime() + remainingContentTimeMillis);
    Duration remainingContent = new Duration(now, end);
    coreSummary.add("FTS", ftsSummary);

    // TX lag (clamped at zero when the index is ahead of the server clock).
    Duration txLag = new Duration(lastIndexTxCommitDate, lastTxOnServerDate);
    if (lastIndexTxCommitDate.compareTo(lastTxOnServerDate) > 0) {
        txLag = new Duration();
    }
    long txLagSeconds = (lastTxCommitTimeOnServer - lastIndexTxCommitTime) / 1000;
    if (txLagSeconds < 0) {
        txLagSeconds = 0;
    }

    Duration changeSetLag = new Duration(lastIndexChangeSetCommitDate, lastChangeSetOnServerDate);
    if (lastIndexChangeSetCommitDate.compareTo(lastChangeSetOnServerDate) > 0) {
        changeSetLag = new Duration();
    }
    long changeSetLagSeconds = (lastChangeSetCommitTimeOnServer - lastIndexChangeSetCommitTime) / 1000;
    // Fix: the original clamped txLagSeconds here a second time (copy-paste
    // bug), leaving changeSetLagSeconds potentially negative.
    if (changeSetLagSeconds < 0) {
        changeSetLagSeconds = 0;
    }

    ContentTracker contentTrkr = trackerRegistry.getTrackerForCore(cname, ContentTracker.class);
    TrackerState contentTrkrState = contentTrkr.getTrackerState();
    // Leave ModelTracker out of this check, because it is common
    boolean aTrackerIsRunning = aclTrkrState.isRunning() || metadataTrkrState.isRunning()
            || contentTrkrState.isRunning();
    coreSummary.add("Active", aTrackerIsRunning);

    ModelTracker modelTrkr = trackerRegistry.getModelTracker();
    TrackerState modelTrkrState = modelTrkr.getTrackerState();
    coreSummary.add("ModelTracker Active", modelTrkrState.isRunning());
    coreSummary.add("ContentTracker Active", contentTrkrState.isRunning());
    coreSummary.add("MetadataTracker Active", metadataTrkrState.isRunning());
    coreSummary.add("AclTracker Active", aclTrkrState.isRunning());

    // TX

    coreSummary.add("Last Index TX Commit Time", lastIndexTxCommitTime);
    coreSummary.add("Last Index TX Commit Date", lastIndexTxCommitDate);
    coreSummary.add("TX Lag", txLagSeconds + " s");
    coreSummary.add("TX Duration", txLag.toString());
    coreSummary.add("Timestamp for last TX on server", lastTxCommitTimeOnServer);
    coreSummary.add("Date for last TX on server", lastTxOnServerDate);
    coreSummary.add("Id for last TX on server", lastTxIdOnServer);
    coreSummary.add("Id for last TX in index", lastIndexedTxId);
    coreSummary.add("Approx transactions remaining", transactionsToDo);
    coreSummary.add("Approx transaction indexing time remaining",
            remainingTx.largestComponentformattedString());

    // Change set

    coreSummary.add("Last Index Change Set Commit Time", lastIndexChangeSetCommitTime);
    coreSummary.add("Last Index Change Set Commit Date", lastIndexChangeSetCommitDate);
    coreSummary.add("Change Set Lag", changeSetLagSeconds + " s");
    coreSummary.add("Change Set Duration", changeSetLag.toString());
    coreSummary.add("Timestamp for last Change Set on server", lastChangeSetCommitTimeOnServer);
    coreSummary.add("Date for last Change Set on server", lastChangeSetOnServerDate);
    coreSummary.add("Id for last Change Set on server", lastChangeSetIdOnServer);
    coreSummary.add("Id for last Change Set in index", lastIndexedChangeSetId);
    coreSummary.add("Approx change sets remaining", changeSetsToDo);
    coreSummary.add("Approx change set indexing time remaining",
            remainingChangeSet.largestComponentformattedString());

    coreSummary.add("Approx content indexing time remaining",
            remainingContent.largestComponentformattedString());

    // Stats

    coreSummary.add("Model sync times (ms)",
            srv.getTrackerStats().getModelTimes().getNamedList(detail, hist, values));
    coreSummary.add("Acl index time (ms)",
            srv.getTrackerStats().getAclTimes().getNamedList(detail, hist, values));
    coreSummary.add("Node index time (ms)",
            srv.getTrackerStats().getNodeTimes().getNamedList(detail, hist, values));
    coreSummary.add("Docs/Tx", srv.getTrackerStats().getTxDocs().getNamedList(detail, hist, values));
    coreSummary.add("Doc Transformation time (ms)",
            srv.getTrackerStats().getDocTransformationTimes().getNamedList(detail, hist, values));

    // Model

    Map<String, Set<String>> modelErrors = srv.getModelErrors();
    if (modelErrors.size() > 0) {
        NamedList<Object> errorList = new SimpleOrderedMap<Object>();
        for (Map.Entry<String, Set<String>> modelNameToErrors : modelErrors.entrySet()) {
            errorList.add(modelNameToErrors.getKey(), modelNameToErrors.getValue());
        }
        coreSummary.add(
                "Model changes are not compatible with the existing data model and have not been applied",
                errorList);
    }

    report.add(cname, coreSummary);
}

From source file:org.dice.solrenhancements.morelikethis.DiceMoreLikeThisHandler.java

License:Apache License

/**
 * When debugging is requested, attaches a "debug" section to the response
 * containing the MLT interesting terms, the standard Solr debug output,
 * and the raw/parsed filter queries.
 */
private void addDebugInfo(SolrQueryRequest req, SolrQueryResponse rsp, String q, List<Query> mltFqFilters,
        MoreLikeThisHelper mlt, MLTResult mltResult) {
    DocListAndSet mltDocs = mltResult.getDoclist();

    // Work out which debug sections (query/results) were asked for.
    boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);
    boolean dbgQuery = false, dbgResults = false;
    if (dbg == false) {//if it's true, we are doing everything anyway.
        String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
        if (dbgParams != null) {
            for (int i = 0; i < dbgParams.length; i++) {
                if (dbgParams[i].equals(CommonParams.QUERY)) {
                    dbgQuery = true;
                } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                    dbgResults = true;
                }
            }
        }
    } else {
        dbgQuery = true;
        dbgResults = true;
    }
    // Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
    if (dbg == true) {
        try {

            NamedList<String> it = getMltTermsForDebug(mltResult);

            NamedList<Object> dbgInfo = new NamedList<Object>();
            NamedList<Object> stdDbg = SolrPluginUtils.doStandardDebug(req, q, mlt.getRealMLTQuery(),
                    mltDocs.docList, dbgQuery, dbgResults);
            rsp.add("debug", dbgInfo);
            dbgInfo.add("mltTerms", it);
            // Fix: the original checked `null != dbgInfo`, which is always
            // true (it was just constructed); the value that can be null is
            // stdDbg, whose null would make addAll throw.
            if (null != stdDbg) {
                dbgInfo.addAll(stdDbg);
            }

            if (null != mltFqFilters) {
                dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                List<String> fqs = new ArrayList<String>(mltFqFilters.size());
                for (Query fq : mltFqFilters) {
                    fqs.add(QueryParsing.toString(fq, req.getSchema()));
                }
                dbgInfo.add("mlt_filter_queries", fqs);
            }
        } catch (Exception e) {
            SolrException.log(SolrCore.log, "Exception during debug", e);
            rsp.add("exception_during_debug", SolrException.toStr(e));
        }
    }
}