Example usage for org.apache.solr.client.solrj.response QueryResponse getExplainMap

List of usage examples for org.apache.solr.client.solrj.response QueryResponse getExplainMap

Introduction

On this page you can find an example usage for org.apache.solr.client.solrj.response QueryResponse getExplainMap.

Prototype

public Map<String, Object> getExplainMap() 

Source Link

Usage

From source file:edu.ku.brc.sgr.MatchableRecord.java

License:Open Source License

@Override
public MatchResults doMatch(SolrServer server, SolrQuery baseQuery) {
    // Run a "more like this" query where the document to match is supplied
    // directly in the request instead of being looked up in the index.
    final ModifiableSolrParams query =
            baseQuery.getCopy().set(MoreLikeThisParams.DOC_SUPPLIED, true);

    final MoreLikeThisDocumentRequest req =
            new MoreLikeThisDocumentRequest(query).addDocument(record.asSolrDocument());

    final QueryResponse resp;
    try {
        resp = req.process(server);
    } catch (Exception e) {
        // Surface any Solr/transport failure as unchecked, keeping the cause chain.
        throw new RuntimeException(e);
    }

    final SolrDocumentList docs = resp.getResults();
    Map<String, String> explain = resp.getExplainMap();
    if (explain == null) {
        explain = Maps.newHashMap();
    }

    final ImmutableList.Builder<Match> matches = ImmutableList.builder();
    final float maxScore = (docs == null) ? 0.0f : docs.getMaxScore();
    if (docs != null) {
        for (SolrDocument doc : docs) {
            final float score = (Float) doc.getFieldValue("score");
            final SGRRecord candidate = SGRRecord.fromSolrDocument(doc);
            // Attach Solr's score explanation for this hit, if one was returned.
            matches.add(new Match(candidate, score, explain.get(candidate.id)));
        }
    }
    return new MatchResults(record.id, resp.getQTime(), maxScore, matches.build());
}

From source file:edu.toronto.cs.phenotips.solr.OmimScriptService.java

License:Open Source License

/**
 * Compute a list of phenotypes to investigate, which maximize the probability of getting more accurate automatic
 * diagnosis suggestions.
 *
 * @param phenotypes the list of already selected phenotypes
 * @param nphenotypes phenotypes that are not observed in the patient
 * @param limit the maximum number of phenotypes to return
 * @return a list of phenotype suggestions, possibly fewer than {@code limit}
 */
public List<SuggestedPhenotype> getDifferentialPhenotypes(Collection<String> phenotypes,
        Collection<String> nphenotypes, int limit) {
    HPOScriptService hpoService = (HPOScriptService) this.service;
    QueryResponse response;
    List<SuggestedPhenotype> result = new LinkedList<SuggestedPhenotype>();
    try {
        response = this.server.query(prepareParams(phenotypes, nphenotypes));
    } catch (SolrServerException e) {
        // Best effort: report the failure and return no suggestions rather than propagating.
        e.printStackTrace();
        return result;
    }
    SolrDocumentList matchingDisorders = response.getResults();
    // Per-document score explanations; only present when the query enabled debug output.
    Map<?, ?> explanations = response.getExplainMap();
    SumMap<String> cummulativeScore = new SumMap<String>();
    CounterMap<String> matchCounter = new CounterMap<String>();
    Set<String> allAncestors = new HashSet<String>();
    for (String phenotype : phenotypes) {
        allAncestors.addAll(hpoService.getAllAncestorsAndSelfIDs(phenotype));
    }
    for (SolrDocument disorder : matchingDisorders) {
        String omimId = (String) disorder.getFieldValue(ID_FIELD_NAME);
        @SuppressWarnings("unchecked")
        SimpleOrderedMap<Float> explanation =
                (explanations == null) ? null : (SimpleOrderedMap<Float>) explanations.get(omimId);
        if (explanation == null) {
            // No explanation returned for this document; skip it instead of throwing an NPE.
            continue;
        }
        float score = explanation.get("value");
        for (Object hpoId : disorder.getFieldValues("actual_symptom")) {
            // Skip symptoms already selected (or their ancestors) and explicitly absent ones.
            if (allAncestors.contains(hpoId) || nphenotypes.contains(hpoId)) {
                continue;
            }
            cummulativeScore.addTo((String) hpoId, (double) score);
            matchCounter.addTo((String) hpoId);
        }
    }
    if (matchCounter.getMinValue() <= matchingDisorders.size() / 2) {
        for (String hpoId : cummulativeScore.keySet()) {
            SolrDocument term = hpoService.get(hpoId);
            if (term == null) {
                continue;
            }
            result.add(new SuggestedPhenotype(hpoId, (String) term.getFieldValue("name"),
                    cummulativeScore.get(hpoId) / (matchCounter.get(hpoId) * matchCounter.get(hpoId))));
        }
        Collections.sort(result);
    }
    // Clamp to the available size: subList(0, limit) throws when fewer suggestions exist.
    return result.subList(0, Math.min(limit, result.size()));
}

From source file:eu.europeana.core.BeanQueryModelFactory.java

License:EUPL

/**
 * Decorates each brief doc with its 1-based position in the overall result set, its
 * full-document URL, and (when debugQuery was enabled) Solr's score explanation.
 *
 * @param solrQuery the query that produced the results (read for "debugQuery" and start offset)
 * @param briefDocList the documents to decorate, mutated in place
 * @param solrResponse the Solr response carrying the optional explain map
 * @return the same {@code briefDocList}, for chaining
 */
private List<? extends BriefDoc> addIndexToBriefDocList(SolrQuery solrQuery,
        List<? extends BriefDoc> briefDocList, QueryResponse solrResponse) {
    Boolean debug = solrQuery.getBool("debugQuery");
    Map<String, String> explainMap = solrResponse.getExplainMap();
    Integer start = solrQuery.getStart();
    int index = start == null ? 1 : start + 1;
    for (BriefDoc briefDoc : briefDocList) {
        briefDoc.setIndex(index++);
        briefDoc.setFullDocUrl(createFullDocUrl(briefDoc.getId()));
        // getExplainMap() can return null when Solr sent no debug section; guard against NPE.
        if (debug != null && debug && explainMap != null) {
            briefDoc.setDebugQuery(explainMap.get(briefDoc.getId()));
        }
    }
    return briefDocList;
}

From source file:eu.europeana.solr.SimpleCollectionSolrInstance.java

License:Apache License

/**
 * Manual smoke test: runs a bm25f query against the embedded test core and prints
 * each hit's title together with Solr's score explanation.
 */
public static void main(String[] args) throws SolrServerException, IOException {
    SimpleCollectionSolrInstance tester = new SimpleCollectionSolrInstance();
    tester.setSolrdir(new File(new File(new File(new File("src"), "test"), "resources"), "solr/" + CORE1));
    try {
        SolrQuery q = new SolrQuery("leonardo");
        q.set("debugQuery", "on"); // required so getExplainMap() is populated
        q.set("defType", "bm25f");

        q.setRows(10); // fetch up to 10 documents so titles/explanations can be printed

        QueryResponse qr = tester.query(q);
        Map<String, String> explainmap = qr.getExplainMap();
        System.out.println("results " + qr.getResults().getNumFound());
        for (SolrDocument doc : qr.getResults()) {
            System.out.println("Title: " + doc.getFieldValue("title"));
            System.out.println("Expl: " + explainmap.get(doc.getFieldValue("europeana_id")));
        }
    } finally {
        // Release the embedded core even if the query throws.
        tester.close();
    }
}

From source file:eu.europeana.solr.SolrServerTester.java

License:Apache License

/**
 * Manual smoke test: indexes the test fixtures, runs a bm25f query and prints each
 * hit's title together with Solr's score explanation.
 */
public static void main(String[] args) throws SolrServerException, IOException {
    SolrServerTester tester = new SolrServerTester();
    tester.setSolrdir(new File(new File(new File(new File("src"), "test"), "resources"), "solr/" + CORE1));
    try {
        SolrServerIndexer indexer = new SolrServerIndexer();
        indexer.index(tester);
        tester.commit();

        SolrQuery q = new SolrQuery("Watermark");
        q.set("debugQuery", "on"); // required so getExplainMap() is populated
        q.set("defType", "bm25f");
        q.set("qf", "title text");

        q.setRows(10); // fetch up to 10 documents so titles/explanations can be printed

        QueryResponse qr = tester.query(q);
        Map<String, String> explainmap = qr.getExplainMap();
        System.out.println("results " + qr.getResults().getNumFound());
        for (SolrDocument doc : qr.getResults()) {
            System.out.println("Title: " + doc.getFieldValue("title"));
            System.out.println("Expl: " + explainmap.get(doc.getFieldValue("europeana_id")));
        }
    } finally {
        // Release the server even if indexing or the query throws.
        tester.close();
    }
}

From source file:org.opencms.search.solr.AllTests.java

License:Open Source License

/**
 * Prints a Solr query response.<p>
 *
 * @param qr the query response/*w  ww  . j a va2 s. c o m*/
 */
@SuppressWarnings("unused")
private static void printResultDetails(QueryResponse qr) {

    SolrDocumentList sdl = qr.getResults();
    qr.getExplainMap();

    // System.out.println(sdl.toString());

    ArrayList<HashMap<String, Object>> hitsOnPage = new ArrayList<HashMap<String, Object>>();
    for (SolrDocument d : sdl) {
        HashMap<String, Object> values = new HashMap<String, Object>();
        Iterator<Map.Entry<String, Object>> i = d.iterator();
        while (i.hasNext()) {
            Map.Entry<String, Object> e2 = i.next();
            values.put(e2.getKey(), e2.getValue());
        }

        hitsOnPage.add(values);
        System.out.println(values.get("path") + " (" + values.get("Title") + ")");
    }
    List<FacetField> facets = qr.getFacetFields();

    if (facets != null) {
        for (FacetField facet : facets) {
            List<FacetField.Count> facetEntries = facet.getValues();

            if (facetEntries != null) {
                for (FacetField.Count fcount : facetEntries) {
                    System.out.println(fcount.getName() + ": " + fcount.getCount());
                }
            }
        }
    }
}

From source file:org.phenotips.diagnosis.differentialPhenotypes.PhenotypeSuggestService.java

License:Open Source License

/**
 * Compute a list of phenotypes to investigate, which maximize the probability of getting more accurate automatic
 * diagnosis suggestions./*from w  w w . j a va 2 s .  c  o  m*/
 *
 * @param phenotypes the list of already selected phenotypes
 * @param nphenotypes phenotypes that are not observed in the patient
 * @param limit the maximum number of phenotypes to return
 * @return a list of phenotype suggestions
 */
public List<SuggestedPhenotype> getDifferentialPhenotypes(Collection<String> phenotypes,
        Collection<String> nphenotypes, int limit) {
    QueryResponse response;
    List<SuggestedPhenotype> result = new LinkedList<>();
    try {
        response = this.solrManager.getSolrConnection("omim").query(prepareParams(phenotypes, nphenotypes));
    } catch (SolrServerException | IOException ex) {
        this.logger.warn("Failed to query OMIM index: {}", ex.getMessage());
        return result;
    }
    SolrDocumentList matchingDisorders = response.getResults();
    Map<?, ?> explanations = response.getExplainMap();
    SumMap<String> cummulativeScore = new SumMap<>();
    CounterMap<String> matchCounter = new CounterMap<>();
    Set<String> allAncestors = new HashSet<>();
    for (String phenotype : phenotypes) {
        allAncestors.addAll(this.getAllAncestorsAndSelfIDs(phenotype));
    }
    for (SolrDocument disorder : matchingDisorders) {
        String omimId = (String) disorder.getFieldValue("id");
        @SuppressWarnings("unchecked")
        SimpleOrderedMap<Float> omimTerm = (SimpleOrderedMap<Float>) explanations.get(omimId);
        float score = omimTerm.get("value");
        for (Object hpoId : disorder.getFieldValues("actual_symptom")) {
            if (allAncestors.contains(hpoId) || nphenotypes.contains(hpoId)
                    || !this.getAllAncestorsAndSelfIDs((String) hpoId).contains("HP:0000118")) {
                continue;
            }
            cummulativeScore.addTo((String) hpoId, (double) score);
            matchCounter.addTo((String) hpoId);
        }
    }
    if (matchCounter.getMinValue() <= matchingDisorders.size() / 2) {
        for (String hpoId : cummulativeScore.keySet()) {
            VocabularyTerm term = this.hpo.getTerm(hpoId);
            if (term == null) {
                continue;
            }
            result.add(new SuggestedPhenotype(hpoId, (String) term.get("name"),
                    cummulativeScore.get(hpoId) / (matchCounter.get(hpoId) * matchCounter.get(hpoId))));
        }
        Collections.sort(result);
    }
    return result.subList(0, Math.min(limit, result.size()));
}

From source file:org.phenotips.solr.OmimScriptService.java

License:Open Source License

/**
 * Compute a list of phenotypes to investigate, which maximize the probability of getting more accurate automatic
 * diagnosis suggestions.
 *
 * @param phenotypes the list of already selected phenotypes
 * @param nphenotypes phenotypes that are not observed in the patient
 * @param limit the maximum number of phenotypes to return
 * @return a list of phenotype suggestions, possibly fewer than {@code limit}
 */
public List<SuggestedPhenotype> getDifferentialPhenotypes(Collection<String> phenotypes,
        Collection<String> nphenotypes, int limit) {
    HPOScriptService hpoService = (HPOScriptService) this.service;
    QueryResponse response;
    List<SuggestedPhenotype> result = new LinkedList<SuggestedPhenotype>();
    try {
        response = this.server.query(prepareParams(phenotypes, nphenotypes));
    } catch (SolrServerException ex) {
        this.logger.warn("Failed to query OMIM index: {}", ex.getMessage());
        return result;
    }
    SolrDocumentList matchingDisorders = response.getResults();
    // Per-document score explanations; only present when the query enabled debug output.
    Map<?, ?> explanations = response.getExplainMap();
    SumMap<String> cummulativeScore = new SumMap<String>();
    CounterMap<String> matchCounter = new CounterMap<String>();
    Set<String> allAncestors = new HashSet<String>();
    for (String phenotype : phenotypes) {
        allAncestors.addAll(hpoService.getAllAncestorsAndSelfIDs(phenotype));
    }
    for (SolrDocument disorder : matchingDisorders) {
        String omimId = (String) disorder.getFieldValue(ID_FIELD_NAME);
        @SuppressWarnings("unchecked")
        SimpleOrderedMap<Float> omimTerm = (SimpleOrderedMap<Float>) explanations.get(omimId);
        float score = omimTerm.get("value");
        for (Object hpoId : disorder.getFieldValues("actual_symptom")) {
            // Skip known/excluded symptoms and terms outside the phenotypic-abnormality branch.
            if (allAncestors.contains(hpoId) || nphenotypes.contains(hpoId)
                    || !hpoService.getAllAncestorsAndSelfIDs((String) hpoId).contains("HP:0000118")) {
                continue;
            }
            cummulativeScore.addTo((String) hpoId, (double) score);
            matchCounter.addTo((String) hpoId);
        }
    }
    if (matchCounter.getMinValue() <= matchingDisorders.size() / 2) {
        for (String hpoId : cummulativeScore.keySet()) {
            SolrDocument term = hpoService.get(hpoId);
            if (term == null) {
                continue;
            }
            result.add(new SuggestedPhenotype(hpoId, (String) term.getFieldValue("name"),
                    cummulativeScore.get(hpoId) / (matchCounter.get(hpoId) * matchCounter.get(hpoId))));
        }
        Collections.sort(result);
    }
    // Clamp to the available size: subList(0, limit) throws when fewer suggestions exist.
    return result.subList(0, Math.min(limit, result.size()));
}