Example usage for org.apache.solr.client.solrj SolrQuery add

List of usage examples for org.apache.solr.client.solrj SolrQuery add

Introduction

In this page you can find the example usage for org.apache.solr.client.solrj SolrQuery add.

Prototype

public void add(SolrParams params) 

Source Link

Document

Add all of the parameters provided in the given SolrParams object to this query's parameters.

Usage

From source file:com.pearson.openideas.cq5.components.services.solr.SolrHelper.java

License:Open Source License

/**
 * Add Filter Criteria to a SOLR Query./* ww w  .j  a  v  a 2s . c o m*/
 * 
 * @param filterCriteria
 *            the list of filter criteria
 * @param query
 *            the query
 */
public static void addFilterCriteria(final Map<String, List<Object>> filterCriteria, final SolrQuery query) {
    if (filterCriteria != null && filterCriteria.size() > 0) {
        for (String key : filterCriteria.keySet()) {
            List<Object> values = filterCriteria.get(key);
            for (Object value : values) {

                if (value == null) {
                    log.debug("Adding null filter criteria {}.", key);
                    query.addFilterQuery(key);
                } else if (value instanceof String[]) {
                    ModifiableSolrParams params = new ModifiableSolrParams();
                    for (String s : (String[]) value) {
                        log.debug("Adding multi-value filter criteria {}:{}.", key, s);
                        params.add(key, s);
                    }
                    query.add(params);
                } else {
                    log.debug("Adding single-value filter criteria {}:{}.", key, String.valueOf(value));
                    query.addFilterQuery(key + ":" + String.valueOf(value));
                }
            }
        }
    }

}

From source file:com.sitewhere.connectors.solr.search.SolrSearchProvider.java

License:Open Source License

/**
 * Executes a Solr search and returns the raw JSON response as a tree.
 *
 * @param queryString the URL-style Solr query string to execute
 * @return the parsed JSON body of the Solr "response" element
 * @throws SiteWhereException if the Solr request or JSON parsing fails
 */
@Override
public JsonNode executeQueryWithRawResponse(String queryString) throws SiteWhereException {
    try {
        LOGGER.debug("About to execute Solr search with query string: " + queryString);

        // Ask Solr for the raw JSON body instead of a parsed NamedList.
        NoOpResponseParser rawJsonResponseParser = new NoOpResponseParser();
        rawJsonResponseParser.setWriterType("json");

        SolrQuery query = new SolrQuery();
        query.add(createParamsFromQueryString(queryString));
        QueryRequest request = new QueryRequest(query);
        request.setResponseParser(rawJsonResponseParser);
        NamedList<?> results = getSolr().getSolrClient().request(request);
        return MAPPER.readTree((String) results.get("response"));
    } catch (SolrServerException | IOException e) {
        // Both failure modes are reported to callers identically; multi-catch
        // replaces the two duplicated catch blocks.
        throw new SiteWhereException("Unable to execute query.", e);
    }
}

From source file:net.hasor.search.server.rsf.service.SorlSearchService.java

License:Apache License

/**
 * Runs the given search against Solr and converts the response into a
 * {@link QuerySearchResult}.
 *
 * @param searchQuery the search parameters, converted to a Solr param map
 * @return the converted search result, including timing and paging metadata
 * @throws Throwable if the Solr query fails
 */
@Override
public QuerySearchResult query(SearchQuery searchQuery) throws Throwable {
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.add(new MultiMapSolrParams(searchQuery.toMap()));
    QueryResponse response = getSolrClient().query(solrQuery);
    SolrDocumentList docList = response.getResults();

    List<SearchDocument> documentList = new ArrayList<SearchDocument>();
    if (docList != null) {
        for (SolrDocument solrDocument : docList) {
            documentList.add(convetTo(solrDocument));
        }
    }

    QuerySearchResult searchResult = new QuerySearchResult(documentList);
    searchResult.setElapsedTime(response.getElapsedTime());
    if (docList != null) {
        // BUG FIX: the original dereferenced docList unconditionally here even
        // though the loop above guarded against null, risking an NPE.
        searchResult.setMaxScore(docList.getMaxScore());
        searchResult.setNumFound(docList.getNumFound());
        searchResult.setStart(docList.getStart());
    }
    searchResult.setStatus(response.getStatus());
    searchResult.setQueryTime(response.getQTime());
    return searchResult;
}

From source file:org.apache.nifi.processors.solr.QuerySolr.java

License:Apache License

@Override
public void doOnTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();

    // Incoming FlowFile, if any; this processor can also run as a source.
    FlowFile flowFileOriginal = session.get();
    // FlowFile that will carry the Solr query results.
    FlowFile flowFileResponse;

    if (flowFileOriginal == null) {
        // No input: only proceed when there is no incoming non-loop connection,
        // i.e. this processor generates its own FlowFiles.
        if (context.hasNonLoopConnection()) {
            return;
        }
        flowFileResponse = session.create();
    } else {
        flowFileResponse = session.create(flowFileOriginal);
    }

    final SolrQuery solrQuery = new SolrQuery();
    final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
    final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFileResponse)
            .getValue();

    // Transit URI for provenance events; includes the collection on SolrCloud.
    final StringBuilder transitUri = new StringBuilder("solr://");
    transitUri.append(getSolrLocation());
    if (isSolrCloud) {
        transitUri.append(":").append(collection);
    }
    final StopWatch timer = new StopWatch(false);

    try {
        // Build the query from processor properties, with expression-language
        // evaluation against the response FlowFile's attributes.
        solrQuery.setQuery(context.getProperty(SOLR_PARAM_QUERY).evaluateAttributeExpressions(flowFileResponse)
                .getValue());
        solrQuery.setRequestHandler(context.getProperty(SOLR_PARAM_REQUEST_HANDLER)
                .evaluateAttributeExpressions(flowFileResponse).getValue());

        if (context.getProperty(SOLR_PARAM_FIELD_LIST).isSet()) {
            // Comma-separated field list, whitespace-tolerant.
            for (final String field : context.getProperty(SOLR_PARAM_FIELD_LIST)
                    .evaluateAttributeExpressions(flowFileResponse).getValue().split(",")) {
                solrQuery.addField(field.trim());
            }
        }

        // Avoid ArrayIndexOutOfBoundsException due to incorrectly configured sorting
        try {
            if (context.getProperty(SOLR_PARAM_SORT).isSet()) {
                // Each comma-separated entry must be "<field> <asc|desc>".
                final List<SolrQuery.SortClause> sortings = new ArrayList<>();
                for (final String sorting : context.getProperty(SOLR_PARAM_SORT)
                        .evaluateAttributeExpressions(flowFileResponse).getValue().split(",")) {
                    final String[] sortEntry = sorting.trim().split(" ");
                    sortings.add(new SolrQuery.SortClause(sortEntry[0], sortEntry[1]));
                }
                solrQuery.setSorts(sortings);
            }
        } catch (Exception e) {
            throw new ProcessException("Error while parsing the sort clauses for the Solr query");
        }

        // Paging parameters fall back to Solr's defaults when unset.
        final Integer startParam = context.getProperty(SOLR_PARAM_START).isSet()
                ? Integer.parseInt(context.getProperty(SOLR_PARAM_START)
                        .evaluateAttributeExpressions(flowFileResponse).getValue())
                : CommonParams.START_DEFAULT;

        solrQuery.setStart(startParam);

        final Integer rowParam = context.getProperty(SOLR_PARAM_ROWS).isSet()
                ? Integer.parseInt(context.getProperty(SOLR_PARAM_ROWS)
                        .evaluateAttributeExpressions(flowFileResponse).getValue())
                : CommonParams.ROWS_DEFAULT;

        solrQuery.setRows(rowParam);

        // Arbitrary extra Solr parameters from dynamic processor properties;
        // also used to detect which search components (facet/stats) are active.
        final Map<String, String[]> additionalSolrParams = SolrUtils.getRequestParams(context,
                flowFileResponse);

        final Set<String> searchComponents = extractSearchComponents(additionalSolrParams);
        solrQuery.add(new MultiMapSolrParams(additionalSolrParams));

        final Map<String, String> attributes = new HashMap<>();
        attributes.put(ATTRIBUTE_SOLR_CONNECT, getSolrLocation());
        if (isSolrCloud) {
            attributes.put(ATTRIBUTE_SOLR_COLLECTION, collection);
        }
        attributes.put(ATTRIBUTE_SOLR_QUERY, solrQuery.toString());
        if (flowFileOriginal != null) {
            flowFileOriginal = session.putAllAttributes(flowFileOriginal, attributes);
        }

        flowFileResponse = session.putAllAttributes(flowFileResponse, attributes);

        final boolean getEntireResults = RETURN_ALL_RESULTS
                .equals(context.getProperty(AMOUNT_DOCUMENTS_TO_RETURN).getValue());
        // Facets/stats are emitted only once, on the first page of results.
        boolean processFacetsAndStats = true;
        boolean continuePaging = true;

        // Page through results; each iteration issues one Solr request.
        while (continuePaging) {

            timer.start();

            Map<String, String> responseAttributes = new HashMap<>();
            responseAttributes.put(ATTRIBUTE_SOLR_START, solrQuery.getStart().toString());
            responseAttributes.put(ATTRIBUTE_SOLR_ROWS, solrQuery.getRows().toString());

            // Deep paging via start is capped to protect Solr; stop early.
            if (solrQuery.getStart() > UPPER_LIMIT_START_PARAM) {
                logger.warn(
                        "The start parameter of Solr query {} exceeded the upper limit of {}. The query will not be processed "
                                + "to avoid performance or memory issues on the part of Solr.",
                        new Object[] { solrQuery.toString(), UPPER_LIMIT_START_PARAM });
                flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);
                timer.stop();
                break;
            }

            final QueryRequest req = new QueryRequest(solrQuery);
            if (isBasicAuthEnabled()) {
                req.setBasicAuthCredentials(getUsername(), getPassword());
            }

            final QueryResponse response = req.process(getSolrClient());
            timer.stop();

            final Long totalNumberOfResults = response.getResults().getNumFound();

            // Record response metadata on the outgoing FlowFile.
            responseAttributes.put(ATTRIBUTE_SOLR_NUMBER_RESULTS, totalNumberOfResults.toString());
            responseAttributes.put(ATTRIBUTE_CURSOR_MARK, response.getNextCursorMark());
            responseAttributes.put(ATTRIBUTE_SOLR_STATUS, String.valueOf(response.getStatus()));
            responseAttributes.put(ATTRIBUTE_QUERY_TIME, String.valueOf(response.getQTime()));
            flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);

            if (response.getResults().size() > 0) {

                // Write results either as Solr XML or via a configured record writer.
                if (context.getProperty(RETURN_TYPE).getValue().equals(MODE_XML.getValue())) {
                    flowFileResponse = session.write(flowFileResponse,
                            SolrUtils.getOutputStreamCallbackToTransformSolrResponseToXml(response));
                    flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(),
                            MIME_TYPE_XML);
                } else {
                    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER)
                            .evaluateAttributeExpressions(flowFileResponse)
                            .asControllerService(RecordSetWriterFactory.class);
                    final RecordSchema schema = writerFactory.getSchema(flowFileResponse.getAttributes(), null);
                    final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(),
                            schema);
                    // StringBuffer because the lambda below must mutate an
                    // effectively-final holder to surface the writer's MIME type.
                    final StringBuffer mimeType = new StringBuffer();
                    final FlowFile flowFileResponseRef = flowFileResponse;
                    flowFileResponse = session.write(flowFileResponse, out -> {
                        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out,
                                flowFileResponseRef)) {
                            writer.write(recordSet);
                            writer.flush();
                            mimeType.append(writer.getMimeType());
                        } catch (SchemaNotFoundException e) {
                            throw new ProcessException("Could not parse Solr response", e);
                        }
                    });
                    flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(),
                            mimeType.toString());
                }

                // Emit facet and stats FlowFiles once (first page only).
                if (processFacetsAndStats) {
                    if (searchComponents.contains(FacetParams.FACET)) {
                        FlowFile flowFileFacets = session.create(flowFileResponse);
                        flowFileFacets = session.write(flowFileFacets, out -> {
                            try (final OutputStreamWriter osw = new OutputStreamWriter(out);
                                    final JsonWriter writer = new JsonWriter(osw)) {
                                addFacetsFromSolrResponseToJsonWriter(response, writer);
                            }
                        });
                        flowFileFacets = session.putAttribute(flowFileFacets, CoreAttributes.MIME_TYPE.key(),
                                MIME_TYPE_JSON);
                        session.getProvenanceReporter().receive(flowFileFacets, transitUri.toString(),
                                timer.getDuration(TimeUnit.MILLISECONDS));
                        session.transfer(flowFileFacets, FACETS);
                    }

                    if (searchComponents.contains(StatsParams.STATS)) {
                        FlowFile flowFileStats = session.create(flowFileResponse);
                        flowFileStats = session.write(flowFileStats, out -> {
                            try (final OutputStreamWriter osw = new OutputStreamWriter(out);
                                    final JsonWriter writer = new JsonWriter(osw)) {
                                addStatsFromSolrResponseToJsonWriter(response, writer);
                            }
                        });
                        flowFileStats = session.putAttribute(flowFileStats, CoreAttributes.MIME_TYPE.key(),
                                MIME_TYPE_JSON);
                        session.getProvenanceReporter().receive(flowFileStats, transitUri.toString(),
                                timer.getDuration(TimeUnit.MILLISECONDS));
                        session.transfer(flowFileStats, STATS);
                    }
                    processFacetsAndStats = false;
                }
            }

            if (getEntireResults) {
                // Advance the start offset; transfer the current page and keep
                // paging until all documents have been fetched.
                final Integer totalDocumentsReturned = solrQuery.getStart() + solrQuery.getRows();
                if (totalDocumentsReturned < totalNumberOfResults) {
                    solrQuery.setStart(totalDocumentsReturned);
                    session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(),
                            timer.getDuration(TimeUnit.MILLISECONDS));
                    session.transfer(flowFileResponse, RESULTS);
                    flowFileResponse = session.create(flowFileResponse);
                } else {
                    continuePaging = false;
                }
            } else {
                continuePaging = false;
            }
        }

    } catch (Exception e) {
        // On any failure: penalize, record exception details, route to FAILURE.
        flowFileResponse = session.penalize(flowFileResponse);
        flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION, e.getClass().getName());
        flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION_MESSAGE, e.getMessage());
        session.transfer(flowFileResponse, FAILURE);
        logger.error("Failed to execute query {} due to {}. FlowFile will be routed to relationship failure",
                new Object[] { solrQuery.toString(), e }, e);
        if (flowFileOriginal != null) {
            flowFileOriginal = session.penalize(flowFileOriginal);
        }
    }

    // Penalized means the failure path already transferred the FlowFile.
    if (!flowFileResponse.isPenalized()) {
        session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(),
                timer.getDuration(TimeUnit.MILLISECONDS));
        session.transfer(flowFileResponse, RESULTS);
    }

    if (flowFileOriginal != null) {
        if (!flowFileOriginal.isPenalized()) {
            session.transfer(flowFileOriginal, ORIGINAL);
        } else {
            // Failure path: drop the original rather than re-routing it.
            session.remove(flowFileOriginal);
        }
    }
}

From source file:org.mousephenotype.cda.solr.SolrUtils.java

License:Apache License

/**
 * Fetch a map of mp terms associated to hp terms, indexed by mp id.
 *
 * @param phenodigm_core a valid solr connection
 * @return a map, indexed by mp id, of all hp terms
 *
 * @throws SolrServerException, IOException
 */
public static Map<String, List<Map<String, String>>> populateMpToHpTermsMap(SolrClient phenodigm_core)
        throws SolrServerException, IOException {

    Map<String, List<Map<String, String>>> mpToHp = new HashMap<>();

    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("mp_id:*");
    query.addFilterQuery("type:mp_hp");
    // BUG FIX: the original called query.add("fl=hp_id,hp_term"), which uses the
    // whole string as a parameter NAME (SolrQuery.add(String name, String... val)),
    // so the field list was never applied. Pass name and value separately.
    query.add("fl", "hp_id,hp_term");
    query.setRows(BATCH_SIZE);
    // Page through all matching documents in batches of BATCH_SIZE.
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = phenodigm_core.query(query);
        total = response.getResults().getNumFound();
        SolrDocumentList solrDocs = response.getResults();
        for (SolrDocument doc : solrDocs) {
            // Only documents carrying both hp_id and mp_id contribute entries.
            if (doc.containsKey("hp_id") && doc.containsKey("mp_id")) {
                String hp = (String) doc.get("hp_id");
                String mp = (String) doc.get("mp_id");
                List<Map<String, String>> mapList = mpToHp.computeIfAbsent(mp, k -> new ArrayList<>());
                Map<String, String> entryMap = new HashMap<>();
                entryMap.put("hp_id", hp);
                if (doc.containsKey("hp_term")) {
                    entryMap.put("hp_term", (String) doc.get("hp_term"));
                }
                mapList.add(entryMap);
            }
        }
        pos += BATCH_SIZE;
    }

    return mpToHp;
}

From source file:org.opencms.search.solr.spellchecking.CmsSolrSpellchecker.java

License:Open Source License

/**
 * Performs the actual spell check query using Solr.
 *
 * @param request the spell check request
 *
 * @return Results of the Solr spell check of type SpellCheckResponse or null if something goes wrong.
 */
private SpellCheckResponse performSpellcheckQuery(CmsSpellcheckingRequest request) {

    if ((null == request) || !request.isInitialized()) {
        return null;
    }

    final ModifiableSolrParams params = new ModifiableSolrParams();
    params.set("spellcheck", "true");
    params.set("spellcheck.dictionary", request.m_dictionaryToUse);
    params.set("spellcheck.extendedResults", "true");
    // Join all words into a single space-separated query string
    // (replaces the original hand-rolled StringBuilder loop).
    params.set("spellcheck.q", String.join(" ", request.m_wordsToCheck));

    final SolrQuery query = new SolrQuery();
    query.setRequestHandler("/spell");
    query.add(params);

    try {
        QueryResponse qres = m_solrClient.query(query);
        return qres.getSpellCheckResponse();
    } catch (Exception e) {
        // Best-effort: spellcheck failures are logged, caller receives null.
        LOG.debug("Exception while performing spellcheck query...", e);
    }

    return null;
}

From source file:org.swissbib.docproc.flink.plugins.ViafContentEnrichment.java

License:Open Source License

/**
 * Searches the VIAF Solr index for the given person and returns the matches
 * serialized as {@code id###name###name###...} (one run per matching document).
 *
 * @param swissbibPerson the person name to search for
 * @return the serialized match string, or {@code null} when nothing was found
 *         or the query failed
 */
public String searchVIAFPersonItem(String swissbibPerson) {

    String viafItem = null;
    StringBuilder sBuilder = new StringBuilder();

    //expected return value if successful
    //id###viafPerson###[viafPerson] -n

    // TODO: transform swissbibPerson to a match string before querying.
    HashMap<String, String[]> viafQuery = new HashMap<String, String[]>();
    viafQuery.put("q", new String[] { searchField + ":" + swissbibPerson });
    viafQuery.put("qt", new String[] { "/search" });

    ModifiableSolrParams mp = new ModifiableSolrParams(viafQuery);

    SolrQuery sq = new SolrQuery();
    sq.add(mp);

    try {
        QueryResponse qR = solrClient.query(sq);

        if (qR.getResults().getNumFound() > 0) {

            SolrDocumentList dL = qR.getResults();

            for (SolrDocument doc : dL) {

                sBuilder.append(doc.getFieldValue(viafIDField)).append("###");

                Collection<Object> personNames = doc.getFieldValues(valuesField);
                if (personNames != null) {
                    // Guard: getFieldValues returns null when the field is absent.
                    for (Object name : personNames) {
                        sBuilder.append((String) name).append("###");
                    }
                }
            }

            // BUG FIX: the original never assigned viafItem, so this method
            // always returned null even when matches were found.
            viafItem = sBuilder.toString();
        }

    } catch (SolrServerException | IOException exc) {
        exc.printStackTrace();
    }

    return viafItem;

}

From source file:org.zaizi.sensefy.api.solr.querybuilder.QueryBuilder.java

License:Open Source License

/**
 * Builds a Solr query from all the given input parameters, including
 * spellcheck, clustering, security filtering and facet configuration.
 *
 * @param query the user query string
 * @param fields comma-separated list of fields to return, may be null/empty
 * @param facet whether faceting should be enabled
 * @param facetConfig the facet configurations to apply when faceting
 * @param filters comma-separated filter expressions, may be null/empty
 * @param start offset of the first document to return
 * @param rows number of documents to return; defaults when null
 * @param order sort order expression, may be null/empty
 * @param security whether an ACL filter query should be added
 * @param spellcheck whether spellchecking should be enabled
 * @param clustering whether clustering should be enabled
 * @param user the principal the query is executed for
 * @return the fully configured Solr query
 */
public static SolrQuery getSolrQuery(String query, String fields, boolean facet,
        FacetConfigurationList facetConfig, String filters, int start, Integer rows, String order,
        boolean security, boolean spellcheck, boolean clustering, Principal user) {

    SensefyUser sensefyUser = SensefyUserMapper.getSensefyUserFromPrincipal(user);

    // Fall back to the default page size when the caller gave none.
    if (rows == null) {
        rows = DEFAULT_ROWS;
    }

    SolrQuery solrQuery = new SolrQuery(query);
    Set<String> fieldsWithFilters = new HashSet<String>();
    solrQuery.setRows(rows);
    solrQuery.set("spellcheck", spellcheck);
    solrQuery.set("clustering", clustering);

    if (fields != null && !fields.isEmpty()) {
        solrQuery.setFields(fields.split(","));
    }
    if (filters != null && !filters.isEmpty()) {
        // Collects the filtered field names into fieldsWithFilters as a side effect.
        String[] filterQueries = buildFilterQueries(filters.split(","), fieldsWithFilters);
        solrQuery.setFilterQueries(filterQueries);
    }
    if (order != null && !order.isEmpty()) {
        solrQuery.set(CommonParams.SORT, order);
    }
    if (security) {
        // Restrict results to documents the user's ACLs allow.
        solrQuery.addFilterQuery(SecurityQueryBuilder.getSecurityFilterQuery(sensefyUser));
    }

    solrQuery.set("TZ", sensefyUser.getTimezone().getID());
    solrQuery.setStart(start);

    ModifiableSolrParams facetParams = new ModifiableSolrParams();
    if (facet) {
        // Facets must be created with proper exclusion tags where a filter
        // is already applied on the same field.
        solrQuery.setFacet(true);
        for (FacetConfiguration facetConfiguration : facetConfig.getFacetConfigurations()) {
            String exclusionTag = fieldsWithFilters.contains(facetConfiguration.getField())
                    ? "tag-" + facetConfiguration.getField()
                    : null;
            facetConfiguration.getSolrFacetParams(facetParams, exclusionTag);
        }
    }
    solrQuery.add(facetParams);
    return solrQuery;
}

From source file:uk.ac.ebi.intact.dataexchange.psimi.solr.params.UrlSolrParamsTest.java

License:Apache License

@Test
public void params1() throws Exception {
    // URL-style parameter string exercising q, sort, rows, start and two fq values.
    final String rawParams = "q=*:*&sort=rigid asc&rows=30&fq=+dataset:(\"Cancer\")&fq=+go_expanded_id:(\"GO:0048511\")&start=0";

    final UrlSolrParams parsedParams = new UrlSolrParams(rawParams);

    final SolrQuery builtQuery = new SolrQuery();
    builtQuery.add(parsedParams);

    // Scalar parameters must round-trip into the typed accessors.
    Assert.assertEquals("*:*", builtQuery.getQuery());
    Assert.assertEquals("rigid asc", builtQuery.getSortField());
    Assert.assertEquals(Integer.valueOf(30), builtQuery.getRows());
    Assert.assertEquals(Integer.valueOf(0), builtQuery.getStart());

    // Both repeated fq values must survive, order-independent.
    Assert.assertTrue(Arrays.asList(builtQuery.getFilterQueries()).contains("+go_expanded_id:(\"GO:0048511\")"));
    Assert.assertTrue(Arrays.asList(builtQuery.getFilterQueries()).contains("+dataset:(\"Cancer\")"));
}

From source file:uk.ac.ebi.phenotype.solr.indexer.utils.SolrUtils.java

License:Apache License

/**
 * Fetch a map of mp terms associated to hp terms, indexed by mp id.
 *
 * @param phenodigm_core a valid solr connection
 * @return a map, indexed by mp id, of all hp terms
 *
 * @throws IndexerException/*ww w  . j av a  2 s. co m*/
 */
public static Map<String, List<Map<String, String>>> populateMpToHpTermsMap(SolrServer phenodigm_core)
        throws IndexerException {

    // url="q=mp_id:&quot;${nodeIds.term_id}&quot;&amp;rows=999&amp;fq=type:mp_hp&amp;fl=hp_id,hp_term"
    // processor="XPathEntityProcessor" >
    //
    // <field column="hp_id" xpath="/response/result/doc/str[@name='hp_id']"
    // />
    // <field column="hp_term"
    // xpath="/response/result/doc/str[@name='hp_term']" />
    Map<String, List<Map<String, String>>> mpToHp = new HashMap<>();

    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("mp_id:*");
    query.addFilterQuery("type:mp_hp");// &amp;fl=hp_id,hp_term);
    query.add("fl=hp_id,hp_term");
    query.setRows(BATCH_SIZE);
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = null;
        try {
            response = phenodigm_core.query(query);
        } catch (Exception e) {
            throw new IndexerException("Unable to query phenodigm_core in SolrUtils.populateMpToHpTermsMap()",
                    e);
        }
        total = response.getResults().getNumFound();
        SolrDocumentList solrDocs = response.getResults();
        for (SolrDocument doc : solrDocs) {
            if (doc.containsKey("hp_id")) {
                String hp = (String) doc.get("hp_id");
                if (doc.containsKey("mp_id")) {

                    String mp = (String) doc.get("mp_id");
                    List<Map<String, String>> mapList = new ArrayList<>();
                    Map<String, String> entryMap = new HashMap<>();
                    if (mpToHp.containsKey(mp)) {
                        mapList = mpToHp.get(mp);
                    }
                    entryMap.put("hp_id", hp);
                    if (doc.containsKey("hp_term")) {
                        String hpTerm = (String) doc.get("hp_term");
                        entryMap.put("hp_term", hpTerm);
                    }
                    mapList.add(entryMap);
                    mpToHp.put(mp, mapList);
                }
            }

        }
        pos += BATCH_SIZE;
    }

    return mpToHp;
}