Example usage for org.apache.solr.client.solrj SolrQuery setStart

List of usage examples for org.apache.solr.client.solrj SolrQuery setStart

Introduction

On this page you can find example usage for org.apache.solr.client.solrj SolrQuery setStart.

Prototype

public SolrQuery setStart(Integer start) 
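
Before the examples, here is a minimal, self-contained sketch of the pattern most of them follow: combining setStart with setRows to page through a result set. The core URL, class name and batch size are illustrative assumptions, not taken from any of the sources below; the SolrJ 4.x HttpSolrServer API is used to match the examples.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;

public class SetStartExample {

    public static void main(String[] args) throws SolrServerException {
        // Hypothetical core URL; point this at your own Solr instance.
        SolrServer solr = new HttpSolrServer("http://localhost:8983/solr/mycore");

        final int batchSize = 100; // hypothetical page size
        int start = 0;
        long numFound = Long.MAX_VALUE;

        while (start < numFound) {
            SolrQuery query = new SolrQuery("*:*");
            query.setStart(start);     // offset of the first document to return
            query.setRows(batchSize);  // number of documents per request

            QueryResponse response = solr.query(query);
            numFound = response.getResults().getNumFound();

            // Process response.getResults() or response.getBeans(...) here.

            start += batchSize;
        }
    }
}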

Usage

From source file:uk.ac.ebi.phenotype.solr.indexer.AlleleIndexer.java

License:Apache License

private void populateDiseaseLookup() throws SolrServerException {

    int docsRetrieved = 0;
    int numDocs = getDiseaseDocCount();

    // Fields in the solr core to bring back
    String fields = StringUtils.join(Arrays.asList(DiseaseBean.DISEASE_ID, DiseaseBean.MGI_ACCESSION_ID,
            DiseaseBean.DISEASE_SOURCE, DiseaseBean.DISEASE_TERM, DiseaseBean.DISEASE_ALTS,
            DiseaseBean.DISEASE_CLASSES, DiseaseBean.HUMAN_CURATED, DiseaseBean.MOUSE_CURATED,
            DiseaseBean.MGI_PREDICTED, DiseaseBean.IMPC_PREDICTED, DiseaseBean.MGI_PREDICTED_KNOWN_GENE,
            DiseaseBean.IMPC_PREDICTED_KNOWN_GENE, DiseaseBean.MGI_NOVEL_PREDICTED_IN_LOCUS,
            DiseaseBean.IMPC_NOVEL_PREDICTED_IN_LOCUS), ",");

    // The solrcloud instance cannot give us all results back at once,
    // we must batch up the calls and build it up piece at a time
    while (docsRetrieved < numDocs + PHENODIGM_BATCH_SIZE) {

        SolrQuery query = new SolrQuery("*:*");
        query.addFilterQuery("type:disease_gene_summary");
        query.setFields(fields);
        query.setStart(docsRetrieved);
        query.setRows(PHENODIGM_BATCH_SIZE);
        query.setSort(DiseaseBean.DISEASE_ID, SolrQuery.ORDER.asc);

        QueryResponse response = phenodigmCore.query(query);
        List<DiseaseBean> diseases = response.getBeans(DiseaseBean.class);
        for (DiseaseBean disease : diseases) {
            if (!diseaseLookup.containsKey(disease.getMgiAccessionId())) {
                diseaseLookup.put(disease.getMgiAccessionId(), new ArrayList<DiseaseBean>());
            }
            diseaseLookup.get(disease.getMgiAccessionId()).add(disease);
        }

        docsRetrieved += PHENODIGM_BATCH_SIZE;
        logger.info("Processed {} documents from phenodigm. {} genes in the index", docsRetrieved,
                diseaseLookup.size());

    }
}

From source file:uk.ac.ebi.phenotype.solr.indexer.utils.SolrUtils.java

License:Apache License

/**
 * Fetch a map of sanger images indexed by ma term id
 *
 * @param imagesCore a valid solr connection
 * @return a map, indexed by ma term id, of all sanger images associated
 * with that term
 * @throws IndexerException
 */
public static Map<String, List<SangerImageDTO>> populateSangerImagesMap(SolrServer imagesCore)
        throws IndexerException {
    Map<String, List<SangerImageDTO>> map = new HashMap<>();

    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("maTermId:*");
    query.setRows(BATCH_SIZE);
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = null;
        try {
            response = imagesCore.query(query);
        } catch (Exception e) {
            throw new IndexerException("Unable to query images core", e);
        }

        total = response.getResults().getNumFound();
        List<SangerImageDTO> imageList = response.getBeans(SangerImageDTO.class);

        for (SangerImageDTO image : imageList) {
            for (String termId : image.getMaTermId()) {
                if (!map.containsKey(termId)) {
                    map.put(termId, new ArrayList<SangerImageDTO>());
                }
                String imageId = image.getId();
                List<SangerImageDTO> sangerImageList = map.get(termId);

                boolean imageFound = false;
                for (SangerImageDTO dto : sangerImageList) {
                    if (dto.getId().equalsIgnoreCase(imageId)) {
                        imageFound = true;
                        break;
                    }
                }
                // Don't add duplicate images.
                if (!imageFound) {
                    map.get(termId).add(image);
                }
            }
        }
        pos += BATCH_SIZE;
    }

    return map;
}

From source file:uk.ac.ebi.phenotype.solr.indexer.utils.SolrUtils.java

License:Apache License

/**
 * Fetch a map of sanger images indexed by MGI accession id
 *
 * @param imagesCore a valid solr connection
 * @return a map, indexed by MGI accession id, of all sanger images
 * associated with that gene
 * @throws IndexerException
 */
protected static Map<String, List<SangerImageDTO>> populateSangerImagesByMgiAccession(SolrServer imagesCore)
        throws IndexerException {
    Map<String, List<SangerImageDTO>> map = new HashMap<>();

    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("mgi_accession_id:*");
    query.setRows(BATCH_SIZE);
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = null;
        try {
            response = imagesCore.query(query);
        } catch (Exception e) {
            throw new IndexerException("Unable to query images core", e);
        }
        total = response.getResults().getNumFound();
        List<SangerImageDTO> imageList = response.getBeans(SangerImageDTO.class);
        for (SangerImageDTO image : imageList) {
            if (!map.containsKey(image.getAccession())) {
                map.put(image.getAccession(), new ArrayList<SangerImageDTO>());
            }
            String imageId = image.getId();
            List<SangerImageDTO> sangerImageList = map.get(image.getAccession());

            boolean imageFound = false;
            for (SangerImageDTO dto : sangerImageList) {
                if (dto.getId().equalsIgnoreCase(imageId)) {
                    imageFound = true;
                    break;
                }
            }
            // Don't add duplicate images.
            if (!imageFound) {
                map.get(image.getAccession()).add(image);
            }

        }
        pos += BATCH_SIZE;
    }

    return map;
}

From source file:uk.ac.ebi.phenotype.solr.indexer.utils.SolrUtils.java

License:Apache License

/**
 * Fetch a map of mgi accessions to alleles
 *
 * @param alleleCore a valid solr connection
 * @return a map, indexed by MGI Accession id, of all alleles
 *
 * @throws IndexerException
 */
protected static Map<String, List<AlleleDTO>> populateAllelesMap(SolrServer alleleCore)
        throws IndexerException {

    Map<String, List<AlleleDTO>> alleles = new HashMap<>();

    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("*:*");
    query.setRows(BATCH_SIZE);
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = null;
        try {
            response = alleleCore.query(query);
        } catch (Exception e) {
            throw new IndexerException("Unable to query allele core in SolrUtils.populateAllelesMap()", e);
        }
        total = response.getResults().getNumFound();
        List<AlleleDTO> alleleList = response.getBeans(AlleleDTO.class);
        for (AlleleDTO allele : alleleList) {
            String key = allele.getMgiAccessionId();
            if (!alleles.containsKey(key)) {
                alleles.put(key, new ArrayList<AlleleDTO>());
            }
            alleles.get(key).add(allele);
        }
        pos += BATCH_SIZE;
    }
    logger.debug("Loaded {} alleles", alleles.size());

    return alleles;
}

From source file:uk.ac.ebi.phenotype.solr.indexer.utils.SolrUtils.java

License:Apache License

/**
 * Fetch a map of mp terms associated to hp terms, indexed by mp id.
 *
 * @param phenodigm_core a valid solr connection
 * @return a map, indexed by mp id, of all hp terms
 *
 * @throws IndexerException
 */
public static Map<String, List<Map<String, String>>> populateMpToHpTermsMap(SolrServer phenodigm_core)
        throws IndexerException {

    // url="q=mp_id:&quot;${nodeIds.term_id}&quot;&amp;rows=999&amp;fq=type:mp_hp&amp;fl=hp_id,hp_term"
    // processor="XPathEntityProcessor" >
    //
    // <field column="hp_id" xpath="/response/result/doc/str[@name='hp_id']"
    // />
    // <field column="hp_term"
    // xpath="/response/result/doc/str[@name='hp_term']" />
    Map<String, List<Map<String, String>>> mpToHp = new HashMap<>();

    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("mp_id:*");
    query.addFilterQuery("type:mp_hp");// &amp;fl=hp_id,hp_term);
    query.add("fl=hp_id,hp_term");
    query.setRows(BATCH_SIZE);
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = null;
        try {
            response = phenodigm_core.query(query);
        } catch (Exception e) {
            throw new IndexerException("Unable to query phenodigm_core in SolrUtils.populateMpToHpTermsMap()",
                    e);
        }
        total = response.getResults().getNumFound();
        SolrDocumentList solrDocs = response.getResults();
        for (SolrDocument doc : solrDocs) {
            if (doc.containsKey("hp_id")) {
                String hp = (String) doc.get("hp_id");
                if (doc.containsKey("mp_id")) {

                    String mp = (String) doc.get("mp_id");
                    List<Map<String, String>> mapList = new ArrayList<>();
                    Map<String, String> entryMap = new HashMap<>();
                    if (mpToHp.containsKey(mp)) {
                        mapList = mpToHp.get(mp);
                    }
                    entryMap.put("hp_id", hp);
                    if (doc.containsKey("hp_term")) {
                        String hpTerm = (String) doc.get("hp_term");
                        entryMap.put("hp_term", hpTerm);
                    }
                    mapList.add(entryMap);
                    mpToHp.put(mp, mapList);
                }
            }

        }
        pos += BATCH_SIZE;
    }

    return mpToHp;
}

From source file:uk.ac.ebi.phenotype.solr.indexer.utils.SolrUtils.java

License:Apache License

/**
 * Get a map of MpDTOs keyed by MGI accession id
 *
 * @param mpSolrServer a valid solr connection to the mp core
 * @return the map
 * @throws IndexerException
 */
public static Map<String, List<MpDTO>> populateMgiAccessionToMp(SolrServer mpSolrServer)
        throws IndexerException {

    Map<String, List<MpDTO>> mps = new HashMap<>();
    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("mgi_accession_id:*");
    //query.add("fl=mp_id,mp_term,mp_definition,mp_term_synonym,ontology_subset,hp_id,hp_term,top_level_mp_id,top_level_mp_term,top_level_mp_term_synonym,intermediate_mp_id,intermediate_mp_term,intermediate_mp_term_synonym,child_mp_id,child_mp_term,child_mp_term_synonym,inferred_ma_id,inferred_ma_term,inferred_ma_term_synonym,inferred_selected_top_level_ma_id,inferred_selected_top_level_ma_term,inferred_selected_top_level_ma_term_synonym,inferred_child_ma_id,inferred_child_ma_term,inferred_child_ma_term_synonym");
    query.setRows(BATCH_SIZE);
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = null;
        try {
            response = mpSolrServer.query(query);
        } catch (Exception e) {
            e.printStackTrace();
            throw new IndexerException("Unable to query phenodigm_core in SolrUtils.populateMpToHpTermsMap()",
                    e);
        }
        total = response.getResults().getNumFound();
        List<MpDTO> mpBeans = response.getBeans(MpDTO.class);

        for (MpDTO mp : mpBeans) {
            if (mp.getMgiAccessionId() != null && !mp.getMgiAccessionId().isEmpty()) {
                for (String geneAccession : mp.getMgiAccessionId()) {

                    if (mps.containsKey(geneAccession)) {
                        mps.get(geneAccession).add(mp);
                    } else {
                        List<MpDTO> mpListPerGene = new ArrayList<>();
                        mpListPerGene.add(mp);
                        mps.put(geneAccession, mpListPerGene);
                    }
                }
            }
        }
        pos += BATCH_SIZE;
    }
    return mps;
}

From source file:uk.ac.ebi.phenotype.solr.indexer.utils.SolrUtils.java

License:Apache License

/**
 * Get a map of MpDTOs keyed by MP term id
 *
 * @param mpSolrServer a valid solr connection to the mp core
 * @return the map
 * @throws IndexerException
 */
public static Map<String, MpDTO> populateMpTermIdToMp(SolrServer mpSolrServer) throws IndexerException {

    Map<String, MpDTO> mps = new HashMap<>();
    int pos = 0;
    long total = Integer.MAX_VALUE;
    SolrQuery query = new SolrQuery("*:*");
    //query.add("fl=mp_id,mp_term,mp_definition,mp_term_synonym,ontology_subset,hp_id,hp_term,top_level_mp_id,top_level_mp_term,top_level_mp_term_synonym,intermediate_mp_id,intermediate_mp_term,intermediate_mp_term_synonym,child_mp_id,child_mp_term,child_mp_term_synonym,inferred_ma_id,inferred_ma_term,inferred_ma_term_synonym,inferred_selected_top_level_ma_id,inferred_selected_top_level_ma_term,inferred_selected_top_level_ma_term_synonym,inferred_child_ma_id,inferred_child_ma_term,inferred_child_ma_term_synonym");
    query.setRows(BATCH_SIZE);
    while (pos < total) {
        query.setStart(pos);
        QueryResponse response = null;
        try {
            response = mpSolrServer.query(query);
        } catch (Exception e) {
            e.printStackTrace();
            throw new IndexerException("Unable to query phenodigm_core in SolrUtils.populateMpToHpTermsMap()",
                    e);
        }
        total = response.getResults().getNumFound();
        List<MpDTO> mpBeans = response.getBeans(MpDTO.class);

        for (MpDTO mp : mpBeans) {
            if (mp.getMpId() != null && !mp.getMpId().equals("")) {

                mps.put(mp.getMpId(), mp);

            }
        }
        pos += BATCH_SIZE;
    }
    return mps;
}

From source file:uk.ac.ebi.phis.service.BasicService.java

License:Apache License

public QueryResponse getDocuments(int rows, int start) throws SolrServerException {

    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setQuery("*:*");
    solrQuery.setRows(rows);
    solrQuery.setStart(start);
    return solr.query(solrQuery);
}
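
A brief usage sketch for this helper, paging through every document in the core with the same start/rows pattern as the other examples on this page; the service variable, the batch size of 1000, and the processing placeholder are illustrative assumptions and do not appear in the original source.

int batchSize = 1000;
int start = 0;
long numFound = Long.MAX_VALUE;
while (start < numFound) {
    QueryResponse response = service.getDocuments(batchSize, start);
    numFound = response.getResults().getNumFound();
    // Process response.getResults() here.
    start += batchSize;
}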

From source file:uk.ac.edukapp.repository.SolrConnector.java

License:Apache License

/**
 * Execute a query on the Solr server. Always returns a search results
 * object, even if the query fails.
 * 
 * @param term
 * @return search results object including matching widgets
 */
public SearchResults query(String term, String lang, int rows, int offset) {
    try {
        SolrServer server = getLocalizedSolrServer(lang);
        SolrQuery query = new SolrQuery();
        query.setRows(rows);
        query.setStart(offset);
        query.setQuery(term);
        QueryResponse rsp = server.query(query);
        SearchResults searchResults = new SearchResults();
        List<Widget> widgets = rsp.getBeans(Widget.class);
        searchResults.setWidgets(widgets);
        searchResults.setNumber_of_results(rsp.getResults().getNumFound());
        return searchResults;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return new SearchResults();
}

From source file:uk.co.flax.biosolr.ontology.search.solr.SolrDocumentSearch.java

License:Apache License

@Override
public ResultsList<Document> searchDocuments(String term, int start, int rows, List<String> additionalFields,
        List<String> filters) throws SearchEngineException {
    ResultsList<Document> results;

    try {
        SolrQuery query = new SolrQuery(term);
        query.setStart(start);
        query.setRows(rows);
        query.setRequestHandler(config.getDocumentRequestHandler());
        List<String> queryFields = new ArrayList<>(DEFAULT_SEARCH_FIELDS);
        if (additionalFields != null) {
            queryFields.addAll(additionalFields);
        }
        if (filters != null) {
            query.addFilterQuery(filters.toArray(new String[filters.size()]));
        }
        query.setParam(DisMaxParams.QF, queryFields.toArray(new String[queryFields.size()]));

        LOGGER.debug("Query: {}", query);

        QueryResponse response = server.query(query);
        List<Document> docs;
        long total = 0;

        if (response.getGroupResponse() != null) {
            docs = new ArrayList<>(rows);
            GroupResponse gResponse = response.getGroupResponse();
            for (GroupCommand gCommand : gResponse.getValues()) {
                total += gCommand.getNGroups();
                for (Group group : gCommand.getValues()) {
                    docs.addAll(server.getBinder().getBeans(Document.class, group.getResult()));
                }
            }
        } else if (response.getResults().getNumFound() == 0) {
            docs = new ArrayList<>();
        } else {
            docs = response.getBeans(Document.class);
            total = response.getResults().getNumFound();
        }

        results = new ResultsList<>(docs, start, (start / rows), total);
    } catch (SolrServerException | IOException e) {
        throw new SearchEngineException(e);
    }

    return results;
}