List of usage examples for org.apache.solr.client.solrj SolrQuery addField
public SolrQuery addField(String field)
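Before the project-specific examples below, here is a minimal, self-contained sketch of addField: each call appends an entry to the query's field list (the fl parameter), and a comma-separated string is accepted as well, as several of the examples below do. The Solr URL, core name, and field names here are hypothetical, and a SolrJ 6+ client API is assumed.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;

public class AddFieldExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical Solr URL and core name, for illustration only.
        SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build();
        try {
            SolrQuery query = new SolrQuery("*:*");
            query.addField("id");              // appends "id" to the fl parameter
            query.addField("title,author");    // a comma-separated list also works, as in the examples below
            query.setRows(10);
            QueryResponse response = client.query(query);
            System.out.println(response.getResults());
        } finally {
            client.close();
        }
    }
}

The examples that follow show the same call inside larger query-building code against project-specific Solr cores.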
From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java
License:Apache License
@SuppressWarnings("unchecked") public void getLocusTags(ResourceRequest request, PrintWriter writer) { JSONArray arr = new JSONArray(); DataApiHandler dataApi = new DataApiHandler(request); try {//w w w. j ava2 s.co m final String familyType = request.getParameter("familyType"); final String familyId = familyType + "_id"; SolrQuery solr_query = new SolrQuery(); solr_query.setQuery("genome_id:(" + request.getParameter("genomeIds") + ") AND " + familyId + ":(" + request.getParameter("familyIds") + ")"); solr_query.setFilterQueries("annotation:PATRIC AND feature_type:CDS"); solr_query.addField("feature_id,patric_id,refseq_locus_tag,alt_locus_tag"); solr_query.setRows(DataApiHandler.MAX_ROWS); LOGGER.debug("getLocusTags(): [{}] {}", SolrCore.FEATURE.toString(), solr_query); String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, solr_query); Map resp = jsonReader.readValue(apiResponse); Map respBody = (Map) resp.get("response"); List<GenomeFeature> features = dataApi.bindDocuments((List<Map>) respBody.get("docs"), GenomeFeature.class); for (GenomeFeature feature : features) { arr.add(feature.toJSONObject()); } } catch (IOException e) { LOGGER.error(e.getMessage(), e); } JSONObject data = new JSONObject(); data.put("data", arr); try { data.writeJSONString(writer); } catch (IOException e) { LOGGER.error(e.getMessage(), e); } }
From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java
License:Apache License
public String getGenomeIdsForTaxon(ResourceRequest request) throws IOException {
    String taxon = request.getParameter("taxonId");
    String genomeFilter = request.getParameter("genomeFilter");
    List<String> gIds = new ArrayList<>();

    SolrQuery query = new SolrQuery("patric_cds:[1 TO *] AND taxon_lineage_ids:" + taxon);
    if (genomeFilter != null && !genomeFilter.equals("")) {
        query.addFilterQuery(genomeFilter);
    }
    query.addField("genome_id");
    query.setSort("genome_name", SolrQuery.ORDER.asc);
    query.setRows(DataApiHandler.MAX_ROWS);

    LOGGER.trace("getGenomeIdsForTaxon: [{}] {}", SolrCore.GENOME.getSolrCoreName(), query);

    String apiResponse = dataApiHandler.solrQuery(SolrCore.GENOME, query);

    Map resp = jsonReader.readValue(apiResponse);
    Map respBody = (Map) resp.get("response");

    List<Genome> genomes = dataApiHandler.bindDocuments((List<Map>) respBody.get("docs"), Genome.class);

    for (final Genome g : genomes) {
        gIds.add(g.getId());
    }

    // TODO: remove this when data API limit is removed
    if (gIds.size() == 25000) {
        query.setStart(25000);

        apiResponse = dataApiHandler.solrQuery(SolrCore.GENOME, query);
        resp = jsonReader.readValue(apiResponse);
        respBody = (Map) resp.get("response");

        genomes = dataApiHandler.bindDocuments((List<Map>) respBody.get("docs"), Genome.class);

        for (final Genome g : genomes) {
            gIds.add(g.getId());
        }
    }

    return StringUtils.join(gIds, ",");
}
From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java
License:Apache License
private SequenceData[] getFeatureSequences(String[] featureIds) throws IOException {
    List<SequenceData> collect = new ArrayList<>();

    SolrQuery query = new SolrQuery("feature_id:(" + StringUtils.join(featureIds, " OR ") + ")");
    query.addField("genome_name,patric_id,refseq_locus_tag,alt_locus_tag,aa_sequence");
    query.setRows(featureIds.length);

    LOGGER.trace("getFeatureSequences: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query);

    String apiResponse = dataApiHandler.solrQuery(SolrCore.FEATURE, query);

    Map resp = jsonReader.readValue(apiResponse);
    Map respBody = (Map) resp.get("response");

    List<GenomeFeature> features = dataApiHandler.bindDocuments((List<Map>) respBody.get("docs"), GenomeFeature.class);

    for (GenomeFeature feature : features) {
        String locusTag = "";
        if (feature.hasPatricId()) {
            locusTag = feature.getPatricId();
        } else if (feature.hasRefseqLocusTag()) {
            locusTag = feature.getRefseqLocusTag();
        } else if (feature.hasAltLocusTag()) {
            locusTag = feature.getAltLocusTag();
        }
        collect.add(new SequenceData(feature.getGenomeName().replace(" ", "_"), locusTag, feature.getAaSequence()));
    }

    SequenceData[] result = new SequenceData[collect.size()];
    collect.toArray(result);

    return result;
}
From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java
License:Apache License
public void getFeatureIds(ResourceRequest request, PrintWriter writer, String keyword) {
    try {
        DataApiHandler dataApi = new DataApiHandler(request);

        SolrQuery query = new SolrQuery(keyword);
        query.addField("feature_id");
        query.setRows(DataApiHandler.MAX_ROWS);

        String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query);

        Map resp = jsonReader.readValue(apiResponse);
        Map respBody = (Map) resp.get("response");

        List<GenomeFeature> features = dataApi.bindDocuments((List<Map>) respBody.get("docs"), GenomeFeature.class);

        List<String> featureIds = new ArrayList<>();
        for (GenomeFeature feature : features) {
            featureIds.add(feature.getId());
        }

        writer.write(StringUtils.join(featureIds, ","));
    } catch (IOException e) {
        LOGGER.debug(e.getMessage(), e);
    }
}
From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java
License:Apache License
@SuppressWarnings("unchecked") public void getGenomeDetails(ResourceRequest request, PrintWriter writer) throws IOException { String cType = request.getParameter("context_type"); String cId = request.getParameter("context_id"); String keyword = ""; if (cType != null && cType.equals("taxon") && cId != null && !cId.equals("")) { keyword = "patric_cds:[1 TO *] AND taxon_lineage_ids:" + cId; } else if (request.getParameter("keyword") != null) { keyword = request.getParameter("keyword"); }/* w w w. j a v a 2 s .c om*/ String fields = request.getParameter("fields"); DataApiHandler dataApi = new DataApiHandler(request); SolrQuery query = new SolrQuery(keyword); if (fields != null && !fields.equals("")) { query.addField(fields); } query.setRows(DataApiHandler.MAX_ROWS).addSort("genome_name", SolrQuery.ORDER.asc); String pk = request.getParameter("param_key"); Map<String, String> key = null; if (pk != null) { key = jsonReader.readValue(SessionHandler.getInstance().get(SessionHandler.PREFIX + pk)); } if (key != null && key.containsKey("genomeIds") && !key.get("genomeIds").equals("")) { query.addFilterQuery("genome_id:(" + key.get("genomeIds").replaceAll(",", " OR ") + ")"); } LOGGER.trace("getGenomeDetails(): [{}] {}", SolrCore.GENOME.getSolrCoreName(), query); String apiResponse = dataApi.solrQuery(SolrCore.GENOME, query); Map resp = jsonReader.readValue(apiResponse); Map respBody = (Map) resp.get("response"); int numFound = (Integer) respBody.get("numFound"); List<Map> sdl = (List<Map>) respBody.get("docs"); JSONArray docs = new JSONArray(); for (Map doc : sdl) { JSONObject item = new JSONObject(); item.putAll(doc); docs.add(item); } JSONObject jsonResult = new JSONObject(); jsonResult.put("results", docs); jsonResult.put("total", numFound); jsonResult.writeJSONString(writer); }
From source file:edu.vt.vbi.patric.proteinfamily.FIGfamData.java
License:Apache License
@SuppressWarnings("unchecked") public void getGroupStats(ResourceRequest request, PrintWriter writer) throws IOException { DataApiHandler dataApi = new DataApiHandler(request); JSONObject figfams = new JSONObject(); Set<String> figfamIdList = new HashSet<>(); List<String> genomeIdList = new LinkedList<>(); // get family Type final String familyType = request.getParameter("familyType"); final String familyId = familyType + "_id"; // get genome list in order String genomeIds = request.getParameter("genomeIds"); try {//from ww w.j ava2 s .c o m SolrQuery query = new SolrQuery("genome_id:(" + genomeIds.replaceAll(",", " OR ") + ")"); query.addSort("genome_name", SolrQuery.ORDER.asc).addField("genome_id") .setRows(DataApiHandler.MAX_ROWS); LOGGER.trace("[{}] {}", SolrCore.GENOME.getSolrCoreName(), query); String apiResponse = dataApi.solrQuery(SolrCore.GENOME, query); Map resp = jsonReader.readValue(apiResponse); Map respBody = (Map) resp.get("response"); List<Genome> genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class); for (final Genome genome : genomes) { genomeIdList.add(genome.getId()); } if (genomeIdList.size() == 25000) { query.setStart(25000); apiResponse = dataApi.solrQuery(SolrCore.GENOME, query); resp = jsonReader.readValue(apiResponse); respBody = (Map) resp.get("response"); genomes = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Genome.class); for (final Genome genome : genomes) { genomeIdList.add(genome.getId()); } } } catch (IOException e) { LOGGER.error(e.getMessage(), e); } // LOGGER.debug("genomeIdList: {}", genomeIdList); // getting genome counts per figfamID (figfam) // {stat:{field:{field:figfam_id,limit:-1,facet:{min:"min(aa_length)",max:"max(aa_length)",mean:"avg(aa_length)",ss:"sumsq(aa_length)",sum:"sum(aa_length)",dist:"percentile(aa_length,50,75,99,99.9)",field:{field:genome_id}}}}} try { long start = System.currentTimeMillis(); SolrQuery query = new SolrQuery("annotation:PATRIC AND feature_type:CDS"); // query.addFilterQuery("end:[3200 TO 4300] OR end:[4400 TO 4490] OR end:[4990 TO 4999]"); query.addFilterQuery(getSolrQuery(request)); query.addFilterQuery("!" 
+ familyId + ":\"\""); query.setRows(0).setFacet(true).set("facet.threads", 15); query.add("json.facet", "{stat:{type:field,field:genome_id,limit:-1,facet:{figfams:{type:field,field:" + familyId + ",limit:-1,sort:{index:asc}}}}}"); LOGGER.trace("getGroupStats() 1/3: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query); String apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query); long point = System.currentTimeMillis(); LOGGER.debug("1st query: {} ms", (point - start)); start = point; Map resp = jsonReader.readValue(apiResponse); Map facets = (Map) resp.get("facets"); Map stat = (Map) facets.get("stat"); final Map<String, String> figfamGenomeIdStr = new LinkedHashMap<>(); final Map<String, Integer> figfamGenomeCount = new LinkedHashMap<>(); final int genomeTotal = genomeIdList.size(); final Map<String, Integer> genomePosMap = new LinkedHashMap<>(); for (String genomeId : genomeIdList) { genomePosMap.put(genomeId, genomeIdList.indexOf(genomeId)); } final Map<String, List> figfamGenomeIdCountMap = new ConcurrentHashMap<>(); final Map<String, Set> figfamGenomeIdSet = new ConcurrentHashMap<>(); List<Map> genomeBuckets = (List<Map>) stat.get("buckets"); for (final Map bucket : genomeBuckets) { final String genomeId = (String) bucket.get("val"); final List<Map> figfamBucket = (List<Map>) ((Map) bucket.get("figfams")).get("buckets"); for (final Map figfam : figfamBucket) { final String figfamId = (String) figfam.get("val"); final String genomeCount = String.format("%02x", (Integer) figfam.get("count")); if (figfamGenomeIdCountMap.containsKey(figfamId)) { figfamGenomeIdCountMap.get(figfamId).set(genomePosMap.get(genomeId), genomeCount); } else { final List<String> genomeIdCount = new LinkedList<>(Collections.nCopies(genomeTotal, "00")); genomeIdCount.set(genomePosMap.get(genomeId), genomeCount); figfamGenomeIdCountMap.put(figfamId, genomeIdCount); } if (figfamGenomeIdSet.containsKey(figfamId)) { figfamGenomeIdSet.get(figfamId).add(genomeId); } else { final Set<String> genomeIdSet = new HashSet<>(); genomeIdSet.add(genomeId); figfamGenomeIdSet.put(figfamId, genomeIdSet); } } } for (String figfamId : figfamGenomeIdCountMap.keySet()) { final List genomeIdStr = figfamGenomeIdCountMap.get(figfamId); figfamGenomeIdStr.put(figfamId, StringUtils.join(genomeIdStr, "")); figfamGenomeCount.put(figfamId, figfamGenomeIdSet.get(figfamId).size()); } point = System.currentTimeMillis(); LOGGER.debug("1st query process : {} ms, figfamGenomeIdStr:{}, figfamGenomeCount:{}", (point - start), figfamGenomeIdStr.size(), figfamGenomeCount.size()); long start2nd = System.currentTimeMillis(); // 2nd query query.set("json.facet", "{stat:{type:field,field:" + familyId + ",limit:-1,facet:{min:\"min(aa_length)\",max:\"max(aa_length)\",mean:\"avg(aa_length)\",ss:\"sumsq(aa_length)\",sum:\"sum(aa_length)\"}}}"); LOGGER.trace("getGroupStats() 2/3: [{}] {}", SolrCore.FEATURE.getSolrCoreName(), query); apiResponse = dataApi.solrQuery(SolrCore.FEATURE, query); point = System.currentTimeMillis(); LOGGER.debug("2st query: {} ms", (point - start2nd)); start2nd = point; resp = jsonReader.readValue(apiResponse); facets = (Map) resp.get("facets"); stat = (Map) facets.get("stat"); List<Map> buckets = (List<Map>) stat.get("buckets"); for (Map bucket : buckets) { final String figfamId = (String) bucket.get("val"); final int count = (Integer) bucket.get("count"); double min, max, mean, sumsq, sum; if (bucket.get("min") instanceof Double) { min = (Double) bucket.get("min"); } else if (bucket.get("min") instanceof Integer) { min = ((Integer) 
bucket.get("min")).doubleValue(); } else { min = 0; } if (bucket.get("max") instanceof Double) { max = (Double) bucket.get("max"); } else if (bucket.get("max") instanceof Integer) { max = ((Integer) bucket.get("max")).doubleValue(); } else { max = 0; } if (bucket.get("mean") instanceof Double) { mean = (Double) bucket.get("mean"); } else if (bucket.get("mean") instanceof Integer) { mean = ((Integer) bucket.get("mean")).doubleValue(); } else { mean = 0; } if (bucket.get("ss") instanceof Double) { sumsq = (Double) bucket.get("ss"); } else if (bucket.get("ss") instanceof Integer) { sumsq = ((Integer) bucket.get("ss")).doubleValue(); } else { sumsq = 0; } if (bucket.get("sum") instanceof Double) { sum = (Double) bucket.get("sum"); } else if (bucket.get("sum") instanceof Integer) { sum = ((Integer) bucket.get("sum")).doubleValue(); } else { sum = 0; } // LOGGER.debug("bucket:{}, sumsq:{}, count: {}", bucket, sumsq, count); double std; if (count > 1) { // std = Math.sqrt(sumsq / (count - 1)); final double realSq = sumsq - (sum * sum) / count; std = Math.sqrt(realSq / (count - 1)); } else { std = 0; } final JSONObject aaLength = new JSONObject(); aaLength.put("min", min); aaLength.put("max", max); aaLength.put("mean", mean); aaLength.put("stddev", std); figfamIdList.add(figfamId); final JSONObject figfam = new JSONObject(); figfam.put("genomes", figfamGenomeIdStr.get(figfamId)); figfam.put("genome_count", figfamGenomeCount.get(figfamId)); figfam.put("feature_count", count); figfam.put("stats", aaLength); figfams.put(figfamId, figfam); } point = System.currentTimeMillis(); LOGGER.debug("2st query process: {} ms", (point - start2nd)); } catch (IOException e) { LOGGER.error(e.getMessage(), e); } // getting distinct figfam_product if (!figfamIdList.isEmpty()) { figfamIdList.remove(""); try { SolrQuery query = new SolrQuery("family_id:(" + StringUtils.join(figfamIdList, " OR ") + ")"); query.addFilterQuery("family_type:" + familyType); query.addField("family_id,family_product").setRows(figfamIdList.size()); LOGGER.debug("getGroupStats() 3/3: [{}] {}", SolrCore.FIGFAM_DIC.getSolrCoreName(), query); String apiResponse = dataApi.solrQuery(SolrCore.FIGFAM_DIC, query); Map resp = jsonReader.readValue(apiResponse); Map respBody = (Map) resp.get("response"); List<Map> sdl = (List<Map>) respBody.get("docs"); for (final Map doc : sdl) { final JSONObject figfam = (JSONObject) figfams.get(doc.get("family_id")); figfam.put("description", doc.get("family_product")); figfams.put(doc.get("family_id").toString(), figfam); } int i = 1; while (sdl.size() == 25000) { query.setStart(25000 * i); apiResponse = dataApi.solrQuery(SolrCore.FIGFAM_DIC, query); resp = jsonReader.readValue(apiResponse); respBody = (Map) resp.get("response"); sdl = (List<Map>) respBody.get("docs"); for (final Map doc : sdl) { final JSONObject figfam = (JSONObject) figfams.get(doc.get("family_id")); figfam.put("description", doc.get("family_product")); figfams.put(doc.get("family_id").toString(), figfam); } i++; } } catch (IOException e) { LOGGER.error(e.getMessage(), e); LOGGER.debug("::getGroupStats() 3/3, params: {}", request.getParameterMap().toString()); } figfams.writeJSONString(writer); } }
From source file:lux.solr.TLogTest.java
License:Mozilla Public License
private QueryResponse search(String q) throws SolrServerException {
    SolrQuery query = new SolrQuery();
    query.setQuery(q);
    query.addField("lux_uri");
    query.addField("lux_xml");
    return solr.query(query);
}
From source file:org.aksw.simba.bengal.triple2nl.nlp.relation.BoaPatternSelector.java
License:Apache License
/**
 * Returns all patterns from the index and their features for reverb and the
 * wordnet distance and the overall boa score.
 *
 * @param propertyUri
 *            the property URI
 * @return a list of patterns
 */
private static Set<Pattern> querySolrIndex(String propertyUri) {
    Map<Integer, Pattern> patterns = new HashMap<>();
    try {
        SolrQuery query = new SolrQuery("uri:\"" + propertyUri + "\"");
        query.addField("REVERB");
        query.addField("WORDNET_DISTANCE");
        query.addField("SUPPORT_NUMBER_OF_PAIRS_LEARNED_FROM");
        query.addField("pos");
        query.addField("boa-score");
        query.addField("nlr-var");
        query.addField("nlr-no-var");
        query.setRows(10000);

        QueryResponse response = server.query(query);
        SolrDocumentList docList = response.getResults();

        // return the first list of types
        for (SolrDocument d : docList) {
            Pattern pattern = new Pattern();
            pattern.naturalLanguageRepresentation = (String) d.get("nlr-var");
            pattern.naturalLanguageRepresentationWithoutVariables = (String) d.get("nlr-no-var");
            pattern.features.put("REVERB", Double.valueOf((String) d.get("REVERB")));
            pattern.features.put("WORDNET_DISTANCE", Double.valueOf((String) d.get("WORDNET_DISTANCE")));
            pattern.features.put("SUPPORT_NUMBER_OF_PAIRS_LEARNED_FROM",
                    Double.valueOf((String) d.get("SUPPORT_NUMBER_OF_PAIRS_LEARNED_FROM")));
            pattern.posTags = (String) d.get("pos");
            pattern.boaScore = Double.valueOf((String) d.get("boa-score"));
            pattern.naturalLanguageScore = calculateNaturalLanguageScore(pattern);

            // since ?D? and ?R? are removed, two patterns might look the same
            if (isSuitable(pattern)) {
                // merge the pattern
                if (patterns.containsKey(pattern.hashCode())) {
                    Pattern p = patterns.get(pattern.hashCode());
                    p.features.put("REVERB", pattern.features.get("REVERB") + p.features.get("REVERB"));
                    p.features.put("WORDNET_DISTANCE",
                            pattern.features.get("WORDNET_DISTANCE") + p.features.get("WORDNET_DISTANCE"));
                    p.features.put("SUPPORT_NUMBER_OF_PAIRS_LEARNED_FROM",
                            pattern.features.get("SUPPORT_NUMBER_OF_PAIRS_LEARNED_FROM")
                                    + p.features.get("SUPPORT_NUMBER_OF_PAIRS_LEARNED_FROM"));
                    p.boaScore += pattern.boaScore;
                    p.naturalLanguageScore += pattern.naturalLanguageScore;
                    patterns.put(pattern.hashCode(), p);
                } else {
                    patterns.put(pattern.hashCode(), pattern);
                }
            }
        }
    } catch (SolrServerException e) {
        System.out.println("Could not execute query: " + e);
        e.printStackTrace();
    }
    return new HashSet<>(patterns.values());
}
From source file:org.ambraproject.article.service.BrowseServiceImpl.java
License:Apache License
/**
 * Returns a list of articles for a given category
 *
 * @param params a collection of filters / parameters to browse by
 * @return articles
 */
private BrowseResult getArticlesBySubjectViaSolr(BrowseParameters params) {
    BrowseResult result = new BrowseResult();
    ArrayList<SearchHit> articles = new ArrayList<SearchHit>();
    long total = 0;

    SolrQuery query = createCommonQuery(params.getJournalKey());

    query.addField("title_display");
    query.addField("author_display");
    query.addField("article_type");
    query.addField("publication_date");
    query.addField("id");
    query.addField("abstract_primary_display");
    query.addField("eissn");

    if (params.getSubjects() != null && params.getSubjects().length > 0) {
        StringBuffer subjectQuery = new StringBuffer();
        for (String subject : params.getSubjects()) {
            subjectQuery.append("\"").append(subject).append("\"").append(" AND ");
        }
        // remove the last " AND "
        query.setQuery("subject_level_1:(" + subjectQuery.substring(0, subjectQuery.length() - 5) + ")");
    }

    // we use the subject_level_1 field instead of the subject_facet field because
    // we are only interested in the top-level subjects
    query.setFacet(true);
    query.addFacetField("subject_level_1");
    query.setFacetMinCount(1);
    query.setFacetSort("index");

    setSort(query, params);

    query.setStart(params.getPageNum() * params.getPageSize());
    query.setRows(params.getPageSize());

    try {
        QueryResponse response = this.serverFactory.getServer().query(query);
        SolrDocumentList documentList = response.getResults();
        total = documentList.getNumFound();

        for (SolrDocument document : documentList) {
            SearchHit sh = createArticleBrowseDisplay(document, query.toString());
            articles.add(sh);
        }

        result.setSubjectFacet(facetCountsToHashMap(response.getFacetField("subject_level_1")));
    } catch (SolrServerException e) {
        log.error("Unable to execute a query on the Solr Server.", e);
    }

    result.setTotal(total);
    result.setArticles(articles);

    return result;
}
From source file:org.ambraproject.article.service.BrowseServiceImpl.java
License:Apache License
/**
 * Returns a list of articles in a given date range, from newest to oldest
 *
 * @param params the collection class of parameters
 * @return the articles
 */
private BrowseResult getArticlesByDateViaSolr(BrowseParameters params) {
    BrowseResult result = new BrowseResult();
    ArrayList<SearchHit> articles = new ArrayList<SearchHit>();
    long totalSize = 0;

    SolrQuery query = createCommonQuery(params.getJournalKey());

    query.addField("title_display");
    query.addField("author_display");
    query.addField("article_type");
    query.addField("publication_date");
    query.addField("id");
    query.addField("abstract_primary_display");
    query.addField("eissn");

    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
    String sDate = sdf.format(params.getStartDate().getTime());
    String eDate = sdf.format(params.getEndDate().getTime());

    sDate = sDate + "T00:00:00Z";
    eDate = eDate + "T00:00:00Z";

    query.addFilterQuery("publication_date:[" + sDate + " TO " + eDate + "]");

    StringBuffer sb = new StringBuffer();
    if (params.getArticleTypes() != null && params.getArticleTypes().size() > 0) {
        for (URI uri : params.getArticleTypes()) {
            String path = uri.getPath();
            int index = path.lastIndexOf("/");
            if (index != -1) {
                String articleType = path.substring(index + 1);
                sb.append("\"").append(articleType).append("\"").append(" OR ");
            }
        }
        String articleTypesQuery = sb.substring(0, sb.length() - 4);
        if (articleTypesQuery.length() > 0) {
            query.addFilterQuery("article_type_facet:" + articleTypesQuery);
        }
    }

    setSort(query, params);

    query.setStart(params.getPageNum() * params.getPageSize());
    query.setRows(params.getPageSize());

    log.info("getArticlesByDate Solr Query:" + query.toString());

    try {
        QueryResponse response = this.serverFactory.getServer().query(query);
        SolrDocumentList documentList = response.getResults();
        totalSize = documentList.getNumFound();

        for (SolrDocument document : documentList) {
            SearchHit sh = createArticleBrowseDisplay(document, query.toString());
            articles.add(sh);
        }
    } catch (SolrServerException e) {
        log.error("Unable to execute a query on the Solr Server.", e);
    }

    result.setArticles(articles);
    result.setTotal(totalSize);

    return result;
}