List of usage examples for org.apache.solr.client.solrj.SolrQuery.setStart
public SolrQuery setStart(Integer start)
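setStart(Integer start) sets the offset of the first matching row that Solr returns; combined with setRows it gives simple offset-based paging, which is the pattern most of the examples below use. The following minimal sketch is not taken from any of the source files listed here; the core URL ("http://localhost:8983/solr/collection1") and the "id" field are placeholder assumptions.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;

public class SetStartPagingExample {
    public static void main(String[] args) throws Exception {
        // placeholder core URL; adjust to your Solr installation
        try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
            final int pageSize = 10;
            int start = 0;
            long numFound;
            do {
                SolrQuery query = new SolrQuery("*:*");
                query.setStart(start);   // offset of the first row to return
                query.setRows(pageSize); // number of rows per page
                QueryResponse rsp = client.query(query);
                SolrDocumentList page = rsp.getResults();
                numFound = page.getNumFound();
                for (SolrDocument doc : page) {
                    System.out.println(doc.getFieldValue("id")); // "id" is assumed to exist in the schema
                }
                start += pageSize;
            } while (start < numFound);
        }
    }
}

Note that large setStart offsets become expensive on big indexes; for deep paging Solr's cursorMark mechanism is usually preferred over increasing the start value.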
From source file:net.yacy.cora.federate.solr.connector.AbstractSolrConnector.java
License:Open Source License
/**
 * get facets of the index: a list of lists with values that are most common in a specific field
 * @param query a query which is performed to get the facets
 * @param fields the field names which are selected as facet
 * @param maxresults the maximum size of the resulting maps
 * @return a map with key = facet field name, value = an ordered map of field values for that field
 * @throws IOException
 */
@Override
public LinkedHashMap<String, ReversibleScoreMap<String>> getFacets(String query, int maxresults, final String... fields) throws IOException {
    // construct query
    assert fields.length > 0;
    final SolrQuery params = new SolrQuery();
    params.setQuery(query);
    params.setRows(0);
    params.setStart(0);
    params.setFacet(true);
    params.setFacetMinCount(1); // there are many 0-count facets in the uninverted index cache
    params.setFacetLimit(maxresults);
    params.setFacetSort(FacetParams.FACET_SORT_COUNT);
    params.setParam(FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc /*FACET_METHOD_fcs*/);
    params.setFields(fields);
    params.clearSorts();
    params.setIncludeScore(false);
    for (String field : fields) params.addFacetField(field);

    // query the server
    QueryResponse rsp = getResponseByParams(params);
    LinkedHashMap<String, ReversibleScoreMap<String>> facets = new LinkedHashMap<String, ReversibleScoreMap<String>>(fields.length);
    for (String field : fields) {
        FacetField facet = rsp.getFacetField(field);
        ReversibleScoreMap<String> result = new ClusteredScoreMap<String>(UTF8.insensitiveUTF8Comparator);
        List<Count> values = facet.getValues();
        if (values == null) continue;
        for (Count ff : values) if (ff.getCount() > 0) result.set(ff.getName(), (int) ff.getCount());
        facets.put(field, result);
    }
    return facets;
}
From source file:net.yacy.cora.federate.solr.connector.AbstractSolrConnector.java
License:Open Source License
@Override
public SolrDocument getDocumentById(final String id, final String... fields) throws IOException {
    assert id.length() == Word.commonHashLength : "wrong id: " + id;
    final SolrQuery query = new SolrQuery();
    // construct query
    StringBuilder sb = new StringBuilder(23);
    sb.append("{!cache=false raw f=").append(CollectionSchema.id.getSolrFieldName()).append('}').append(id);
    query.setQuery(sb.toString());
    //query.setQuery("*:*");
    //query.addFilterQuery(sb.toString());
    query.clearSorts();
    query.setRows(1);
    query.setStart(0);
    if (fields != null && fields.length > 0) query.setFields(fields);
    query.setIncludeScore(false);

    // query the server
    try {
        final SolrDocumentList docs = getDocumentListByParams(query);
        if (docs == null || docs.isEmpty()) return null;
        SolrDocument doc = docs.get(0);
        return doc;
    } catch (final Throwable e) {
        clearCaches(); // we clear the caches in case that this is caused by OOM
        throw new IOException(e.getMessage(), e);
    }
}
From source file:net.yacy.peers.Protocol.java
License:Open Source License
protected static int solrQuery(final SearchEvent event, final SolrQuery solrQuery, final int offset, final int count, final Seed target, final int partitions, final Blacklist blacklist) {
    if (event.query.getQueryGoal().getQueryString(false) == null || event.query.getQueryGoal().getQueryString(false).length() == 0) {
        return -1; // we cannot query solr only with word hashes, there is no clear text string
    }
    event.addExpectedRemoteReferences(count);
    if (partitions > 0) solrQuery.set("partitions", partitions);
    solrQuery.setStart(offset);
    solrQuery.setRows(count);

    // set highlighting query attributes
    if (event.query.contentdom == Classification.ContentDomain.TEXT || event.query.contentdom == Classification.ContentDomain.ALL) {
        solrQuery.setHighlight(true);
        solrQuery.setHighlightFragsize(SearchEvent.SNIPPET_MAX_LENGTH);
        //solrQuery.setHighlightRequireFieldMatch();
        solrQuery.setHighlightSimplePost("</b>");
        solrQuery.setHighlightSimplePre("<b>");
        solrQuery.setHighlightSnippets(5);
        for (CollectionSchema field : snippetFields) solrQuery.addHighlightField(field.getSolrFieldName());
    } else {
        solrQuery.setHighlight(false);
    }
    boolean localsearch = target == null || target.equals(event.peers.mySeed());
    Map<String, ReversibleScoreMap<String>> facets = new HashMap<String, ReversibleScoreMap<String>>(event.query.facetfields.size());
    Map<String, LinkedHashSet<String>> snippets = new HashMap<String, LinkedHashSet<String>>(); // this will be a list of urlhash-snippet entries
    final QueryResponse[] rsp = new QueryResponse[] { null };
    final SolrDocumentList[] docList = new SolrDocumentList[] { null };
    String ip = target.getIP();
    { // encapsulate expensive solr QueryResponse object
        if (localsearch && !Switchboard.getSwitchboard().getConfigBool(SwitchboardConstants.DEBUG_SEARCH_REMOTE_SOLR_TESTLOCAL, false)) {
            // search the local index
            try {
                SolrConnector sc = event.getQuery().getSegment().fulltext().getDefaultConnector();
                if (!sc.isClosed()) {
                    rsp[0] = sc.getResponseByParams(solrQuery);
                    docList[0] = rsp[0].getResults();
                }
            } catch (final Throwable e) {
                Network.log.info("SEARCH failed (solr), localpeer (" + e.getMessage() + ")", e);
                return -1;
            }
        } else {
            try {
                final boolean myseed = target == event.peers.mySeed();
                if (!myseed && !target.getFlagSolrAvailable()) {
                    // skip if peer.dna has flag that last try resulted in error
                    Network.log.info("SEARCH skip (solr), remote Solr interface not accessible, peer=" + target.getName());
                    return -1;
                }
                final String address = myseed ? "localhost:" + target.getPort() : target.getPublicAddress(ip);
                final int solrtimeout = Switchboard.getSwitchboard().getConfigInt(SwitchboardConstants.FEDERATED_SERVICE_SOLR_INDEXING_TIMEOUT, 6000);
                Thread remoteRequest = new Thread() {
                    @Override
                    public void run() {
                        this.setName("Protocol.solrQuery(" + solrQuery.getQuery() + " to " + target.hash + ")");
                        try {
                            RemoteInstance instance = new RemoteInstance("http://" + address, null, "solr", solrtimeout); // this is a 'patch configuration' which considers 'solr' as default collection
                            try {
                                SolrConnector solrConnector = new RemoteSolrConnector(instance, myseed ? true : target.getVersion() >= 1.63, "solr");
                                if (!solrConnector.isClosed()) try {
                                    rsp[0] = solrConnector.getResponseByParams(solrQuery);
                                    docList[0] = rsp[0].getResults();
                                } catch (Throwable e) {
                                } finally {
                                    solrConnector.close();
                                }
                            } catch (Throwable ee) {
                            } finally {
                                instance.close();
                            }
                        } catch (Throwable eee) {
                        }
                    }
                };
                remoteRequest.start();
                remoteRequest.join(solrtimeout); // just wait until timeout appears
                if (remoteRequest.isAlive()) {
                    try {
                        remoteRequest.interrupt();
                    } catch (Throwable e) {
                    }
                    Network.log.info("SEARCH failed (solr), remote Peer: " + target.getName() + "/" + target.getPublicAddress(ip) + " does not answer (time-out)");
                    target.setFlagSolrAvailable(false || myseed);
                    return -1; // give up, leave remoteRequest abandoned.
                }
                // no need to close this here because that sends a commit to remote solr which is not wanted here
            } catch (final Throwable e) {
                Network.log.info("SEARCH failed (solr), remote Peer: " + target.getName() + "/" + target.getPublicAddress(ip) + " (" + e.getMessage() + ")");
                target.setFlagSolrAvailable(false || localsearch);
                return -1;
            }
        }
        if (rsp[0] == null || docList[0] == null) {
            Network.log.info("SEARCH failed (solr), remote Peer: " + target.getName() + "/" + target.getPublicAddress(ip) + " returned null");
            target.setFlagSolrAvailable(false || localsearch);
            return -1;
        }

        // evaluate facets
        for (String field : event.query.facetfields) {
            FacetField facet = rsp[0].getFacetField(field);
            ReversibleScoreMap<String> result = new ClusteredScoreMap<String>(UTF8.insensitiveUTF8Comparator);
            List<Count> values = facet == null ? null : facet.getValues();
            if (values == null) continue;
            for (Count ff : values) {
                int c = (int) ff.getCount();
                if (c == 0) continue;
                if (ff.getName().length() == 0) continue; // facet entry without text is not useful
                result.set(ff.getName(), c);
            }
            if (result.size() > 0) facets.put(field, result);
        }

        // evaluate snippets
        Map<String, Map<String, List<String>>> rawsnippets = rsp[0].getHighlighting(); // a map from the urlhash to a map with key=field and value = list of snippets
        if (rawsnippets != null) {
            nextsnippet: for (Map.Entry<String, Map<String, List<String>>> re : rawsnippets.entrySet()) {
                Map<String, List<String>> rs = re.getValue();
                for (CollectionSchema field : snippetFields) {
                    if (rs.containsKey(field.getSolrFieldName())) {
                        List<String> s = rs.get(field.getSolrFieldName());
                        if (s.size() > 0) {
                            LinkedHashSet<String> ls = new LinkedHashSet<String>();
                            ls.addAll(s);
                            snippets.put(re.getKey(), ls);
                            continue nextsnippet;
                        }
                    }
                }
                // no snippet found :( -- we don't assign a value here by default; that can be done as an evaluation outside this method
            }
        }
        rsp[0] = null;
    }

    // evaluate result
    if (docList == null || docList[0].size() == 0) {
        Network.log.info("SEARCH (solr), returned 0 out of 0 documents from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())) + " query = " + solrQuery.toString());
        return 0;
    }
    List<URIMetadataNode> container = new ArrayList<URIMetadataNode>();
    Network.log.info("SEARCH (solr), returned " + docList[0].size() + " out of " + docList[0].getNumFound() + " documents and " + facets.size() + " facets " + facets.keySet().toString() + " from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())));
    int term = count;
    Collection<SolrInputDocument> docs;
    if (event.addResultsToLocalIndex) {
        // only needed to store remote results
        docs = new ArrayList<SolrInputDocument>(docList[0].size());
    } else docs = null;
    for (final SolrDocument doc : docList[0]) {
        if (term-- <= 0) {
            break; // do not process more than requested (in case that evil peers fill us up with rubbish)
        }
        // get one single search result
        if (doc == null) {
            continue;
        }
        URIMetadataNode urlEntry = new URIMetadataNode(doc);

        if (blacklist.isListed(BlacklistType.SEARCH, urlEntry.url())) {
            if (Network.log.isInfo()) {
                if (localsearch) {
                    Network.log.info("local search (solr): filtered blacklisted url " + urlEntry.url());
                } else {
                    Network.log.info("remote search (solr): filtered blacklisted url " + urlEntry.url() + " from " + (target == null ? "shard" : ("peer " + target.hash + ":" + target.getName())));
                }
            }
            continue; // block with blacklist
        }

        final String urlRejectReason = Switchboard.getSwitchboard().crawlStacker.urlInAcceptedDomain(urlEntry.url());
        if (urlRejectReason != null) {
            if (Network.log.isInfo()) {
                if (localsearch) {
                    Network.log.info("local search (solr): rejected url '" + urlEntry.url() + "' (" + urlRejectReason + ")");
                } else {
                    Network.log.info("remote search (solr): rejected url '" + urlEntry.url() + "' (" + urlRejectReason + ") from peer " + target.getName());
                }
            }
            continue; // reject url outside of our domain
        }

        // passed all checks, store url
        if (!localsearch) {
            // put the remote documents to the local index. We must convert the solr document to a solr input document:
            if (event.addResultsToLocalIndex) {
                final SolrInputDocument sid = event.query.getSegment().fulltext().getDefaultConfiguration().toSolrInputDocument(doc);
                // the input document stays untouched because it contains top-level cloned objects
                docs.add(sid);
                // will be stored to index, and is a full solr document, can be added to firstseen
                event.query.getSegment().setFirstSeenTime(urlEntry.hash(), Math.min(urlEntry.moddate().getTime(), System.currentTimeMillis()));
            }

            // after this conversion we can remove the largest and not used field text_t and synonyms_sxt from the document
            // because that goes into a search cache and would take a lot of memory in the search cache
            //doc.removeFields(CollectionSchema.text_t.getSolrFieldName());
            doc.removeFields(CollectionSchema.synonyms_sxt.getSolrFieldName());

            ResultURLs.stack(ASCII.String(urlEntry.url().hash()), urlEntry.url().getHost(), event.peers.mySeed().hash.getBytes(), UTF8.getBytes(target.hash), EventOrigin.QUERIES);
        }

        // add the url entry to the word indexes
        container.add(urlEntry);
    }
    final int dls = docList[0].size();
    final int numFound = (int) docList[0].getNumFound();
    docList[0].clear();
    docList[0] = null;
    if (localsearch) {
        event.addNodes(container, facets, snippets, true, "localpeer", numFound);
        event.addFinalize();
        event.addExpectedRemoteReferences(-count);
        Network.log.info("local search (solr): localpeer sent " + container.size() + "/" + numFound + " references");
    } else {
        if (event.addResultsToLocalIndex) {
            for (SolrInputDocument doc : docs) {
                event.query.getSegment().putDocument(doc);
            }
            docs.clear();
            docs = null;
        }
        event.addNodes(container, facets, snippets, false, target.getName() + "/" + target.hash, numFound);
        event.addFinalize();
        event.addExpectedRemoteReferences(-count);
        Network.log.info("remote search (solr): peer " + target.getName() + " sent " + (container.size() == 0 ? 0 : container.size()) + "/" + numFound + " references");
    }
    return dls;
}
From source file:net.yacy.search.index.ErrorCache.java
License:Open Source License
public ErrorCache(final Fulltext fulltext) {
    this.fulltext = fulltext;
    this.cache = new LinkedHashMap<String, CollectionConfiguration.FailDoc>();
    // concurrently fill stack with latest values
    new Thread() {
        @Override
        public void run() {
            final SolrQuery params = new SolrQuery();
            params.setParam("defType", "edismax");
            params.setStart(0);
            params.setRows(1000);
            params.setFacet(false);
            params.setSort(new SortClause(CollectionSchema.last_modified.getSolrFieldName(), SolrQuery.ORDER.desc));
            params.setFields(CollectionSchema.id.getSolrFieldName());
            params.setQuery(CollectionSchema.failreason_s.getSolrFieldName() + AbstractSolrConnector.CATCHALL_DTERM);
            params.set(CommonParams.DF, CollectionSchema.id.getSolrFieldName()); // DisMaxParams.QF or CommonParams.DF must be given
            SolrDocumentList docList;
            try {
                docList = fulltext.getDefaultConnector().getDocumentListByParams(params);
                if (docList != null) for (int i = docList.size() - 1; i >= 0; i--) {
                    SolrDocument doc = docList.get(i);
                    String hash = (String) doc.getFieldValue(CollectionSchema.id.getSolrFieldName());
                    cache.put(hash, null);
                }
            } catch (IOException e) {
                ConcurrentLog.logException(e);
            }
        }
    }.start();
}
From source file:net.yacy.search.index.ErrorCacheFiller.java
License:Open Source License
/**
 * Fills the error cache with recently failed document hashes found in the index
 */
@Override
public void run() {
    final SolrQuery params = new SolrQuery();
    params.setParam("defType", "edismax");
    params.setStart(0);
    params.setRows(1000);
    params.setFacet(false);
    params.setSort(new SortClause(CollectionSchema.load_date_dt.getSolrFieldName(), SolrQuery.ORDER.desc)); // load_date_dt = faildate
    params.setFields(CollectionSchema.id.getSolrFieldName());
    params.setQuery(CollectionSchema.failreason_s.getSolrFieldName() + AbstractSolrConnector.CATCHALL_DTERM);
    params.set(CommonParams.DF, CollectionSchema.id.getSolrFieldName()); // DisMaxParams.QF or CommonParams.DF must be given
    SolrDocumentList docList;
    try {
        docList = this.sb.index.fulltext().getDefaultConnector().getDocumentListByParams(params);
        if (docList != null) for (int i = docList.size() - 1; i >= 0; i--) {
            SolrDocument doc = docList.get(i);
            String hash = (String) doc.getFieldValue(CollectionSchema.id.getSolrFieldName());
            cache.putHashOnly(hash);
        }
    } catch (IOException e) {
        ConcurrentLog.logException(e);
    }
}
From source file:net.yacy.search.query.QueryParams.java
License:Open Source License
private SolrQuery getBasicParams(boolean getFacets, List<String> fqs) {
    final SolrQuery params = new SolrQuery();
    params.setParam("defType", "edismax");
    params.setParam(DisMaxParams.QF, CollectionSchema.text_t.getSolrFieldName() + "^1.0");
    params.setStart(this.offset);
    params.setRows(this.itemsPerPage);
    params.setFacet(false);

    if (this.ranking.coeff_date == RankingProfile.COEFF_MAX) {
        // set a most-recent ordering
        params.setSort(new SortClause(CollectionSchema.last_modified.getSolrFieldName(), SolrQuery.ORDER.desc));
        //params.setSortField(CollectionSchema.last_modified.getSolrFieldName(), ORDER.desc); // deprecated in Solr 4.2
    }

    // add site facets
    fqs.addAll(getFacetsFilterQueries());
    if (fqs.size() > 0) {
        params.setFilterQueries(fqs.toArray(new String[fqs.size()]));
    }

    // set facet query attributes
    if (getFacets && this.facetfields.size() > 0) {
        params.setFacet(true);
        params.setFacetMinCount(1);
        params.setFacetLimit(FACETS_STANDARD_MAXCOUNT);
        params.setFacetSort(FacetParams.FACET_SORT_COUNT);
        params.setParam(FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fcs);
        for (String field : this.facetfields) params.addFacetField("{!ex=" + field + "}" + field); // params.addFacetField("{!ex=" + field + "}" + field);
        if (this.facetfields.contains(CollectionSchema.dates_in_content_dts.name())) {
            params.setParam("facet.range", CollectionSchema.dates_in_content_dts.name());
            @SuppressWarnings({ "static-access", "deprecation" })
            String start = TrieDateField.formatExternal(new Date(System.currentTimeMillis() - 1000L * 60L * 60L * 24L * 3));
            @SuppressWarnings({ "static-access", "deprecation" })
            String end = TrieDateField.formatExternal(new Date(System.currentTimeMillis() + 1000L * 60L * 60L * 24L * 3));
            params.setParam("f." + CollectionSchema.dates_in_content_dts.getSolrFieldName() + ".facet.range.start", start);
            params.setParam("f." + CollectionSchema.dates_in_content_dts.getSolrFieldName() + ".facet.range.end", end);
            params.setParam("f." + CollectionSchema.dates_in_content_dts.getSolrFieldName() + ".facet.range.gap", "+1DAY");
            params.setParam("f." + CollectionSchema.dates_in_content_dts.getSolrFieldName() + ".facet.sort", "index");
            params.setParam("f." + CollectionSchema.dates_in_content_dts.getSolrFieldName() + ".facet.limit", Integer.toString(FACETS_DATE_MAXCOUNT)); // the year constraint should cause that limitation already
        }
        //for (String k : params.getParameterNames()) {ArrayList<String> al = new ArrayList<>(); for (String s : params.getParams(k)) al.add(s); System.out.println("Parameter: " + k + "=" + al.toString());}
        //http://localhost:8090/solr/collection1/select?q=*:*&rows=0&facet=true&facet.field=dates_in_content_dts&f.dates_in_content_dts.facet.limit=730&f.dates_in_content_dts.facet.sort=index
    } else {
        params.setFacet(false);
    }
    params.setFields("*", "score"); // we need the score for post-ranking
    return params;
}
From source file:nl.knaw.dans.common.solr.converter.SolrQueryRequestConverter.java
License:Apache License
public static SolrQuery convert(SearchRequest request) throws SolrSearchEngineException {
    SolrQuery queryObj = new SolrQuery();

    // set query
    String query = "";
    String qstr = request.getQuery().getQueryString();
    if (qstr != null) query += qstr;
    FieldSet<?> fieldQueries = request.getFieldQueries();
    if (fieldQueries != null) {
        for (Field<?> fieldQuery : fieldQueries) {
            query += fieldQueryToString(fieldQuery) + " ";
        }
    }
    queryObj.setQuery(query);

    // set filter queries
    FieldSet<?> filterQueries = request.getFilterQueries();
    if (filterQueries != null) {
        int i = 0;
        String[] fq = new String[filterQueries.size()];
        for (Field<?> field : filterQueries) {
            fq[i] = fieldQueryToString(field);
            i++;
        }
        queryObj.setFilterQueries(fq);
    }

    // set sort fields
    List<SortField> sortFields = request.getSortFields();
    if (sortFields != null) {
        for (SortField sortField : sortFields) {
            ORDER order;
            if (sortField.getValue().equals(SortOrder.DESC))
                order = ORDER.desc;
            else
                order = ORDER.asc;
            String sortFieldName = sortField.getName();
            if (sortField.getSortType().equals(SortType.BY_RELEVANCE_SCORE))
                sortFieldName = "score";
            queryObj.addSortField(sortFieldName, order);
        }
    }

    // faceting enabled
    Set<String> facetFields = request.getFacetFields();
    boolean enableFaceting = facetFields != null && facetFields.size() > 0;
    queryObj.setFacet(enableFaceting);
    if (enableFaceting) {
        for (String facetField : facetFields) {
            queryObj.addFacetField(facetField);
        }
    }

    // hit highlighting
    queryObj.setHighlight(request.isHighlightingEnabled());

    // paging
    queryObj.setRows(request.getLimit());
    queryObj.setStart(request.getOffset());

    // misc settings
    queryObj.setIncludeScore(true);

    return queryObj;
}
From source file:no.sesat.search.query.token.SolrTokenEvaluator.java
License:Open Source License
/**
 * Search solr and find out if the given tokens are company, firstname, lastname etc
 * @param query
 */
@SuppressWarnings("unchecked")
private Map<String, List<TokenMatch>> query(final String query) throws EvaluationException {
    LOG.trace("queryFast( " + query + " )");
    Map<String, List<TokenMatch>> result = null;

    if (query != null && 0 < query.length()) {
        try {
            result = (Map<String, List<TokenMatch>>) CACHE_QUERY.getFromCache(query, REFRESH_PERIOD);
        } catch (NeedsRefreshException nre) {
            boolean updatedCache = false;
            result = new HashMap<String, List<TokenMatch>>();
            String url = null;
            try {
                final String token = query.replaceAll("\"", "");

                // set up query
                final SolrQuery solrQuery = new SolrQuery().setQuery("list_entry_shingle:\"" + token + "\"").setRows(INITIAL_ROWS_TO_FETCH);
                // when the root logger is set to DEBUG do not limit connection times
                if (Logger.getRootLogger().getLevel().isGreaterOrEqual(Level.INFO)) {
                    // default timeout is half second. TODO make configuration.
                    solrQuery.setTimeAllowed(500);
                }

                // query for hits
                QueryResponse response = factory.getSolrServer().query(solrQuery);
                final int numberOfHits = (int) response.getResults().getNumFound();
                boolean more = false;
                do {
                    DUMP.info(solrQuery.toString());
                    final SolrDocumentList docs = response.getResults();

                    // iterate through docs
                    for (SolrDocument doc : docs) {
                        final String name = (String) doc.getFieldValue("list_name");
                        final String exactname = EXACT_PREFIX + name;
                        // remove words made solely of characters that the parser considers whitespace
                        final String hit = ((String) doc.getFieldValue("list_entry")).replaceAll("\\b" + SKIP_REGEX + "+\\b", " ");
                        final String synonym = (String) doc.getFieldValue("list_entry_synonym");
                        if (factory.usesListName(name, exactname)) {
                            addMatch(name, hit, synonym, query, result);
                            if (hit.equalsIgnoreCase(query.trim())) {
                                addMatch(exactname, hit, synonym, query, result);
                            }
                        }
                    }
                    int rest = numberOfHits - INITIAL_ROWS_TO_FETCH;
                    if (!more && rest > 0) {
                        more = true;
                        solrQuery.setStart(INITIAL_ROWS_TO_FETCH);
                        solrQuery.setRows(rest);
                        // query
                        response = factory.getSolrServer().query(solrQuery);
                    } else {
                        more = false;
                    }
                } while (more);

                result = Collections.<String, List<TokenMatch>>unmodifiableMap(result);
                CACHE_QUERY.putInCache(query, result);
                updatedCache = true;
            } catch (SolrServerException ex) {
                LOG.error(ex.getMessage(), ex);
                throw new EvaluationException(ERR_QUERY_FAILED + url, ex);
            } finally {
                if (!updatedCache) {
                    CACHE_QUERY.cancelUpdate(query);
                }
            }
        }
    } else {
        result = Collections.<String, List<TokenMatch>>emptyMap();
    }
    return result;
}
From source file:org.ala.dao.FulltextSearchDaoImplSolr.java
License:Open Source License
private QueryResponse getSolrQueryResponse(SolrQuery solrQuery, String[] filterQuery, Integer pageSize, Integer startIndex, String sortField, String sortDirection) throws Exception {
    if (logger.isDebugEnabled()) {
        logger.debug(solrQuery.getQuery());
    }
    // set the facet query if set
    addFqs(solrQuery, filterQuery);
    solrQuery.setRows(pageSize);
    solrQuery.setStart(startIndex);
    solrQuery.setSortField(sortField, ORDER.valueOf(sortDirection));

    // do the Solr search
    return solrUtils.getSolrServer().query(solrQuery); // can throw exception
}
From source file:org.ala.dao.FulltextSearchDaoImplSolr.java
License:Open Source License
/**
 * Retrieves a simple count using the supplied query
 *
 * @param query
 * @return
 * @throws SolrServerException
 */
private int doCountQuery(String query) throws Exception {
    logger.info("Count query: " + query);

    // do a query to retrieve a count
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setQueryType("standard");
    solrQuery.setFacet(false);
    solrQuery.setFacetMinCount(0);
    solrQuery.setFacetLimit(10000);
    solrQuery.setRows(0);
    solrQuery.setStart(0);
    solrQuery.setQuery(query);

    QueryResponse qr = solrUtils.getSolrServer().query(solrQuery); // can throw exception
    SolrDocumentList sdl = qr.getResults();
    return (int) sdl.getNumFound();
}