List of usage examples for `org.apache.solr.handler.component.ResponseBuilder#getFilters()`
public List<Query> getFilters()
From source file:com.plugtree.solradvert.AdvertComponent.java
License:Apache License
private Collection<Object> getFacts(ResponseBuilder rb) { Collection<Object> facts = new ArrayList<Object>(); QueryFactsCollector factsCollector = new QueryFactsCollector(); // put the main query if (rb.getQuery() != null) { factsCollector.collect(rb.getQuery(), facts); }//w ww .ja va2 s .co m // put all the filter queries if (rb.getFilters() != null) { for (Query fq : rb.getFilters()) { factsCollector.collect(fq, facts); } } // put the AdvertQuery // this is only for backwards-compatibility, so old tests don't fail AdvertQuery aq = new AdvertQueryImpl(rb); facts.add(aq); // put the SchemaTool SchemaTool st = new SchemaTool(rb); facts.add(st); // put the response builder facts.add(rb); logger.debug("Collected facts: " + facts); return facts; }
From source file:com.plugtree.solradvert.core.AdvertQueryImpl.java
License:Apache License
/**
 * Builds an AdvertQueryImpl backed by the given response builder, capturing
 * the builder itself plus its current main query and filter-query list.
 *
 * @param rb the response builder of the request being processed
 */
public AdvertQueryImpl(ResponseBuilder rb) {
    this.rb = rb;
    this.fqs = rb.getFilters();
    this.q = rb.getQuery();
}
From source file:com.sn.solr.plugin.rank.RankEngine.java
License:Apache License
/** * Provides implementation for Dense ranking ["1223"] as identified by the * {@link RankStrategy#LEGACY_DENSE} the difference is that this * implementation is computed without using facet results so this will * noticeably slower than computing rank based on facets * use {@link RankStrategy#DENSE}. Besides this implementation might cause * lot of cache evictions putting stress on memory. * * @see #computeDenseRank(List)//ww w. j av a 2 s . c o m * * @param pairList List of {@link Pair} objects that holds the value of rank * field & respective count. */ @Deprecated public static Map<String, Number> computeLegacyDenseRank(ResponseBuilder rb, String idField, String rankField) throws IOException { SolrIndexSearcher searcher = rb.req.getSearcher(); SolrParams params = rb.req.getParams();// .getParams(FacetParams.FACET_FIELD); String _start = params.get(CommonParams.START); String _rows = params.get(CommonParams.ROWS); int start = 0; int rows = 10; if (_start != null & AppHelper.isInteger(_start)) start = new Integer(_start); if (_rows != null & AppHelper.isInteger(_rows)) rows = new Integer(_rows); LOG.info("Computing rank using strategy: {}", RankStrategy.ORDINAL.getDescription()); FieldSelector fs = new MapFieldSelector(new String[] { idField, rankField }); Map<String, Number> rankMap = new HashMap<String, Number>(); DocList docs = searcher.getDocList(rb.getQuery(), rb.getFilters(), rb.getSortSpec().getSort(), 0, start + rows, 0); int denseRank = 1; int _CurrScore = 0; int _PrevScore = 0; int i = 0; for (DocIterator it = docs.iterator(); it.hasNext();) { Document doc = searcher.doc(it.nextDoc(), fs); _CurrScore = new Integer(doc.get(rankField)); if (i == 0) { _PrevScore = _CurrScore; } if (_PrevScore != _CurrScore) { _PrevScore = _CurrScore; denseRank++; } if (i >= start) { rankMap.put(doc.get(idField), denseRank); } i++; } return rankMap; }
From source file:com.teaspoonconsulting.solracls.SolrACLQueryComponent.java
License:Apache License
/**
 * Adds an ACL filter query for the principals named in the request, then
 * delegates to the superclass. Built filters are cached per sorted principal
 * set so the same principal combination reuses the same Query instance.
 */
@Override
public void prepare(ResponseBuilder rb) throws IOException {
    SolrParams requestParams = rb.req.getParams();
    String rawPrincipals = requestParams.get(principalsParameter);

    if (rawPrincipals != null) {
        // Canonicalize: split on comma (with optional spaces) and sort so
        // that "a,b" and "b, a" map to the same cache key.
        String[] principals = rawPrincipals.split(", *");
        Arrays.sort(principals);
        String cacheKey = Arrays.toString(principals);

        Query aclFilter = filterCache.get(cacheKey);
        if (aclFilter == null) {
            aclFilter = buildFilterForPrincipals(principals);
            filterCache.put(cacheKey, aclFilter);
        }

        List<Query> filters = rb.getFilters();
        if (filters == null) {
            filters = new ArrayList<Query>();
        }
        filters.add(aclFilter);
        rb.setFilters(filters);
    }

    super.prepare(rb);
}
From source file:com.zvents.solr.components.ScoreStatsComponent.java
License:Open Source License
@Override public void prepare(ResponseBuilder rb) throws IOException { boolean bComputeScores = rb.req.getParams().getBool(SCORE_PARAM, false); if (!bComputeScores) return;//from ww w . j a va 2 s .c o m List<Query> lFilters = rb.getFilters(); if (lFilters == null) { lFilters = new ArrayList<Query>(); rb.setFilters(lFilters); } //Add the post filter. lFilters.add(new ScoreStatsPostFilter()); }
From source file:com.zvents.solr.components.ScoreStatsComponent.java
License:Open Source License
/**
 * After the query has run, finds the ScoreStatsPostFilter installed by
 * prepare(), turns its accumulated sums into a stats NamedList (min/max/avg/
 * stdDev/...), caches it under the query's result key, and attaches it to the
 * response as "scoreStats". Does nothing unless SCORE_PARAM was set.
 */
@Override
public void process(ResponseBuilder rb) throws IOException {
    boolean bComputeScores = rb.req.getParams().getBool(SCORE_PARAM, false);
    if (!bComputeScores)
        return;
    // Find the postfilter and grab the stats
    List<Query> lFilters = rb.getFilters();
    if (lFilters != null) {
        for (Query q : lFilters) {
            if (q instanceof ScoreStatsPostFilter) {
                ScoreStatsPostFilter pf = (ScoreStatsPostFilter) q;
                QueryCommand cmd = rb.getQueryCommand();
                // Key mirrors how Solr identifies cached query results, so the
                // stats entry lines up with the query that produced them.
                QueryResultKey key = new QueryResultKey(cmd.getQuery(), cmd.getFilterList(), cmd.getSort(), cmd.getFlags());
                NamedList<Number> scoreStats = null;
                long iNumDocs = pf.getNumDocs();
                if (iNumDocs < 0) {
                    // A negative doc count means the filter collected nothing this
                    // time (presumably a cached-result hit); reuse the cached stats.
                    // NOTE(review): unchecked cast — cache entries under CACHE_NAME
                    // are assumed to always be NamedList<Number>.
                    scoreStats = (NamedList<Number>) rb.req.getSearcher().cacheLookup(CACHE_NAME, key);
                } else {
                    float fSumSquaredScores = pf.getSumSquaredScores();
                    float fAvg = pf.getSumScores() / iNumDocs;
                    float fMax = pf.getMaxScore();
                    // Variance via E[x^2] - E[x]^2.
                    float fVariance = pf.getSumSquaredScores() / iNumDocs - fAvg * fAvg;
                    // Falls back to 1.0 when variance is non-positive (e.g. a single
                    // doc, or float round-off driving it slightly negative).
                    float fStdDev = 1.0f;
                    if (fVariance > 0)
                        fStdDev = (float) Math.sqrt(fVariance);
                    // Basic stuff to calculate average, std-dev; min just there for information
                    scoreStats = new NamedList<Number>();
                    scoreStats.add("min", pf.getMinScore());
                    scoreStats.add("max", fMax);
                    scoreStats.add("avg", fAvg);
                    scoreStats.add("stdDev", fStdDev);
                    scoreStats.add("numDocs", iNumDocs);
                    scoreStats.add("sumSquaredScores", fSumSquaredScores);
                    scoreStats.add("sumScores", pf.getSumScores());
                    // Remember the stats so a later cached-result request can serve them.
                    rb.req.getSearcher().cacheInsert(CACHE_NAME, key, scoreStats);
                }
                if (scoreStats != null)
                    rb.rsp.add("scoreStats", scoreStats);
            }
        }
    }
}
From source file:opennlp.tools.similarity.apps.solr.IterativeQueryComponent.java
License:Apache License
/**
 * Rewrites the request so that the field referenced in the "q" parameter is
 * replaced with {@code newFieldName}, then re-parses the query, sort spec and
 * filter queries on the response builder so later components see the
 * substituted query.
 *
 * NOTE(review): exceptions during parsing are only printed via
 * printStackTrace() and the method continues with null/fallback values — a
 * null main query is normalized to an empty BooleanQuery below. Presumably
 * intentional best-effort behavior; confirm before tightening.
 *
 * @param rb           the response builder to mutate and return
 * @param newFieldName the field name to substitute into the query string
 * @return the same (mutated) response builder, for chaining
 */
private ResponseBuilder substituteField(ResponseBuilder rb, String newFieldName) {
    SolrParams params = rb.req.getParams();
    String query = params.get("q");
    // The current field is whatever sits between a space and the first ':'
    // (a leading space is prepended so a field at position 0 is also found).
    String currField = StringUtils.substringBetween(" " + query, " ", ":");
    if (currField != null && newFieldName != null)
        query = query.replace(currField, newFieldName);
    // Push the rewritten "q" back into the request parameters.
    NamedList values = params.toNamedList();
    values.remove("q");
    values.add("q", query);
    params = SolrParams.toSolrParams(values);
    rb.req.setParams(params);
    rb.setQueryString(query);
    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
    // get it from the response builder to give a different component a chance
    // to set it.
    String queryString = rb.getQueryString();
    if (queryString == null) {
        // this is the normal way it's set.
        queryString = params.get(CommonParams.Q);
        rb.setQueryString(queryString);
    }
    QParser parser = null;
    try {
        parser = QParser.getParser(rb.getQueryString(), defType, rb.req);
    } catch (Exception e) {
        // Best-effort: parse failure is logged to stderr and parser stays null.
        e.printStackTrace();
    }
    Query q = null;
    try {
        // If parser is null this throws NPE, which is swallowed here too.
        q = parser.getQuery();
    } catch (Exception e) {
        e.printStackTrace();
    }
    if (q == null) {
        // normalize a null query to a query that matches nothing
        q = new BooleanQuery();
    }
    rb.setQuery(q);
    try {
        rb.setSortSpec(parser.getSort(true));
    } catch (Exception e) {
        e.printStackTrace();
    }
    rb.setQparser(parser);
    /* try { rb.setScoreDoc(parser.getPaging()); } catch (Exception e) { e.printStackTrace(); } */
    // Re-parse every fq parameter into the builder's filter list.
    String[] fqs = rb.req.getParams().getParams(CommonParams.FQ);
    if (fqs != null && fqs.length != 0) {
        List<Query> filters = rb.getFilters();
        if (filters == null) {
            filters = new ArrayList<Query>(fqs.length);
        }
        for (String fq : fqs) {
            if (fq != null && fq.trim().length() != 0) {
                QParser fqp = null;
                try {
                    fqp = QParser.getParser(fq, null, rb.req);
                } catch (Exception e) {
                    e.printStackTrace();
                }
                try {
                    filters.add(fqp.getQuery());
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
        // only set the filters if they are not empty otherwise
        // fq=&someotherParam= will trigger all docs filter for every request
        // if filter cache is disabled
        if (!filters.isEmpty()) {
            rb.setFilters(filters);
        }
    }
    return rb;
}
From source file:org.alfresco.solr.component.spellcheck.AlfrescoSpellCheckCollator.java
License:Open Source License
/**
 * Builds spell-check collations from the ranked correction possibilities and,
 * when configured, verifies each candidate by re-running it as a real query
 * through the QueryComponent to count its hits. Candidates with hits (or all
 * candidates, when verification is disabled) become
 * {@link AlfrescoSpellCheckCollation} entries.
 *
 * NOTE(review): this method mutates the shared request (params and the
 * ALFRESCO_JSON context entry) while probing candidates; the caller appears
 * to expect that — confirm before reusing the request afterwards.
 *
 * @param result           spelling suggestions to combine into collations
 * @param originalQuery    the user's original (possibly misspelled) query text
 * @param ultimateResponse the response builder of the outer request
 * @return the list of accepted collations (never null)
 */
public List<AlfrescoSpellCheckCollation> collate(SpellingResult result, String originalQuery,
        ResponseBuilder ultimateResponse) {
    List<AlfrescoSpellCheckCollation> collations = new ArrayList<>();
    // Locate the QueryComponent among the registered components; it is needed
    // to execute verification queries.
    QueryComponent queryComponent = null;
    if (ultimateResponse.components != null) {
        for (SearchComponent sc : ultimateResponse.components) {
            if (sc instanceof QueryComponent) {
                queryComponent = (QueryComponent) sc;
                break;
            }
        }
    }
    boolean verifyCandidateWithQuery = true;
    int maxTries = maxCollationTries;
    int maxNumberToIterate = maxTries;
    if (maxTries < 1) {
        // maxCollationTries < 1 means "don't verify": iterate up to
        // maxCollations possibilities and accept them without querying.
        maxTries = 1;
        maxNumberToIterate = maxCollations;
        verifyCandidateWithQuery = false;
    }
    if (queryComponent == null && verifyCandidateWithQuery) {
        LOG.info(
                "Could not find an instance of QueryComponent. Disabling collation verification against the index.");
        maxTries = 1;
        verifyCandidateWithQuery = false;
    }
    // Normalize: non-positive limits are treated as "no early termination".
    docCollectionLimit = docCollectionLimit > 0 ? docCollectionLimit : 0;
    int maxDocId = -1;
    if (verifyCandidateWithQuery && docCollectionLimit > 0) {
        // maxDoc is needed later to extrapolate hit counts when a probe
        // terminates early.
        IndexReader reader = ultimateResponse.req.getSearcher().getIndexReader();
        maxDocId = reader.maxDoc();
    }
    // The AFTS query may arrive wrapped in the Alfresco JSON context instead
    // of the plain query string.
    JSONObject alfrescoJSON = (JSONObject) ultimateResponse.req.getContext().get(AbstractQParser.ALFRESCO_JSON);
    String originalAftsQuery = alfrescoJSON != null ? alfrescoJSON.getString("query")
            : ultimateResponse.getQueryString();
    int tryNo = 0;
    int collNo = 0;
    PossibilityIterator possibilityIter = new PossibilityIterator(result.getSuggestions(), maxNumberToIterate,
            maxCollationEvaluations, suggestionsMayOverlap);
    while (tryNo < maxTries && collNo < maxCollations && possibilityIter.hasNext()) {
        PossibilityIterator.RankedSpellPossibility possibility = possibilityIter.next();
        String collationQueryStr = getCollation(originalQuery, possibility.corrections);
        int hits = 0;
        String aftsQuery = null;
        if (verifyCandidateWithQuery) {
            tryNo++;
            SolrQueryRequest req = ultimateResponse.req;
            SolrParams origParams = req.getParams();
            ModifiableSolrParams params = new ModifiableSolrParams(origParams);
            // Apply any spellcheck.collateParam.* overrides: strip the prefix
            // and set (or, for an empty value, unset) the underlying param.
            Iterator<String> origParamIterator = origParams.getParameterNamesIterator();
            int pl = SpellingParams.SPELLCHECK_COLLATE_PARAM_OVERRIDE.length();
            while (origParamIterator.hasNext()) {
                String origParamName = origParamIterator.next();
                if (origParamName.startsWith(SpellingParams.SPELLCHECK_COLLATE_PARAM_OVERRIDE)
                        && origParamName.length() > pl) {
                    String[] val = origParams.getParams(origParamName);
                    if (val.length == 1 && val[0].length() == 0) {
                        params.set(origParamName.substring(pl), (String[]) null);
                    } else {
                        params.set(origParamName.substring(pl), val);
                    }
                }
            }
            // we don't set the 'q' param, as we'll pass the query via JSON.
            // params.set(CommonParams.Q, collationQueryStr);
            params.remove(CommonParams.START);
            params.set(CommonParams.ROWS, "" + docCollectionLimit);
            // we don't want any stored fields
            params.set(CommonParams.FL, "id");
            // we'll sort by doc id to ensure no scoring is done.
            params.set(CommonParams.SORT, "_docid_ asc");
            // If a dismax query, don't add unnecessary clauses for scoring
            params.remove(DisMaxParams.TIE);
            params.remove(DisMaxParams.PF);
            params.remove(DisMaxParams.PF2);
            params.remove(DisMaxParams.PF3);
            params.remove(DisMaxParams.BQ);
            params.remove(DisMaxParams.BF);
            // Collate testing does not support Grouping (see SOLR-2577)
            params.remove(GroupParams.GROUP);
            boolean useQStr = true;
            if (alfrescoJSON != null) {
                // Substitute the candidate collation into the AFTS query held
                // in the Alfresco JSON, instead of using the query string.
                try {
                    aftsQuery = originalAftsQuery.replaceAll(Pattern.quote(originalQuery),
                            Matcher.quoteReplacement(collationQueryStr));
                    alfrescoJSON.put("query", aftsQuery);
                    req.getContext().put(AbstractQParser.ALFRESCO_JSON, alfrescoJSON);
                    useQStr = false;
                } catch (JSONException e) {
                    LOG.warn("Exception trying to get/set the query from/to ALFRESCO_JSON.]" + e);
                }
            } else {
                aftsQuery = collationQueryStr;
            }
            req.setParams(params);
            // creating a request here... make sure to close it!
            ResponseBuilder checkResponse = new ResponseBuilder(req, new SolrQueryResponse(),
                    Arrays.<SearchComponent>asList(queryComponent));
            checkResponse.setQparser(ultimateResponse.getQparser());
            // Keep the outer request's filters so hit counts reflect the same
            // restriction set.
            checkResponse.setFilters(ultimateResponse.getFilters());
            checkResponse.components = Arrays.<SearchComponent>asList(queryComponent);
            if (useQStr) {
                checkResponse.setQueryString(collationQueryStr);
            }
            try {
                queryComponent.prepare(checkResponse);
                if (docCollectionLimit > 0) {
                    // Ask the searcher to stop collecting once the limit is hit.
                    int f = checkResponse.getFieldFlags();
                    checkResponse.setFieldFlags(f |= SolrIndexSearcher.TERMINATE_EARLY);
                }
                queryComponent.process(checkResponse);
                hits = (Integer) checkResponse.rsp.getToLog().get("hits");
            } catch (EarlyTerminatingCollectorException etce) {
                assert (docCollectionLimit > 0);
                assert 0 < etce.getNumberScanned();
                assert 0 < etce.getNumberCollected();
                if (etce.getNumberScanned() == maxDocId) {
                    hits = etce.getNumberCollected();
                } else {
                    // Scan stopped early: extrapolate the total hit count from
                    // the scanned/collected ratio.
                    hits = (int) (((float) (maxDocId * etce.getNumberCollected())) / (float) etce.getNumberScanned());
                }
            } catch (Exception e) {
                LOG.warn("Exception trying to re-query to check if a spell check possibility would return any hits."
                        + e);
            } finally {
                checkResponse.req.close();
            }
        }
        if (hits > 0 || !verifyCandidateWithQuery) {
            collNo++;
            AlfrescoSpellCheckCollation collation = new AlfrescoSpellCheckCollation();
            collation.setCollationQuery(aftsQuery);
            collation.setCollationQueryString(collationQueryStr);
            collation.setHits(hits);
            // When suggestions may overlap, fold the possibility's index into
            // the rank so ordering stays stable.
            collation.setInternalRank(
                    suggestionsMayOverlap ? ((possibility.rank * 1000) + possibility.index) : possibility.rank);
            NamedList<String> misspellingsAndCorrections = new NamedList<>();
            for (SpellCheckCorrection corr : possibility.corrections) {
                misspellingsAndCorrections.add(corr.getOriginal().toString(), corr.getCorrection());
            }
            collation.setMisspellingsAndCorrections(misspellingsAndCorrections);
            collations.add(collation);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Collation: " + aftsQuery
                    + (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : ""));
        }
    }
    return collations;
}
From source file:org.apache.manifoldcf.examples.ManifoldCFSecurityFilter.java
License:Apache License
/** SearchComponent prepare() method.
 * All SearchComponents have this method. This one modifies the query based on the input parameters:
 * it builds a security filter from the authenticated user's access tokens and appends it to the
 * request's filter list, so only documents the user may see are matched.
 *@param rb is the response builder object, which contains both the input and the response.
 */
@Override
public void prepare(ResponseBuilder rb) throws IOException {
    // Get the request parameters
    SolrParams params = rb.req.getParams();
    // Log that we got here
    LOG.info("prepare() entry params:\n" + params + "\ncontext: " + rb.req.getContext());
    // Certain queries make it through unmodified.
    String qry = (String) params.get(CommonParams.Q);
    if (qry != null) {
        // Check global allowed searches
        for (String ga : globalAllowed) {
            if (qry.equalsIgnoreCase(ga.trim()))
                // Allow this query through unchanged
                return;
        }
    }
    // Get the authorization domain from the parameters (if any)
    String authorizationDomain = params.get(AUTHORIZATION_DOMAIN_NAME);
    if (authorizationDomain == null)
        authorizationDomain = "";
    // Get the authenticated user name from the parameters
    String authenticatedUserName = params.get(AUTHENTICATED_USER_NAME);
    if (authenticatedUserName == null)
        // We could just throw an error, but then many of the innocent queries the Solr does would fail. So, just return instead.
        return;
    LOG.info("ManifoldCFSecurityFilter: Trying to match docs for user '" + authenticatedUserName + "'");
    // Check the configuration arguments for validity
    if (connection == null) {
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                "Error initializing ManifoldCFSecurityFilter component: 'AuthorityServiceBaseURL' init parameter required");
    }
    // Talk to the authority service and get the access tokens
    List<String> userAccessTokens = getAccessTokens(authorizationDomain, authenticatedUserName);
    // Build a new boolean query, which we'll add to the query at the end
    BooleanQuery bq = new BooleanQuery();
    // "Open" means the ACL field holds the dedicated empty-field marker value,
    // i.e. the document carries no restriction for that facet.
    Query allowShareOpen = new TermQuery(new Term(fieldAllowShare, EMPTY_FIELD_VALUE));
    Query denyShareOpen = new TermQuery(new Term(fieldDenyShare, EMPTY_FIELD_VALUE));
    Query allowParentOpen = new TermQuery(new Term(fieldAllowParent, EMPTY_FIELD_VALUE));
    Query denyParentOpen = new TermQuery(new Term(fieldDenyParent, EMPTY_FIELD_VALUE));
    Query allowDocumentOpen = new TermQuery(new Term(fieldAllowDocument, EMPTY_FIELD_VALUE));
    Query denyDocumentOpen = new TermQuery(new Term(fieldDenyDocument, EMPTY_FIELD_VALUE));
    if (userAccessTokens.size() == 0) {
        // Only open documents can be included.
        // That query is:
        // (fieldAllowShare is empty AND fieldDenyShare is empty AND fieldAllowDocument is empty AND fieldDenyDocument is empty)
        // We're trying to map to: -(fieldAllowShare:*), which is not the best way to do this kind of thing in Lucene.
        // Filter caching makes it tolerable, but a much better approach is to use a default value as a dedicated term to match.
        // That is what we do below.
        bq.add(allowShareOpen, BooleanClause.Occur.MUST);
        bq.add(denyShareOpen, BooleanClause.Occur.MUST);
        bq.add(allowParentOpen, BooleanClause.Occur.MUST);
        bq.add(denyParentOpen, BooleanClause.Occur.MUST);
        bq.add(allowDocumentOpen, BooleanClause.Occur.MUST);
        bq.add(denyDocumentOpen, BooleanClause.Occur.MUST);
    } else {
        // Extend the query appropriately for each user access token:
        // one MUST subclause per (allow, deny) field pair.
        bq.add(calculateCompleteSubquery(fieldAllowShare, fieldDenyShare, allowShareOpen, denyShareOpen,
                userAccessTokens), BooleanClause.Occur.MUST);
        bq.add(calculateCompleteSubquery(fieldAllowParent, fieldDenyParent, allowParentOpen, denyParentOpen,
                userAccessTokens), BooleanClause.Occur.MUST);
        bq.add(calculateCompleteSubquery(fieldAllowDocument, fieldDenyDocument, allowDocumentOpen, denyDocumentOpen,
                userAccessTokens), BooleanClause.Occur.MUST);
    }
    // Concatenate with the user's original query by appending the security
    // clause as a (constant-score, cacheable) filter query.
    List<Query> list = rb.getFilters();
    if (list == null) {
        list = new ArrayList<Query>();
        rb.setFilters(list);
    }
    list.add(new ConstantScoreQuery(bq));
}
From source file:org.sakaiproject.nakamura.solr.NakamuraQueryComponent.java
License:Apache License
/**
 * Appends the given query to the request's filter list, creating the list
 * first when no filters have been set on the builder yet.
 *
 * @param rb the response builder whose filter list is extended
 * @param f  the filter query to add
 */
private void addFilter(ResponseBuilder rb, Query f) {
    List<Query> current = rb.getFilters();
    List<Query> updated = (current == null) ? new ArrayList<Query>() : current;
    updated.add(f);
    rb.setFilters(updated);
}