Example usage for org.apache.solr.common.params CommonParams ROWS

List of usage examples for org.apache.solr.common.params CommonParams ROWS

Introduction

On this page you can find example usage of org.apache.solr.common.params CommonParams ROWS.

Prototype

String ROWS

Document

number of documents to return starting at "start"
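
A minimal SolrJ sketch of setting the parameter (the query string, row count, and class name below are illustrative assumptions, not taken from the projects listed under Usage):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CommonParams;

public class RowsExample {
    public static void main(String[] args) {
        SolrQuery query = new SolrQuery("*:*");
        // ROWS caps how many documents are returned, counted from START
        query.set(CommonParams.ROWS, 20);
        // START is the 0-based index of the first document to return
        query.set(CommonParams.START, 0);
        System.out.println(query); // URL-encoded parameter string containing rows=20 and start=0
    }
}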

Usage

From source file:com.tsgrp.solr.handler.NYPhilTagAutoCompleteHandler.java

License:Mozilla Public License

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse res)
        throws Exception, ParseException, InstantiationException, IllegalAccessException {
    NamedList<Object> params = req.getParams().toNamedList();

    params.add(CommonParams.ROWS, 0);
    params.add(FacetParams.FACET, true);
    params.add(FacetParams.FACET_FIELD, NYPhilSolrConstants.NPT_CONTENT_FACET);
    params.add(FacetParams.FACET_MINCOUNT, 1);
    params.add(FacetParams.FACET_SORT, FacetParams.FACET_SORT_INDEX);
    params.add(CommonParams.HEADER_ECHO_PARAMS, "explicit");
    params.add(CommonParams.WT, "json");
    params.add("json.nl", "map");

    String query = (String) params.get(PARAM_VALUE);
    if (query == null || query.length() == 0) {
        query = "*";
    } else {
        query = QueryParser.escape(query.toLowerCase());
    }

    String[] queryTerms = query.split(" ");

    // wrap our query term in parentheses to allow searching on terms separated by whitespace
    StringBuffer q = new StringBuffer();

    // for each query term, require the term with a trailing wildcard match
    for (String queryTerm : queryTerms) {

        // remove all non-alphanumeric chars from the query term
        queryTerm = QUERY_TERM_REGEX.matcher(queryTerm.toLowerCase()).replaceAll("");
        q.append("+").append(NYPhilSolrConstants.NPT_CONTENT_ESC).append(":")
                .append(QueryParser.escape(queryTerm)).append("* ");
    }
    q.append("+").append(NYPhilSolrConstants.NPT_STATUS_ESC).append(":")
            .append(NYPhilSolrConstants.STATUS_APPROVED);

    params.add(CommonParams.Q, q.toString());

    if (logger.isDebugEnabled()) {
        logger.debug("Autocomplete Query: " + q.toString());
    }

    String cb = (String) params.get(PARAM_CALLBACK);
    if (cb != null && cb.length() > 0) {
        params.add("json.wrf", cb);
    }

    req.setParams(SolrParams.toSolrParams(params));

    super.handleRequestBody(req, res);
}

From source file:de.qaware.chronix.solr.query.analysis.AnalysisHandler.java

License:Apache License

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    LOGGER.debug("Handling analysis request {}", req);
    //First check if the request should return documents => rows > 0
    SolrParams params = req.getParams();
    String rowsParam = params.get(CommonParams.ROWS, null);
    int rows = -1;
    if (rowsParam != null) {
        rows = Integer.parseInt(rowsParam);
    }

    SolrDocumentList results = new SolrDocumentList();
    String[] filterQueries = req.getParams().getParams(CommonParams.FQ);

    //Do a query and collect them on the join function
    Map<String, List<SolrDocument>> collectedDocs = findDocuments(req,
            JoinFunctionEvaluator.joinFunction(filterQueries));

    //If no rows should be returned, we only return numFound
    if (rows == 0) {
        results.setNumFound(collectedDocs.keySet().size());
    } else {
        //Otherwise return the aggregated time series
        long queryStart = Long.parseLong(params.get(ChronixQueryParams.QUERY_START_LONG));
        long queryEnd = Long.parseLong(params.get(ChronixQueryParams.QUERY_END_LONG));

        //We have an analysis query
        List<SolrDocument> aggregatedDocs = analyze(collectedDocs,
                AnalysisQueryEvaluator.buildAnalysis(filterQueries), queryStart, queryEnd);

        results.addAll(aggregatedDocs);
        results.setNumFound(aggregatedDocs.size());
    }
    rsp.add("response", results);
    LOGGER.debug("Sending response {}",
            rsp.getToLogAsString(String.join("-", filterQueries == null ? "" : "")) + "/");

}

From source file:edu.toronto.cs.cidb.solr.SolrScriptService.java

License:Open Source License

/**
 * Perform a search, falling back on the suggested spellchecked query if the original query fails to return any
 * results.
 * 
 * @param params the Solr parameters to use, should contain at least a value for the "q" parameter; use
 *        {@link #getSolrQuery(String, int, int)} to get the proper parameter expected by this method
 * @return the list of matching documents, empty if there are no matching terms
 */
private SolrDocumentList search(MapSolrParams params) {
    try {
        QueryResponse response = this.server.query(params);
        SolrDocumentList results = response.getResults();
        if (results.size() == 0 && !response.getSpellCheckResponse().isCorrectlySpelled()) {
            String suggestedQuery = response.getSpellCheckResponse().getCollatedResult();
            // The spellcheck doesn't preserve the identifiers, manually
            // correct this
            suggestedQuery = suggestedQuery.replaceAll("term_category:hip", "term_category:HP");
            MapSolrParams newParams = new MapSolrParams(
                    getSolrQuery(suggestedQuery, params.get(CommonParams.SORT),
                            params.getInt(CommonParams.ROWS, -1), params.getInt(CommonParams.START, 0)));
            return this.server.query(newParams).getResults();
        } else {
            return results;
        }
    } catch (SolrServerException ex) {
        this.logger.error("Failed to search: {}", ex.getMessage(), ex);
    }
    return null;
}

From source file:edu.toronto.cs.cidb.solr.SolrScriptService.java

License:Open Source License

/**
 * Convert a Lucene query string into a map of Solr parameters. More specifically, places the input query under the
 * "q" parameter, and adds parameters for requesting a spellcheck result.
 *
 * @param query the lucene query string to use
 * @param sort the sort criteria ("field_name order")
 * @param rows the number of items to return, or -1 to use the default number of results
 * @param start the number of items to skip, i.e. the index of the first hit to return, 0-based
 * @return a map of Solr query parameters ready to be used for constructing a {@link MapSolrParams} object
 */
private Map<String, String> getSolrQuery(String query, String sort, int rows, int start) {
    Map<String, String> result = new HashMap<String, String>();
    result.put(CommonParams.START, start + "");
    if (rows > 0) {
        result.put(CommonParams.ROWS, rows + "");
    }
    result.put(CommonParams.Q, query);
    if (!StringUtils.isBlank(sort)) {
        result.put(CommonParams.SORT, sort);
    }
    result.put("spellcheck", Boolean.toString(true));
    result.put("spellcheck.collate", Boolean.toString(true));
    result.put("spellcheck.onlyMorePopular", Boolean.toString(true));
    return result;
}

From source file:edu.toronto.cs.phenotips.solr.AbstractSolrScriptService.java

License:Open Source License

/**
 * Convert a Lucene query string into a map of Solr parameters. More specifically, places the input query under the
 * "q" parameter, and adds parameters for requesting a spellcheck result.
 *
 * @param query the lucene query string to use
 * @param sort the sort criteria ("field_name order")
 * @param rows the number of items to return, or -1 to use the default number of results
 * @param start the number of items to skip, i.e. the index of the first hit to return, 0-based
 * @return a map of Solr query parameters ready to be used for constructing a {@link MapSolrParams} object
 */
private Map<String, String> getSolrQuery(String query, String sort, int rows, int start) {
    Map<String, String> result = new HashMap<String, String>();
    result.put(CommonParams.START, start + "");
    if (rows > 0) {
        result.put(CommonParams.ROWS, rows + "");
    }
    result.put(CommonParams.Q, query);
    if (StringUtils.isNotBlank(sort)) {
        result.put(CommonParams.SORT, sort);
    }
    result.put("spellcheck", Boolean.toString(true));
    result.put("spellcheck.collate", Boolean.toString(true));
    return result;
}

From source file:edu.toronto.cs.phenotips.solr.OmimScriptService.java

License:Open Source License

/**
 * Prepare the map of parameters that can be passed to a Solr query, in order to get a list of diseases matching the
 * selected positive and negative phenotypes.
 *
 * @param phenotypes the list of already selected phenotypes
 * @param nphenotypes phenotypes that are not observed in the patient
 * @return the computed Solr query parameters
 */
private MapSolrParams prepareParams(Collection<String> phenotypes, Collection<String> nphenotypes) {
    Map<String, String> params = new HashMap<String, String>();
    String q = "symptom:" + StringUtils.join(phenotypes, " symptom:");
    if (nphenotypes.size() > 0) {
        q += "  not_symptom:" + StringUtils.join(nphenotypes, " not_symptom:");
    }
    params.put(CommonParams.Q, q.replaceAll("HP:", "HP\\\\:"));
    params.put(CommonParams.ROWS, "100");
    params.put(CommonParams.START, "0");
    params.put(CommonParams.DEBUG_QUERY, Boolean.toString(true));
    params.put(CommonParams.EXPLAIN_STRUCT, Boolean.toString(true));

    return new MapSolrParams(params);
}

From source file:io.vertigo.dynamo.plugins.search.solr.SolrStatement.java

License:Apache License

/**
 * @return the number of indexed documents
 */
public long count() {
    final SolrQuery solrQuery = new SolrQuery();
    solrQuery.set(CommonParams.ROWS, 0);
    solrQuery.setQuery("*:*");
    final QueryResponse queryResponse = executeQuery(solrQuery);
    return queryResponse.getResults().getNumFound();
}

From source file:io.vertigo.dynamo.plugins.search.solr.SolrStatement.java

License:Apache License

private static SolrQuery createSolrQuery(final SearchQuery searchQuery,
        final IndexFieldNameResolver indexFieldNameResolver, final FacetedQuery filtersQuery,
        final int rowsPerQuery) {
    final SolrQuery solrQuery = new SolrQuery();
    solrQuery.set(CommonParams.ROWS, rowsPerQuery);
    solrQuery.setFields(SolrDocumentCodec.URN, SolrDocumentCodec.FULL_RESULT);
    if (searchQuery.isSortActive()) {
        final DtField sortField = searchQuery.getIndexDefinition().getIndexDtDefinition()
                .getField(searchQuery.getSortField());
        final String indexSortFieldName = indexFieldNameResolver.obtainIndexFieldName(sortField);
        solrQuery.addSortField(indexSortFieldName, searchQuery.getSortAsc() ? ORDER.asc : ORDER.desc);
    }
    //solrQuery.set(CommonParams.START, 0); //could be used as an offset
    final StringBuilder query = new StringBuilder();
    if (searchQuery.isBoostMostRecent()) {
        appendBoostMostRecent(searchQuery, query);
    }
    query.append(translateToSolr(searchQuery.getListFilter(), indexFieldNameResolver));
    solrQuery.setQuery(query.toString());

    for (final ListFilter facetQuery : filtersQuery.getListFilters()) {
        solrQuery.addFilterQuery(translateToSolr(facetQuery, indexFieldNameResolver));
    }
    solrQuery.setHighlight(true);
    solrQuery.setParam("hl.fl", "*");
    solrQuery.setHighlightSnippets(3);
    solrQuery.setParam("hl.mergeContiguous", true);
    //Below: to also get facets with a document count of 0
    //Disabled for now, since they can be derived from the facet definitions, except for those built from dictionary words, whose number we do not control
    //solrQuery.setParam("facet.missing", true);
    //solrQuery.setParam("facet.missing", true);

    return solrQuery;
}

From source file:jp.aegif.nemaki.cmis.aspect.query.solr.SolrQueryProcessor.java

License:Open Source License

@Override
public ObjectList query(CallContext callContext, String repositoryId, String statement,
        Boolean searchAllVersions, Boolean includeAllowableActions, IncludeRelationships includeRelationships,
        String renditionFilter, BigInteger maxItems, BigInteger skipCount, ExtensionsData extension) {

    SolrServer solrServer = solrUtil.getSolrServer();
    // un-escape the backslashed colons inside TIMESTAMP literals only
    Pattern time_p = Pattern.compile("(TIMESTAMP\\s?'[\\-\\d]*T\\d{2})\\\\:(\\d{2})\\\\:([\\.\\d]*Z')",
            Pattern.CASE_INSENSITIVE);
    Matcher time_m = time_p.matcher(statement);
    statement = time_m.replaceAll("$1:$2:$3");

    // TODO walker is required?

    QueryUtilStrict util = new QueryUtilStrict(statement, new CmisTypeManager(repositoryId, typeManager), null);
    QueryObject queryObject = util.getQueryObject();
    // Get where clause as Tree
    Tree whereTree = null;
    try {
        util.processStatement();
        Tree tree = util.parseStatement();
        whereTree = extractWhereTree(tree);
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Build solr statement of WHERE
    String whereQueryString = "";
    if (whereTree == null || whereTree.isNil()) {
        whereQueryString = "*:*";
    } else {
        try {
            SolrPredicateWalker solrPredicateWalker = new SolrPredicateWalker(repositoryId, queryObject,
                    solrUtil, contentService);
            Query whereQuery = solrPredicateWalker.walkPredicate(whereTree);
            whereQueryString = whereQuery.toString();
        } catch (Exception e) {
            e.printStackTrace();
            // TODO Output more detailed exception
            exceptionService.invalidArgument("Invalid CMIS SQL statement!");
        }
    }

    // Build solr query of FROM
    String fromQueryString = "";

    String repositoryQuery = "repository_id:" + repositoryId;

    fromQueryString += repositoryQuery + " AND ";
    TypeDefinition td = null;

    td = queryObject.getMainFromName();

    // includedInSupertypeQuery
    List<TypeDefinitionContainer> typeDescendants = typeManager.getTypesDescendants(repositoryId, td.getId(),
            BigInteger.valueOf(-1), false);
    Iterator<TypeDefinitionContainer> iterator = typeDescendants.iterator();
    List<String> tables = new ArrayList<String>();
    while (iterator.hasNext()) {
        TypeDefinition descendant = iterator.next().getTypeDefinition();
        if (!td.getId().equals(descendant.getId())) {
            boolean isq = (descendant.isIncludedInSupertypeQuery() == null) ? false
                    : descendant.isIncludedInSupertypeQuery();
            if (!isq)
                continue;
        }
        String table = descendant.getQueryName();
        tables.add(table.replaceAll(":", "\\\\:"));
    }

    //      Term t = new Term(
    //            solrUtil.getPropertyNameInSolr(PropertyIds.OBJECT_TYPE_ID),
    //            StringUtils.join(tables, " "));
    //      fromQueryString += new TermQuery(t).toString();
    fromQueryString += "(" + solrUtil.getPropertyNameInSolr(repositoryId, PropertyIds.OBJECT_TYPE_ID) + ":"
            + StringUtils.join(tables,
                    " " + solrUtil.getPropertyNameInSolr(repositoryId, PropertyIds.OBJECT_TYPE_ID) + ":")
            + ")";

    // Execute query
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.setQuery(whereQueryString);
    solrQuery.setFilterQueries(fromQueryString);

    logger.info(solrQuery.toString());
    logger.info("statement: " + statement);
    logger.info("skipCount: " + skipCount);
    logger.info("maxItems: " + maxItems);
    if (skipCount == null) {
        solrQuery.set(CommonParams.START, 0);
    } else {
        solrQuery.set(CommonParams.START, skipCount.intValue());
    }
    if (maxItems == null) {
        solrQuery.set(CommonParams.ROWS, 50);
    } else {
        solrQuery.set(CommonParams.ROWS, maxItems.intValue());
    }

    QueryResponse resp = null;
    try {
        resp = solrServer.query(solrQuery);
    } catch (SolrServerException e) {
        e.printStackTrace();
    }

    long numFound = 0;
    // Output search results to ObjectList
    if (resp != null && resp.getResults() != null && resp.getResults().getNumFound() != 0) {
        SolrDocumentList docs = resp.getResults();
        numFound = docs.getNumFound();

        List<Content> contents = new ArrayList<Content>();
        for (SolrDocument doc : docs) {
            String docId = (String) doc.getFieldValue("object_id");
            Content c = contentService.getContent(repositoryId, docId);

            // If for some reason the content is missing from the DB, skip it
            if (c == null) {
                logger.warn("[objectId=" + docId + "] Missing in DB but still present in Solr.");
            } else {
                contents.add(c);
            }

        }

        List<Lock> locks = threadLockService.readLocks(repositoryId, contents);
        try {
            threadLockService.bulkLock(locks);

            // Filter out by permissions
            List<Content> permitted = permissionService.getFiltered(callContext, repositoryId, contents);

            // Filter return value with SELECT clause
            Map<String, String> requestedWithAliasKey = queryObject.getRequestedPropertiesByAlias();
            String filter = null;
            if (!requestedWithAliasKey.keySet().contains("*")) {
                // Create filter(queryNames) from query aliases
                filter = StringUtils.join(requestedWithAliasKey.values(), ",");
            }

            // Build ObjectList
            String orderBy = orderBy(queryObject);
            ObjectList result = compileService.compileObjectDataListForSearchResult(callContext, repositoryId,
                    permitted, filter, includeAllowableActions, includeRelationships, renditionFilter, false,
                    maxItems, skipCount, false, orderBy, numFound);

            return result;

        } finally {
            threadLockService.bulkUnlock(locks);
        }
    } else {
        ObjectListImpl nullList = new ObjectListImpl();
        nullList.setHasMoreItems(false);
        nullList.setNumItems(BigInteger.ZERO);
        return nullList;
    }
}

From source file:lux.solr.XQueryComponent.java

License:Mozilla Public License

@Override
public void process(ResponseBuilder rb) throws IOException {
    if (rb.grouping()) {
        throw new SolrException(ErrorCode.BAD_REQUEST, "grouping not supported for XQuery");
    }
    SolrQueryRequest req = rb.req;
    SolrParams params = req.getParams();
    if (!params.getBool(XQUERY_COMPONENT_NAME, true)) {
        // TODO -- what is this for? who would pass xquery=false??
        return;
    }
    int start = params.getInt(CommonParams.START, 1);
    int len = params.getInt(CommonParams.ROWS, -1);
    try {
        evaluateQuery(rb, start, len);
    } finally {
        solrIndexConfig.returnSerializer(serializer);
    }
}