Example usage for org.apache.solr.client.solrj SolrQuery setTermsLimit

Introduction

This page lists example usages of org.apache.solr.client.solrj.SolrQuery#setTermsLimit.

Prototype

public SolrQuery setTermsLimit(int limit) 
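
Before the project examples, here is a minimal, self-contained sketch of a terms query that caps its result size with setTermsLimit. The core URL "http://localhost:8983/solr/mycore", the field name "name", and the limit of 20 are illustrative assumptions, not values taken from the examples below; newer SolrJ versions use HttpSolrClient, whereas the older examples on this page use SolrServer.

import java.util.List;

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.TermsResponse;

public class SetTermsLimitExample {
    public static void main(String[] args) throws Exception {
        // Assumed core URL and field name; adjust for your own index.
        try (HttpSolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build()) {
            SolrQuery query = new SolrQuery();
            query.setRequestHandler("/terms"); // route the request to the terms component handler
            query.setTerms(true);              // enable the terms component
            query.addTermsField("name");       // field whose indexed terms are listed
            query.setTermsLimit(20);           // return at most 20 terms; -1 removes the limit

            QueryResponse response = client.query(query);
            List<TermsResponse.Term> terms = response.getTermsResponse().getTerms("name");
            if (terms != null) {
                for (TermsResponse.Term term : terms) {
                    System.out.println(term.getTerm() + " (" + term.getFrequency() + ")");
                }
            }
        }
    }
}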

Usage

From source file: com.frank.search.solr.core.TermsQueryParser.java

License: Apache License

protected void appendTermsOptionsToSolrQuery(TermsOptions options, SolrQuery solrQuery) {
    // Map each populated TermsOptions value onto the corresponding Solr terms component parameter.
    solrQuery.setTerms(true);
    if (options.getLimit() >= 0) {
        solrQuery.setTermsLimit(options.getLimit());
    }
    if (options.getMaxCount() >= -1) {
        solrQuery.setTermsMaxCount(options.getMaxCount());
    }
    if (options.getMinCount() >= 0) {
        solrQuery.setTermsMinCount(options.getMinCount());
    }
    if (StringUtils.hasText(options.getPrefix())) {
        solrQuery.setTermsPrefix(options.getPrefix());
    }
    if (StringUtils.hasText(options.getRegex())) {
        solrQuery.setTermsRegex(options.getRegex());
    }
    if (options.getRegexFlag() != null) {
        solrQuery.setTermsRegexFlag(options.getRegexFlag().toString().toLowerCase());
    }
    if (options.getSort() != null) {
        solrQuery.setTermsSortString(options.getSort().toString().toLowerCase());
    }
    if (options.getUpperBoundTerm() != null) {
        solrQuery.setTermsUpper(options.getUpperBoundTerm().getTerm());
        solrQuery.setTermsUpperInclusive(options.getUpperBoundTerm().isInclude());
    }
    if (options.getLowerBoundTerm() != null) {
        solrQuery.setTermsLower(options.getLowerBoundTerm().getTerm());
        solrQuery.setTermsLowerInclusive(options.getLowerBoundTerm().isInclude());
    }
    if (!options.isRaw()) {
        solrQuery.setTermsRaw(options.isRaw());
    }

}

From source file: com.temenos.interaction.commands.solr.TermsCommand.java

License: Open Source License

public Result execute(InteractionContext ctx) {

    MultivaluedMap<String, String> queryParams = ctx.getQueryParameters();
    String entityName = ctx.getCurrentState().getEntityName();

    try {
        String queryStr = queryParams.getFirst("q");
        SolrQuery query = new SolrQuery();
        query.setRequestHandler("/terms");
        //         query.setFields("id", "name", "mnemonic", "address", "postcode");
        query.setQuery(queryStr);
        // TODO make these configurable
        query.addTermsField("name");
        query.addTermsField("mnemonic");
        query.setTermsPrefix(queryStr);
        query.setTermsSortString("count");
        query.setTerms(true);
        query.setTermsLimit(10);

        QueryResponse rsp = solrServer.query(query);
        ctx.setResource(buildCollectionResource(entityName, "name", rsp.getTermsResponse().getTermMap()));
        return Result.SUCCESS;
    } catch (SolrServerException e) {
        logger.error("An unexpected error occurred while querying Solr", e);
    }

    return Result.FAILURE;
}

From source file: fr.cnes.sitools.metacatalogue.resources.opensearch.OpensearchDescribeResource.java

License: Open Source License

/**
 * Create a description for the metacatalogue containing all the request parameters, their types and enumeration
 *
 * @return a description for the metacatalogue
 */
private Describe createDescribe() {
    Describe describe = new Describe();
    SolrServer server = SolRUtils.getSolRServer(solrCoreUrl);
    if (server == null) {
        throw new ResourceException(Status.SERVER_ERROR_INTERNAL,
                "Solr core : " + solrCoreUrl + " not reachable");
    }

    LukeRequest request = new LukeRequest();
    // request.setNumTerms(maxTopTerms);

    try {
        List<Filter> filters = new ArrayList<Filter>();
        LukeResponse response = request.process(server);
        int numDocs = response.getNumDocs();

        Map<String, LukeResponse.FieldInfo> fields = response.getFieldInfo();

        for (Entry<String, LukeResponse.FieldInfo> field : fields.entrySet()) {
            LukeResponse.FieldInfo fieldInfo = field.getValue();
            String fieldName = fieldInfo.getName();

            boolean indexed = false;

            EnumSet<FieldFlag> flags = FieldInfo.parseFlags(fieldInfo.getSchema());
            indexed = (flags != null && flags.contains(FieldFlag.INDEXED));

            if (indexed && addToDescription(fieldName)) {

                // make a terms query to get the top terms
                SolrQuery query = new SolrQuery();
                query.setRequestHandler("/terms");
                query.setTerms(true);
                query.addTermsField(fieldName);
                query.setTermsLimit(maxTopTerms);
                query.setTermsMinCount(1);

                QueryRequest termsRequest = new QueryRequest(query);
                TermsResponse termsResponse = termsRequest.process(server).getTermsResponse();

                List<Term> terms = termsResponse.getTerms(fieldName);

                Filter filter = new Filter();
                filter.setId(fieldName);
                filter.setTitle(fieldName);
                if (canBeCategorised(terms)) {
                    filter.setType(FilterType.enumeration);
                    filter.setPopulation(numDocs);
                    filter.setSon(createSons(terms));
                } else {
                    filter.setType(getFilterType(fieldInfo.getType()));
                }
                filters.add(filter);
            }
        }
        describe.setFilters(filters);
        return describe;
    } catch (SolrServerException e) {
        throw new ResourceException(Status.SERVER_ERROR_INTERNAL, e.getMessage(), e);
    } catch (IOException e) {
        throw new ResourceException(Status.SERVER_ERROR_INTERNAL, e.getMessage(), e);
    }
}

From source file: fr.cnes.sitools.metacatalogue.resources.suggest.OpensearchSuggestResource.java

License: Open Source License

@Get
public List<SuggestDTO> suggest(Variant variant) {
    String query = getRequest().getResourceRef().getQueryAsForm().getFirstValue("q");
    if (query == null || query.isEmpty()) {
        getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST, "No suggestion parameter");
        return null;
    }

    try {
        ThesaurusSearcher searcher = new ThesaurusSearcher(thesaurusName);
        List<SuggestDTO> suggests = new ArrayList<SuggestDTO>();

        List<Concept> concepts = searcher.searchNarrowersBroader(query + "*", getLanguage());
        for (Concept concept : concepts) {
            SuggestDTO suggestDTO = new SuggestDTO();
            suggestDTO.setSuggestion(concept.getProperties().get("prefLabelNarrower").toString());
            suggestDTO.setSuggestionAltLabel(concept.getProperties().get("altLabelNarrower").toString());
            suggests.add(suggestDTO);
        }

        // get suggestion number in the metacatalogue then
        if (suggests.size() > 0) {
            SolrServer server = getSolrServer(getContext());

            SolrQuery solrQuery = new SolrQuery();
            solrQuery.setRequestHandler("/terms");
            solrQuery.setTerms(true);
            solrQuery.setTermsLimit(-1);
            solrQuery.addTermsField(MetacatalogField._CONCEPTS.getField());

            QueryResponse rsp;
            try {
                QueryRequest request = new QueryRequest(solrQuery);
                rsp = request.process(server);
                TermsResponse termsResponse = rsp.getTermsResponse();
                List<TermsResponse.Term> terms = termsResponse.getTerms(MetacatalogField._CONCEPTS.getField());
                Map<String, Long> map = createMapFromTerms(terms);
                Long nb = null;
                for (SuggestDTO suggest : suggests) {
                    if (map != null) {
                        nb = map.get(suggest.getSuggestionAltLabel());
                    }
                    if (nb == null) {
                        suggest.setNb(0);
                    } else {
                        suggest.setNb(nb);
                    }
                }
            } catch (SolrServerException e) {
                getLogger().warning("Cannot access Solr server, no suggestion number returned");
            }
        }

        return suggests;
    } catch (IOException e) {
        getResponse().setStatus(Status.SERVER_ERROR_INTERNAL, "Cannot read Thesaurus: " + thesaurusName);
        return null;
    }

}

From source file: org.sleuthkit.autopsy.keywordsearch.TermComponentQuery.java

License: Open Source License

protected SolrQuery createQuery() {
    final SolrQuery q = new SolrQuery();
    q.setRequestHandler(TERMS_HANDLER);
    q.setTerms(true);
    q.setTermsLimit(TERMS_UNLIMITED);
    q.setTermsRegexFlag("case_insensitive"); //NON-NLS
    //q.setTermsLimit(200);
    //q.setTermsRegexFlag(regexFlag);
    //q.setTermsRaw(true);
    q.setTermsRegex(queryEscaped);
    q.addTermsField(TERMS_SEARCH_FIELD);
    q.setTimeAllowed(TERMS_TIMEOUT);

    return q;

}

From source file: org.sleuthkit.autopsy.keywordsearch.TermComponentQuery.java

License: Open Source License

@Override
public QueryResults performQuery() throws NoOpenCoreException {

    final SolrQuery q = createQuery();
    q.setShowDebugInfo(DEBUG);
    q.setTermsLimit(MAX_TERMS_RESULTS);
    logger.log(Level.INFO, "Query: {0}", q.toString()); //NON-NLS
    terms = executeQuery(q);

    QueryResults results = new QueryResults(this, keywordList);
    int resultSize = 0;

    for (Term term : terms) {
        final String termStr = KeywordSearchUtil.escapeLuceneQuery(term.getTerm());

        LuceneQuery filesQuery = new LuceneQuery(keywordList, new Keyword(termStr, true));

        //filesQuery.setField(TERMS_SEARCH_FIELD);
        for (KeywordQueryFilter filter : filters) {
            //set filter
            //note: we can't set filter query on terms query
            //but setting filter query on terms results query will yield the same result
            filesQuery.addFilter(filter);
        }
        try {
            QueryResults subResults = filesQuery.performQuery();
            Set<KeywordHit> filesResults = new HashSet<>();
            for (Keyword key : subResults.getKeywords()) {
                List<KeywordHit> keyRes = subResults.getResults(key);
                resultSize += keyRes.size();
                filesResults.addAll(keyRes);
            }
            results.addResult(new Keyword(term.getTerm(), false), new ArrayList<>(filesResults));
        } catch (NoOpenCoreException e) {
            logger.log(Level.WARNING, "Error executing Solr query,", e); //NON-NLS
            throw e;
        } catch (RuntimeException e) {
            logger.log(Level.WARNING, "Error executing Solr query,", e); //NON-NLS
        }

    }

    //TODO limit how many results we store, not to hit memory limits
    logger.log(Level.INFO, "Regex # results: {0}", resultSize); //NON-NLS

    return results;
}

From source file: org.sleuthkit.autopsy.keywordsearch.TermsComponentQuery.java

License: Open Source License

/**
 * Executes the regex query as a two step operation. In the first step, the
 * Solr terms component is used to find any terms in the index that match
 * the regex. In the second step, term queries are executed for each matched
 * term to produce the set of keyword hits for the regex.
 *
 * @return A QueryResult object or null.
 *
 * @throws NoOpenCoreException
 */
@Override
public QueryResults performQuery() throws KeywordSearchModuleException, NoOpenCoreException {
    /*
     * Do a query using the Solr terms component to find any terms in the
     * index that match the regex.
     */
    final SolrQuery termsQuery = new SolrQuery();
    termsQuery.setRequestHandler(SEARCH_HANDLER);
    termsQuery.setTerms(true);
    termsQuery.setTermsRegexFlag(CASE_INSENSITIVE);
    termsQuery.setTermsRegex(searchTerm);
    termsQuery.addTermsField(SEARCH_FIELD);
    termsQuery.setTimeAllowed(TERMS_SEARCH_TIMEOUT);
    termsQuery.setShowDebugInfo(DEBUG_FLAG);
    termsQuery.setTermsLimit(MAX_TERMS_QUERY_RESULTS);
    List<Term> terms = KeywordSearch.getServer().queryTerms(termsQuery).getTerms(SEARCH_FIELD);
    /*
     * Do a term query for each term that matched the regex.
     */
    QueryResults results = new QueryResults(this, keywordList);
    for (Term term : terms) {
        /*
         * If searching for credit card account numbers, do a Luhn check on
         * the term and discard it if it does not pass.
         */
        if (keyword.getArtifactAttributeType() == ATTRIBUTE_TYPE.TSK_CARD_NUMBER) {
            Matcher matcher = CREDIT_CARD_NUM_PATTERN.matcher(term.getTerm());
            matcher.find();
            final String ccn = CharMatcher.anyOf(" -").removeFrom(matcher.group("ccn"));
            if (false == CREDIT_CARD_NUM_LUHN_CHECK.isValid(ccn)) {
                continue;
            }
        }

        /*
         * Do an ordinary query with the escaped term and convert the query
         * results into a single list of keyword hits without duplicates.
         *
         * Note that the filters field appears to be unused. There is an old
         * comment here, what does it mean? "Note: we can't set filter query
         * on terms query but setting filter query on fileResults query will
         * yield the same result." The filter is NOT being added to the term
         * query.
         */
        String escapedTerm = KeywordSearchUtil.escapeLuceneQuery(term.getTerm());
        LuceneQuery termQuery = new LuceneQuery(keywordList, new Keyword(escapedTerm, true));
        filters.forEach(termQuery::addFilter); // This appears to be unused
        QueryResults termQueryResult = termQuery.performQuery();
        Set<KeywordHit> termHits = new HashSet<>();
        for (Keyword word : termQueryResult.getKeywords()) {
            termHits.addAll(termQueryResult.getResults(word));
        }
        results.addResult(new Keyword(term.getTerm(), false), new ArrayList<>(termHits));
    }
    return results;
}