List of usage examples for org.apache.solr.client.solrj SolrQuery setStart
public SolrQuery setStart(Integer start)
From source file:org.apache.nifi.processors.solr.QuerySolr.java
License:Apache License
/**
 * Executes the configured Solr query and routes the results.
 *
 * <p>Builds a {@link SolrQuery} from the processor properties (query, request handler,
 * field list, sort, start, rows, plus any additional request parameters), then pages
 * through the results. Each page of documents is written to a new FlowFile (XML or
 * record-oriented, depending on RETURN_TYPE) and transferred to RESULTS. Facet and
 * stats sections, if requested via the additional parameters, are emitted once — on the
 * first page only — to the FACETS and STATS relationships. On any failure the response
 * FlowFile is penalized, annotated with the exception, and routed to FAILURE.
 *
 * @param context the process context holding the processor's configuration properties
 * @param session the process session used to create, write, and transfer FlowFiles
 * @throws ProcessException if the sort clause cannot be parsed
 */
@Override
public void doOnTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();
    FlowFile flowFileOriginal = session.get();
    FlowFile flowFileResponse;
    if (flowFileOriginal == null) {
        // No incoming FlowFile: only run as a source processor when there is no
        // non-loop incoming connection; otherwise wait for input.
        if (context.hasNonLoopConnection()) {
            return;
        }
        flowFileResponse = session.create();
    } else {
        flowFileResponse = session.create(flowFileOriginal);
    }
    final SolrQuery solrQuery = new SolrQuery();
    final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
    final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFileResponse)
            .getValue();

    // Transit URI used for provenance reporting; includes the collection in cloud mode.
    final StringBuilder transitUri = new StringBuilder("solr://");
    transitUri.append(getSolrLocation());
    if (isSolrCloud) {
        transitUri.append(":").append(collection);
    }
    final StopWatch timer = new StopWatch(false);
    try {
        solrQuery.setQuery(context.getProperty(SOLR_PARAM_QUERY).evaluateAttributeExpressions(flowFileResponse)
                .getValue());
        solrQuery.setRequestHandler(context.getProperty(SOLR_PARAM_REQUEST_HANDLER)
                .evaluateAttributeExpressions(flowFileResponse).getValue());
        if (context.getProperty(SOLR_PARAM_FIELD_LIST).isSet()) {
            for (final String field : context.getProperty(SOLR_PARAM_FIELD_LIST)
                    .evaluateAttributeExpressions(flowFileResponse).getValue().split(",")) {
                solrQuery.addField(field.trim());
            }
        }
        // Avoid ArrayIndexOutOfBoundsException due to incorrectly configured sorting
        try {
            if (context.getProperty(SOLR_PARAM_SORT).isSet()) {
                final List<SolrQuery.SortClause> sortings = new ArrayList<>();
                for (final String sorting : context.getProperty(SOLR_PARAM_SORT)
                        .evaluateAttributeExpressions(flowFileResponse).getValue().split(",")) {
                    // Each entry is expected as "<field> <direction>".
                    final String[] sortEntry = sorting.trim().split(" ");
                    sortings.add(new SolrQuery.SortClause(sortEntry[0], sortEntry[1]));
                }
                solrQuery.setSorts(sortings);
            }
        } catch (Exception e) {
            throw new ProcessException("Error while parsing the sort clauses for the Solr query");
        }
        final Integer startParam = context.getProperty(SOLR_PARAM_START).isSet()
                ? Integer.parseInt(context.getProperty(SOLR_PARAM_START)
                        .evaluateAttributeExpressions(flowFileResponse).getValue())
                : CommonParams.START_DEFAULT;
        solrQuery.setStart(startParam);
        final Integer rowParam = context.getProperty(SOLR_PARAM_ROWS).isSet()
                ? Integer.parseInt(context.getProperty(SOLR_PARAM_ROWS)
                        .evaluateAttributeExpressions(flowFileResponse).getValue())
                : CommonParams.ROWS_DEFAULT;
        solrQuery.setRows(rowParam);

        // Any extra request parameters (e.g. facet.*, stats.*) are passed through to Solr
        // and also inspected to decide whether facet/stats FlowFiles should be produced.
        final Map<String, String[]> additionalSolrParams = SolrUtils.getRequestParams(context, flowFileResponse);
        final Set<String> searchComponents = extractSearchComponents(additionalSolrParams);
        solrQuery.add(new MultiMapSolrParams(additionalSolrParams));

        final Map<String, String> attributes = new HashMap<>();
        attributes.put(ATTRIBUTE_SOLR_CONNECT, getSolrLocation());
        if (isSolrCloud) {
            attributes.put(ATTRIBUTE_SOLR_COLLECTION, collection);
        }
        attributes.put(ATTRIBUTE_SOLR_QUERY, solrQuery.toString());
        if (flowFileOriginal != null) {
            flowFileOriginal = session.putAllAttributes(flowFileOriginal, attributes);
        }
        flowFileResponse = session.putAllAttributes(flowFileResponse, attributes);

        final boolean getEntireResults = RETURN_ALL_RESULTS
                .equals(context.getProperty(AMOUNT_DOCUMENTS_TO_RETURN).getValue());
        boolean processFacetsAndStats = true;
        boolean continuePaging = true;
        while (continuePaging) {
            timer.start();
            Map<String, String> responseAttributes = new HashMap<>();
            responseAttributes.put(ATTRIBUTE_SOLR_START, solrQuery.getStart().toString());
            responseAttributes.put(ATTRIBUTE_SOLR_ROWS, solrQuery.getRows().toString());

            // Hard stop: deep paging via start is expensive for Solr, so refuse to go
            // past the configured upper limit instead of degrading the cluster.
            if (solrQuery.getStart() > UPPER_LIMIT_START_PARAM) {
                logger.warn(
                        "The start parameter of Solr query {} exceeded the upper limit of {}. The query will not be processed "
                                + "to avoid performance or memory issues on the part of Solr.",
                        new Object[] { solrQuery.toString(), UPPER_LIMIT_START_PARAM });
                flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);
                timer.stop();
                break;
            }
            final QueryRequest req = new QueryRequest(solrQuery);
            if (isBasicAuthEnabled()) {
                req.setBasicAuthCredentials(getUsername(), getPassword());
            }
            final QueryResponse response = req.process(getSolrClient());
            timer.stop();

            final Long totalNumberOfResults = response.getResults().getNumFound();
            responseAttributes.put(ATTRIBUTE_SOLR_NUMBER_RESULTS, totalNumberOfResults.toString());
            responseAttributes.put(ATTRIBUTE_CURSOR_MARK, response.getNextCursorMark());
            responseAttributes.put(ATTRIBUTE_SOLR_STATUS, String.valueOf(response.getStatus()));
            responseAttributes.put(ATTRIBUTE_QUERY_TIME, String.valueOf(response.getQTime()));
            flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);

            if (response.getResults().size() > 0) {
                if (context.getProperty(RETURN_TYPE).getValue().equals(MODE_XML.getValue())) {
                    // XML mode: stream Solr's response straight through as XML.
                    flowFileResponse = session.write(flowFileResponse,
                            SolrUtils.getOutputStreamCallbackToTransformSolrResponseToXml(response));
                    flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(),
                            MIME_TYPE_XML);
                } else {
                    // Record mode: convert documents to a RecordSet and serialize with the
                    // configured RecordSetWriter.
                    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER)
                            .evaluateAttributeExpressions(flowFileResponse)
                            .asControllerService(RecordSetWriterFactory.class);
                    final RecordSchema schema = writerFactory.getSchema(flowFileResponse.getAttributes(), null);
                    final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(), schema);
                    // StringBuffer: mutable holder so the lambda below can report the
                    // writer's MIME type back to this scope.
                    final StringBuffer mimeType = new StringBuffer();
                    final FlowFile flowFileResponseRef = flowFileResponse;
                    flowFileResponse = session.write(flowFileResponse, out -> {
                        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out,
                                flowFileResponseRef)) {
                            writer.write(recordSet);
                            writer.flush();
                            mimeType.append(writer.getMimeType());
                        } catch (SchemaNotFoundException e) {
                            throw new ProcessException("Could not parse Solr response", e);
                        }
                    });
                    flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(),
                            mimeType.toString());
                }

                // Facets and stats apply to the whole result set, so emit them only once
                // (on the first page) even when paging through all results.
                if (processFacetsAndStats) {
                    if (searchComponents.contains(FacetParams.FACET)) {
                        FlowFile flowFileFacets = session.create(flowFileResponse);
                        flowFileFacets = session.write(flowFileFacets, out -> {
                            try (final OutputStreamWriter osw = new OutputStreamWriter(out);
                                    final JsonWriter writer = new JsonWriter(osw)) {
                                addFacetsFromSolrResponseToJsonWriter(response, writer);
                            }
                        });
                        flowFileFacets = session.putAttribute(flowFileFacets, CoreAttributes.MIME_TYPE.key(),
                                MIME_TYPE_JSON);
                        session.getProvenanceReporter().receive(flowFileFacets, transitUri.toString(),
                                timer.getDuration(TimeUnit.MILLISECONDS));
                        session.transfer(flowFileFacets, FACETS);
                    }
                    if (searchComponents.contains(StatsParams.STATS)) {
                        FlowFile flowFileStats = session.create(flowFileResponse);
                        flowFileStats = session.write(flowFileStats, out -> {
                            try (final OutputStreamWriter osw = new OutputStreamWriter(out);
                                    final JsonWriter writer = new JsonWriter(osw)) {
                                addStatsFromSolrResponseToJsonWriter(response, writer);
                            }
                        });
                        flowFileStats = session.putAttribute(flowFileStats, CoreAttributes.MIME_TYPE.key(),
                                MIME_TYPE_JSON);
                        session.getProvenanceReporter().receive(flowFileStats, transitUri.toString(),
                                timer.getDuration(TimeUnit.MILLISECONDS));
                        session.transfer(flowFileStats, STATS);
                    }
                    processFacetsAndStats = false;
                }
            }
            if (getEntireResults) {
                // Page forward: transfer the current page and start a fresh FlowFile for
                // the next one, until start + rows reaches the total hit count.
                final Integer totalDocumentsReturned = solrQuery.getStart() + solrQuery.getRows();
                if (totalDocumentsReturned < totalNumberOfResults) {
                    solrQuery.setStart(totalDocumentsReturned);
                    session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(),
                            timer.getDuration(TimeUnit.MILLISECONDS));
                    session.transfer(flowFileResponse, RESULTS);
                    flowFileResponse = session.create(flowFileResponse);
                } else {
                    continuePaging = false;
                }
            } else {
                continuePaging = false;
            }
        }
    } catch (Exception e) {
        // Annotate and route the response FlowFile to FAILURE; penalize the original
        // so it is retried later rather than transferred to ORIGINAL below.
        flowFileResponse = session.penalize(flowFileResponse);
        flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION, e.getClass().getName());
        flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION_MESSAGE, e.getMessage());
        session.transfer(flowFileResponse, FAILURE);
        logger.error("Failed to execute query {} due to {}. FlowFile will be routed to relationship failure",
                new Object[] { solrQuery.toString(), e }, e);
        if (flowFileOriginal != null) {
            flowFileOriginal = session.penalize(flowFileOriginal);
        }
    }
    // The last (or only) page has not been transferred yet unless the FlowFile was
    // penalized by the failure path above.
    if (!flowFileResponse.isPenalized()) {
        session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(),
                timer.getDuration(TimeUnit.MILLISECONDS));
        session.transfer(flowFileResponse, RESULTS);
    }
    if (flowFileOriginal != null) {
        if (!flowFileOriginal.isPenalized()) {
            session.transfer(flowFileOriginal, ORIGINAL);
        } else {
            session.remove(flowFileOriginal);
        }
    }
}
From source file:org.apache.ofbiz.solr.SolrProductSearch.java
License:Apache License
/** * Runs a query on the Solr Search Engine and returns the results. * <p>//from w ww.ja v a 2 s .c o m * This function only returns an object of type QueryResponse, so it is probably not a good idea to call it directly from within the * groovy files (As a decent example on how to use it, however, use keywordSearch instead). */ public static Map<String, Object> runSolrQuery(DispatchContext dctx, Map<String, Object> context) { // get Connection HttpSolrClient client = null; String solrIndexName = (String) context.get("indexName"); Map<String, Object> result; try { client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName); // create Query Object SolrQuery solrQuery = new SolrQuery(); solrQuery.setQuery((String) context.get("query")); // solrQuery.setQueryType("dismax"); boolean faceted = (Boolean) context.get("facet"); if (faceted) { solrQuery.setFacet(faceted); solrQuery.addFacetField("manu"); solrQuery.addFacetField("cat"); solrQuery.setFacetMinCount(1); solrQuery.setFacetLimit(8); solrQuery.addFacetQuery("listPrice:[0 TO 50]"); solrQuery.addFacetQuery("listPrice:[50 TO 100]"); solrQuery.addFacetQuery("listPrice:[100 TO 250]"); solrQuery.addFacetQuery("listPrice:[250 TO 500]"); solrQuery.addFacetQuery("listPrice:[500 TO 1000]"); solrQuery.addFacetQuery("listPrice:[1000 TO 2500]"); solrQuery.addFacetQuery("listPrice:[2500 TO 5000]"); solrQuery.addFacetQuery("listPrice:[5000 TO 10000]"); solrQuery.addFacetQuery("listPrice:[10000 TO 50000]"); solrQuery.addFacetQuery("listPrice:[50000 TO *]"); } boolean spellCheck = (Boolean) context.get("spellcheck"); if (spellCheck) { solrQuery.setParam("spellcheck", spellCheck); } boolean highLight = (Boolean) context.get("highlight"); if (highLight) { solrQuery.setHighlight(highLight); solrQuery.setHighlightSimplePre("<span class=\"highlight\">"); solrQuery.addHighlightField("description"); solrQuery.setHighlightSimplePost("</span>"); solrQuery.setHighlightSnippets(2); } // Set additional Parameter // SolrQuery.ORDER order 
= SolrQuery.ORDER.desc; if (context.get("viewIndex") != null && (Integer) context.get("viewIndex") > 0) { solrQuery.setStart((Integer) context.get("viewIndex")); } if (context.get("viewSize") != null && (Integer) context.get("viewSize") > 0) { solrQuery.setRows((Integer) context.get("viewSize")); } // if ((List) context.get("queryFilter") != null && ((ArrayList<SolrDocument>) context.get("queryFilter")).size() > 0) { // List filter = (List) context.get("queryFilter"); // String[] tn = new String[filter.size()]; // Iterator it = filter.iterator(); // for (int i = 0; i < filter.size(); i++) { // tn[i] = (String) filter.get(i); // } // solrQuery.setFilterQueries(tn); // } String queryFilter = (String) context.get("queryFilter"); if (UtilValidate.isNotEmpty(queryFilter)) solrQuery.setFilterQueries(queryFilter.split(" ")); if ((String) context.get("returnFields") != null) { solrQuery.setFields((String) context.get("returnFields")); } // if((Boolean)context.get("sortByReverse"))order.reverse(); if ((String) context.get("sortBy") != null && ((String) context.get("sortBy")).length() > 0) { SolrQuery.ORDER order; if (!((Boolean) context.get("sortByReverse"))) order = SolrQuery.ORDER.asc; else order = SolrQuery.ORDER.desc; solrQuery.setSort(((String) context.get("sortBy")).replaceFirst("-", ""), order); } if ((String) context.get("facetQuery") != null) { solrQuery.addFacetQuery((String) context.get("facetQuery")); } QueryResponse rsp = client.query(solrQuery); result = ServiceUtil.returnSuccess(); result.put("queryResult", rsp); } catch (Exception e) { Debug.logError(e, e.getMessage(), module); result = ServiceUtil.returnError(e.toString()); } finally { if (client != null) { try { client.close(); } catch (IOException e) { // do nothing } } } return result; }
From source file:org.apache.ofbiz.solr.SolrUtil.java
License:Apache License
public static Map<String, Object> categoriesAvailable(String catalogId, String categoryId, String productId, String facetPrefix, boolean displayproducts, int viewIndex, int viewSize, String solrIndexName) { // create the data model Map<String, Object> result = new HashMap<String, Object>(); HttpSolrClient client = null;/*from ww w . jav a2 s . c o m*/ QueryResponse returnMap = new QueryResponse(); try { // do the basic query client = getHttpSolrClient(solrIndexName); // create Query Object String query = "inStock[1 TO *]"; if (categoryId != null) query += " +cat:" + categoryId; else if (productId != null) query += " +productId:" + productId; SolrQuery solrQuery = new SolrQuery(); solrQuery.setQuery(query); if (catalogId != null) solrQuery.setFilterQueries("catalog:" + catalogId); if (displayproducts) { if (viewSize > -1) { solrQuery.setRows(viewSize); } else solrQuery.setRows(50000); if (viewIndex > -1) { solrQuery.setStart(viewIndex); } } else { solrQuery.setFields("cat"); solrQuery.setRows(0); } if (UtilValidate.isNotEmpty(facetPrefix)) { solrQuery.setFacetPrefix(facetPrefix); } solrQuery.setFacetMinCount(0); solrQuery.setFacet(true); solrQuery.addFacetField("cat"); solrQuery.setFacetLimit(-1); Debug.logVerbose("solr: solrQuery: " + solrQuery, module); returnMap = client.query(solrQuery, METHOD.POST); result.put("rows", returnMap); result.put("numFound", returnMap.getResults().getNumFound()); } catch (Exception e) { Debug.logError(e.getMessage(), module); } return result; }
From source file:org.apache.ranger.solr.SolrUtil.java
License:Apache License
public QueryResponse searchResources(SearchCriteria searchCriteria, List<SearchField> searchFields, List<SortField> sortFieldList, SolrClient solrClient) { SolrQuery query = new SolrQuery(); query.setQuery("*:*"); if (searchCriteria.getParamList() != null) { // For now assuming there is only date field where range query will // be done. If we there are more than one, then we should create a // hashmap for each field name Date fromDate = null;// w ww . j a va 2 s . co m Date toDate = null; String dateFieldName = null; for (SearchField searchField : searchFields) { Object paramValue = searchCriteria.getParamValue(searchField.getClientFieldName()); if (paramValue == null || paramValue.toString().isEmpty()) { continue; } String fieldName = searchField.getFieldName(); if (paramValue instanceof Collection) { String fq = orList(fieldName, (Collection<?>) paramValue); if (fq != null) { query.addFilterQuery(fq); } } else if (searchField.getDataType() == SearchField.DATA_TYPE.DATE) { if (!(paramValue instanceof Date)) { logger.error("Search file is not of java object instanceof Date"); } else { if (searchField.getSearchType() == SEARCH_TYPE.GREATER_EQUAL_THAN || searchField.getSearchType() == SEARCH_TYPE.GREATER_THAN) { fromDate = (Date) paramValue; dateFieldName = fieldName; } else if (searchField.getSearchType() == SEARCH_TYPE.LESS_EQUAL_THAN || searchField.getSearchType() == SEARCH_TYPE.LESS_THAN) { toDate = (Date) paramValue; } } } else if (searchField.getSearchType() == SEARCH_TYPE.GREATER_EQUAL_THAN || searchField.getSearchType() == SEARCH_TYPE.GREATER_THAN || searchField.getSearchType() == SEARCH_TYPE.LESS_EQUAL_THAN || searchField.getSearchType() == SEARCH_TYPE.LESS_THAN) { // TODO: Need to handle range here } else { String fq = setField(fieldName, paramValue); if (searchField.getSearchType() == SEARCH_TYPE.PARTIAL) { fq = setFieldForPartialSearch(fieldName, paramValue); } if (fq != null) { query.addFilterQuery(fq); } } } if (fromDate != null || toDate != null) { 
String fq = setDateRange(dateFieldName, fromDate, toDate); if (fq != null) { query.addFilterQuery(fq); } } } setSortClause(searchCriteria, sortFieldList, query); query.setStart(searchCriteria.getStartIndex()); query.setRows(searchCriteria.getMaxRows()); // Fields to get // query.setFields("myClassType", "id", "score", "globalId"); if (logger.isDebugEnabled()) { logger.debug("SOLR QUERY=" + query.toString()); } QueryResponse response = runQuery(solrClient, query); if (response == null || response.getStatus() != 0) { logger.error("Error running query. query=" + query.toString() + ", response=" + response); throw restErrorUtil.createRESTException("Error running query", MessageEnums.ERROR_SYSTEM); } return response; }
From source file:org.bigsolr.hadoop.SolrInputFormat.java
License:Apache License
@Override public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException { log.info("SolrInputFormat -> getSplits"); Configuration conf = context.getConfiguration(); String collectionName = conf.get(COLLECTION_NAME); int numSplits = context.getNumReduceTasks(); SolrServer solr = SolrOperations.getSolrServer(conf); final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY)); solrQuery.setFields(ID_FIELD);/*from w w w . j av a 2 s. co m*/ solrQuery.setRows(50); solrQuery.set("collection", collectionName); solrQuery.setStart(0); QueryResponse response; try { response = solr.query(solrQuery); } catch (final SolrServerException e) { throw new IOException(e); } int numResults = (int) response.getResults().getNumFound(); int numDocsPerSplit = (numResults / numSplits); int currentDoc = 0; List<InputSplit> splits = new ArrayList<InputSplit>(); for (int i = 0; i < numSplits - 1; i++) { splits.add(new SolrInputSplit(currentDoc, numDocsPerSplit)); currentDoc += numDocsPerSplit; } splits.add(new SolrInputSplit(currentDoc, numResults - currentDoc)); return splits; }
From source file:org.bigsolr.hadoop.SolrInputFormat.java
License:Apache License
@Override public org.apache.hadoop.mapred.InputSplit[] getSplits(org.apache.hadoop.mapred.JobConf conf, int numSplits) throws IOException { log.info("SolrInputFormat -> getSplits"); String collectionName = conf.get(COLLECTION_NAME); SolrServer solr = SolrOperations.getSolrServer(conf); final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY)); solrQuery.setFields(ID_FIELD);// w ww.j ava 2 s . co m solrQuery.setRows(50); solrQuery.set("collection", collectionName); solrQuery.setStart(0); QueryResponse response; try { response = solr.query(solrQuery); } catch (final SolrServerException e) { throw new IOException(e); } int numResults = (int) response.getResults().getNumFound(); int numDocsPerSplit = (numResults / numSplits); int currentDoc = 0; List<InputSplit> splits = new ArrayList<InputSplit>(); for (int i = 0; i < numSplits - 1; i++) { splits.add(new SolrInputSplit(currentDoc, numDocsPerSplit)); currentDoc += numDocsPerSplit; } splits.add(new SolrInputSplit(currentDoc, numResults - currentDoc)); return splits.toArray(new SolrInputSplit[splits.size()]); }
From source file:org.bigsolr.hadoop.SolrInputFormat.java
License:Apache License
@Override public RecordReader<NullWritable, SolrRecord> createRecordReader(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { log.info("SolrInputFormat -> createRecordReader"); Configuration conf = context.getConfiguration(); org.apache.hadoop.mapred.Reporter reporter = null; // Need to implement with heartbeat String collectionName = conf.get(COLLECTION_NAME); String fields = conf.get(FIELDS); SolrServer solr = SolrOperations.getSolrServer(conf); SolrInputSplit solrSplit = (SolrInputSplit) split; final int numDocs = (int) solrSplit.getLength(); final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY)); solrQuery.setFields(fields);//ww w . j a v a 2s . c o m solrQuery.set("collection", collectionName); solrQuery.setStart(solrSplit.getDocBegin()); solrQuery.setRows(numDocs); QueryResponse response; try { response = solr.query(solrQuery); } catch (final SolrServerException e) { throw new IOException(e); } final SolrDocumentList solrDocs = response.getResults(); return new SolrRecordReader(solrDocs, numDocs); }
From source file:org.bigsolr.hadoop.SolrInputFormat.java
License:Apache License
@Override public org.apache.hadoop.mapred.RecordReader<NullWritable, SolrRecord> getRecordReader( org.apache.hadoop.mapred.InputSplit split, org.apache.hadoop.mapred.JobConf conf, org.apache.hadoop.mapred.Reporter reporter) throws IOException { log.info("SolrInputFormat -> getRecordReader"); String collectionName = conf.get(COLLECTION_NAME); String fields = conf.get(FIELDS); SolrServer solr = SolrOperations.getSolrServer(conf); int numDocs = 0; SolrInputSplit solrSplit = (SolrInputSplit) split; try {// w w w. j a v a 2 s .c o m numDocs = (int) solrSplit.getLength(); } catch (final IOException e) { throw new IOException(e); } final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY)); solrQuery.setFields(fields); solrQuery.set("collection", collectionName); // Added solrQuery.setStart(solrSplit.getDocBegin()); solrQuery.setRows(numDocs); QueryResponse response = null; try { response = solr.query(solrQuery); } catch (final SolrServerException e) { throw new IOException(e); } final SolrDocumentList solrDocs = response.getResults(); return new SolrRecordReader(solrDocs, numDocs); }
From source file:org.codelibs.fess.service.SearchService.java
License:Apache License
/**
 * Runs a search against the Solr query group and returns the documents as a list of maps.
 *
 * <p>The raw query string is first transformed by the query helper; if the resulting query
 * is blank, no request is sent and an empty response list is returned. Otherwise the Solr
 * query is assembled from the helper's configuration plus the optional facet, geo, and
 * more-like-this parameters, executed via POST, and wrapped in a {@code QueryResponseList}.
 *
 * @param query           the raw user query string
 * @param start           result offset; must not exceed the helper's max search result offset
 * @param rows            page size
 * @param facetInfo       optional faceting parameters (null to disable)
 * @param geoInfo         optional geo filter (applied only when available)
 * @param mltInfo         optional more-like-this parameters (null to disable)
 * @param responseFields  fields to return for each document
 * @param docValuesFields optional docValues fields to add to the request
 * @param forUser         whether the query is built with user-level restrictions
 * @return the initialized query response list (also carries the query, Solr query string,
 *         and execution time)
 * @throws ResultOffsetExceededException if start exceeds the configured maximum offset
 */
public List<Map<String, Object>> getDocumentList(final String query, final int start, final int rows,
        final FacetInfo facetInfo, final GeoInfo geoInfo, final MoreLikeThisInfo mltInfo,
        final String[] responseFields, final String[] docValuesFields, final boolean forUser) {
    if (start > queryHelper.getMaxSearchResultOffset()) {
        throw new ResultOffsetExceededException("The number of result size is exceeded.");
    }
    final long startTime = System.currentTimeMillis();
    final SolrGroup solrGroup = solrGroupManager.getSolrGroup(QueryType.QUERY);
    QueryResponse queryResponse = null;
    final SolrQuery solrQuery = new SolrQuery();
    final SearchQuery searchQuery = queryHelper.build(query, forUser);
    final String q = searchQuery.getQuery();
    // A blank built query means nothing to search; skip the request entirely.
    if (StringUtil.isNotBlank(q)) {
        // fields
        solrQuery.setFields(responseFields);
        // query and paging
        solrQuery.setQuery(q);
        solrQuery.setStart(start);
        solrQuery.setRows(rows);
        // dismax-style tuning parameters from the built query
        solrQuery.set("mm", searchQuery.getMinimumShouldMatch());
        solrQuery.set("defType", searchQuery.getDefType());
        for (final Map.Entry<String, String[]> entry : queryHelper.getQueryParamMap().entrySet()) {
            solrQuery.set(entry.getKey(), entry.getValue());
        }
        // filter query
        if (searchQuery.hasFilterQueries()) {
            solrQuery.addFilterQuery(searchQuery.getFilterQueries());
        }
        // sort: explicit sort fields from the query win; otherwise fall back to the
        // helper's default sort fields, if any.
        final SortField[] sortFields = searchQuery.getSortFields();
        if (sortFields.length != 0) {
            for (final SortField sortField : sortFields) {
                solrQuery.addSort(sortField.getField(),
                        Constants.DESC.equals(sortField.getOrder()) ? SolrQuery.ORDER.desc : SolrQuery.ORDER.asc);
            }
        } else if (queryHelper.hasDefaultSortFields()) {
            for (final SortField sortField : queryHelper.getDefaultSortFields()) {
                solrQuery.addSort(sortField.getField(),
                        Constants.DESC.equals(sortField.getOrder()) ? SolrQuery.ORDER.desc : SolrQuery.ORDER.asc);
            }
        }
        // highlighting
        if (queryHelper.getHighlightingFields() != null && queryHelper.getHighlightingFields().length != 0) {
            for (final String hf : queryHelper.getHighlightingFields()) {
                solrQuery.addHighlightField(hf);
            }
            solrQuery.setHighlightSnippets(queryHelper.getHighlightSnippetSize());
        }
        // shards
        if (queryHelper.getShards() != null) {
            solrQuery.setParam("shards", queryHelper.getShards());
        }
        // geo: the geo condition plus an optional extra filter from the helper
        if (geoInfo != null && geoInfo.isAvailable()) {
            solrQuery.addFilterQuery(geoInfo.toGeoQueryString());
            final String additionalGeoQuery = queryHelper.getAdditionalGeoQuery();
            if (StringUtil.isNotBlank(additionalGeoQuery)) {
                solrQuery.addFilterQuery(additionalGeoQuery);
            }
        }
        // facets: fields must be whitelisted by the helper; facet queries must build
        // to a non-blank string — otherwise the request is rejected outright.
        if (facetInfo != null) {
            solrQuery.setFacet(true);
            if (facetInfo.field != null) {
                for (final String f : facetInfo.field) {
                    if (queryHelper.isFacetField(f)) {
                        solrQuery.addFacetField(f);
                    } else {
                        throw new FessSolrQueryException("EFESS0002", new Object[] { f });
                    }
                }
            }
            if (facetInfo.query != null) {
                for (final String fq : facetInfo.query) {
                    final String facetQuery = queryHelper.buildFacetQuery(fq);
                    if (StringUtil.isNotBlank(facetQuery)) {
                        solrQuery.addFacetQuery(facetQuery);
                    } else {
                        throw new FessSolrQueryException("EFESS0003", new Object[] { fq, facetQuery });
                    }
                }
            }
            if (facetInfo.limit != null) {
                solrQuery.setFacetLimit(Integer.parseInt(facetInfo.limit));
            }
            if (facetInfo.minCount != null) {
                solrQuery.setFacetMinCount(Integer.parseInt(facetInfo.minCount));
            }
            if (facetInfo.missing != null) {
                solrQuery.setFacetMissing(Boolean.parseBoolean(facetInfo.missing));
            }
            if (facetInfo.prefix != null) {
                solrQuery.setFacetPrefix(facetInfo.prefix);
            }
            if (facetInfo.sort != null && queryHelper.isFacetSortValue(facetInfo.sort)) {
                solrQuery.setFacetSort(facetInfo.sort);
            }
        }
        // mlt: only enabled when the helper maps the requested field to a real one
        if (mltInfo != null) {
            final String mltField = queryHelper.getMoreLikeThisField(mltInfo.field);
            if (mltField != null) {
                solrQuery.set("mlt", true);
                if (mltInfo.count != null) {
                    solrQuery.set("mlt.count", Integer.parseInt(mltInfo.count));
                }
                solrQuery.set("mlt.fl", mltField);
            }
        }
        if (queryHelper.getTimeAllowed() >= 0) {
            solrQuery.setTimeAllowed(queryHelper.getTimeAllowed());
        }
        // pass-through request parameters configured on the helper
        final Set<Entry<String, String[]>> paramSet = queryHelper.getRequestParameterSet();
        if (!paramSet.isEmpty()) {
            for (final Map.Entry<String, String[]> entry : paramSet) {
                solrQuery.set(entry.getKey(), entry.getValue());
            }
        }
        if (docValuesFields != null) {
            for (final String docValuesField : docValuesFields) {
                solrQuery.add(Constants.DCF, docValuesField);
            }
        }
        queryResponse = solrGroup.query(solrQuery, SolrRequest.METHOD.POST);
    }
    final long execTime = System.currentTimeMillis() - startTime;
    // queryResponse may still be null here (blank query); init is expected to handle
    // that case — TODO confirm against QueryResponseList.init.
    final QueryResponseList queryResponseList = ComponentUtil.getQueryResponseList();
    queryResponseList.init(queryResponse, rows);
    queryResponseList.setSearchQuery(q);
    queryResponseList.setSolrQuery(solrQuery.toString());
    queryResponseList.setExecTime(execTime);
    return queryResponseList;
}
From source file:org.craftercms.commerce.server.solr.AbstractSolrCRUDService.java
License:Open Source License
public ServiceResponse<T> findByQuery(@PathVariable String query, @PathVariable int offset, @PathVariable int maxResults) throws CrafterCommerceException { if (LOGGER.isDebugEnabled()) { LOGGER.debug("About to find entities for query: " + query); }/*from w w w .ja v a 2s . c om*/ try { String solrQueryStr = queryConverter.toSolrQuery(query); SolrQuery solrQuery = new SolrQuery(solrQueryStr); solrQuery.setStart(offset); solrQuery.setRows(maxResults); QueryResponse qResponse = solrServer.query(solrQuery); Set<T> entities = toEntities(qResponse); ServiceResponse<T> sResponse = new ServiceResponse<T>(); sResponse.setReturnedClass(getTypeArgument()); sResponse.setSuccess(true); sResponse.setEntities(entities); sResponse.setCount(qResponse.getResults().getNumFound()); String message = ReturnMessageProvider.findByQueryMessage(getTypeArgument(), query, entities.size()); sResponse.setMessage(message); return sResponse; } catch (Exception e) { LOGGER.error("Failed to find entities by query for query = " + query, e); return new ServiceResponse<T>(getTypeArgument(), false, e.getMessage()); } }