List of usage examples for org.apache.solr.client.solrj SolrQuery setRows
public SolrQuery setRows(Integer rows)
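Before the project examples below, a minimal sketch of the call in isolation (the server URL and collection name are hypothetical): setRows limits how many documents Solr returns for a single request, while getNumFound() still reports the total hit count.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;

public class SetRowsExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical core URL; adjust to your Solr installation.
        SolrServer solr = new HttpSolrServer("http://localhost:8983/solr/collection1");
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(10); // return at most 10 documents for this request
        QueryResponse rsp = solr.query(query);
        // numFound is the total hit count; the result list holds at most 10 documents.
        System.out.println(rsp.getResults().getNumFound() + " hits, "
                + rsp.getResults().size() + " returned");
    }
}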
From source file:org.entando.entando.plugins.jpsolrclient.aps.system.services.content.Searcher.java
License:Open Source License
public List<String> searchContentsId(String langCode, String word, Collection<String> allowedGroups,
        int maxResultSize) throws ApsSystemException {
    List<String> contentsId = null;
    try {
        String queryString = this.createQueryString(langCode, word, allowedGroups);
        SolrQuery solrQuery = new SolrQuery(queryString);
        solrQuery.setRows(maxResultSize);
        contentsId = this.executeQuery(solrQuery);
    } catch (Throwable t) {
        throw new ApsSystemException("Error extracting response", t);
    }
    return contentsId;
}
From source file:org.entrystore.repository.util.SolrSupport.java
License:Apache License
private long sendQueryForEntryURIs(SolrQuery query, Set<URI> result, SolrServer solrServer, int offset, int limit) {
    if (offset > -1) {
        query.setStart(offset);
    }
    if (limit > -1) {
        query.setRows(limit);
    }
    long hits = -1;
    Date before = new Date();
    QueryResponse r = null;
    try {
        r = solrServer.query(query);
        SolrDocumentList docs = r.getResults();
        hits = docs.getNumFound();
        for (SolrDocument solrDocument : docs) {
            if (solrDocument.containsKey("uri")) {
                String uri = (String) solrDocument.getFieldValue("uri");
                if (uri != null) {
                    result.add(URI.create(uri));
                }
            }
        }
    } catch (SolrServerException e) {
        log.error(e.getMessage());
    }
    log.info("Solr query took " + (new Date().getTime() - before.getTime()) + " ms");
    return hits;
}
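The offset/limit handling above extends naturally to a paging loop. A sketch under assumptions (hypothetical page size of 100 and a stored "uri" field) that walks all matching documents with setStart/setRows; note that large start offsets become increasingly expensive, which is what the cursorMark example further down avoids.

// Hypothetical paging helper; assumes a SolrServer instance and a page size of 100.
static void collectAllUris(SolrServer solr, SolrQuery query, Set<URI> out) throws SolrServerException {
    final int pageSize = 100;
    int start = 0;
    long numFound;
    do {
        query.setStart(start);
        query.setRows(pageSize);
        SolrDocumentList page = solr.query(query).getResults();
        numFound = page.getNumFound();
        for (SolrDocument doc : page) {
            Object uri = doc.getFieldValue("uri"); // assumes a stored "uri" field
            if (uri != null) {
                out.add(URI.create((String) uri));
            }
        }
        start += pageSize;
    } while (start < numFound);
}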
From source file:org.gbif.occurrence.download.file.OccurrenceFileWriterJob.java
/**
 * Executes the job query and creates a data file that will contain the records
 * from the job.from to the job.to positions.
 */
@Override
public Result call() throws IOException {
    // Creates a closer
    Closer closer = Closer.create();
    // Calculates the amount of output records
    final int nrOfOutputRecords = fileJob.getTo() - fileJob.getFrom();
    Map<UUID, Long> datasetUsages = Maps.newHashMap();
    // Creates a search request instance using the search request that comes in the fileJob
    SolrQuery solrQuery = createSolrQuery(fileJob.getQuery());
    try {
        ICsvMapWriter intCsvWriter = closer.register(
                new CsvMapWriter(new FileWriterWithEncoding(fileJob.getInterpretedDataFile(), Charsets.UTF_8),
                        CsvPreference.TAB_PREFERENCE));
        ICsvMapWriter verbCsvWriter = closer.register(
                new CsvMapWriter(new FileWriterWithEncoding(fileJob.getVerbatimDataFile(), Charsets.UTF_8),
                        CsvPreference.TAB_PREFERENCE));
        ICsvBeanWriter multimediaCsvWriter = closer.register(
                new CsvBeanWriter(new FileWriterWithEncoding(fileJob.getMultimediaDataFile(), Charsets.UTF_8),
                        CsvPreference.TAB_PREFERENCE));
        int recordCount = 0;
        while (recordCount < nrOfOutputRecords) {
            solrQuery.setStart(fileJob.getFrom() + recordCount);
            // Limit can't be greater than the maximum number of records assigned to this job
            solrQuery.setRows(recordCount + LIMIT > nrOfOutputRecords ? nrOfOutputRecords - recordCount : LIMIT);
            final QueryResponse response = solrServer.query(solrQuery);
            for (Iterator<SolrDocument> itResults = response.getResults().iterator(); itResults.hasNext(); recordCount++) {
                final Integer occKey = (Integer) itResults.next()
                        .getFieldValue(OccurrenceSolrField.KEY.getFieldName());
                // Writes the occurrence record obtained from HBase as Map<String,Object>.
                org.apache.hadoop.hbase.client.Result result = occurrenceMapReader.get(occKey);
                Map<String, String> occurrenceRecordMap = OccurrenceMapReader.buildOccurrenceMap(result);
                Map<String, String> verbOccurrenceRecordMap = OccurrenceMapReader.buildVerbatimOccurrenceMap(result);
                if (occurrenceRecordMap != null) {
                    incrementDatasetUsage(datasetUsages, occurrenceRecordMap);
                    intCsvWriter.write(occurrenceRecordMap, INT_COLUMNS);
                    verbCsvWriter.write(verbOccurrenceRecordMap, VERB_COLUMNS);
                    writeMediaObjects(multimediaCsvWriter, result, occKey);
                } else {
                    LOG.error(String.format("Occurrence id %s not found!", occKey));
                }
            }
        }
    } catch (Exception e) {
        Throwables.propagate(e);
    } finally {
        closer.close();
        // Unlock the assigned lock.
        lock.unlock();
        LOG.info("Lock released, job detail: {} ", fileJob.toString());
    }
    return new Result(fileJob, datasetUsages);
}
From source file:org.geotools.data.solr.FieldLayerMapper.java
License:Open Source License
@Override
public List<String> createTypeNames(HttpSolrServer solrServer) throws Exception {
    List<String> names = new ArrayList<>();
    SolrQuery query = new SolrQuery();
    query.setQuery("*:*");
    query.addFacetField(field);
    query.setFacet(true);
    query.setFacetMinCount(1);
    query.setFacetSort(FacetParams.FACET_SORT_INDEX);
    query.setRows(0);
    query.setParam("omitHeader", true);
    QueryResponse rsp = solrServer.query(query);
    if (LOGGER.isLoggable(Level.FINE)) {
        LOGGER.log(Level.FINE, "SOLR query done: " + query.toString());
    }
    List<Count> uniqueFacetFields = rsp.getFacetFields().get(0).getValues();
    for (Count field : uniqueFacetFields) {
        names.add(field.getName());
    }
    return names;
}
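setRows(0) is the usual idiom when only aggregate information is needed: Solr still computes numFound and the facet counts but skips fetching documents. A minimal sketch, reusing the solrServer from the example above and assuming a hypothetical facet field named "category":

// Facet-only query: no documents are fetched, only counts come back.
SolrQuery query = new SolrQuery("*:*");
query.setRows(0);                       // we only need facet counts, not documents
query.setFacet(true);
query.addFacetField("category");        // hypothetical facet field
query.setFacetMinCount(1);

QueryResponse rsp = solrServer.query(query);
for (FacetField.Count value : rsp.getFacetFields().get(0).getValues()) {
    System.out.println(value.getName() + ": " + value.getCount());
}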
From source file:org.geotools.data.solr.SolrDataStore.java
License:Open Source License
/**
 * Retrieves the SOLR attributes for a specific type.<br/>
 * Two SOLR LukeRequests are needed to discover the SOLR fields and their schema, for both
 * dynamic and static kinds.<br/>
 * For each discovered field a SOLR request is issued to verify whether the field has no values
 * in the current type; this information is stored via {@link SolrAttribute#setEmpty}.<br/>
 * SolrJ does not extract information about the uniqueKey, so the custom class
 * {@link ExtendedFieldSchemaInfo} is used.<br/>
 * A multiValued SOLR field is mapped as a String type.
 *
 * @param layerName the type to use to query the SOLR field {@link SolrDataStore#field}
 *
 * @see {@link SolrUtils#decodeSolrFieldType}
 * @see {@link ExtendedFieldSchemaInfo#ExtendedFieldSchemaInfo}
 */
public ArrayList<SolrAttribute> getSolrAttributes(String layerName) {
    if (solrAttributes.isEmpty()) {
        solrAttributes = new ArrayList<SolrAttribute>();
        try {
            LukeRequest lq = new LukeRequest();
            lq.setShowSchema(true);
            LukeResponse processSchema = lq.process(solrServer);
            lq = new LukeRequest();
            lq.setShowSchema(false);
            LukeResponse processField = lq.process(solrServer);
            Map<String, FieldInfo> fis = processField.getFieldInfo();
            SortedSet<String> keys = new TreeSet<String>(fis.keySet());
            for (String k : keys) {
                FieldInfo fieldInfo = fis.get(k);
                String name = fieldInfo.getName();
                String type = fieldInfo.getType();
                SolrQuery query = new SolrQuery();
                query.setQuery("*:*");
                query.setRows(0);
                query.addFilterQuery(this.field + ":*");
                if (layerName != null && layerName.isEmpty()) {
                    query.addFilterQuery(name + ":" + layerName);
                } else {
                    query.addFilterQuery(name + ":*");
                }
                QueryResponse rsp = solrServer.query(query);
                long founds = rsp.getResults().getNumFound();
                FieldTypeInfo fty = processSchema.getFieldTypeInfo(type);
                if (fty != null) {
                    Class<?> objType = SolrUtils.decodeSolrFieldType(fty.getClassName());
                    if (objType != null) {
                        ExtendedFieldSchemaInfo extendedFieldSchemaInfo = new SolrUtils.ExtendedFieldSchemaInfo(
                                processSchema, processField, name);
                        SolrAttribute at = new SolrAttribute(name, objType);
                        if (extendedFieldSchemaInfo.getUniqueKey()) {
                            at.setPk(true);
                            at.setUse(true);
                        }
                        if (extendedFieldSchemaInfo.getMultivalued()
                                && !Geometry.class.isAssignableFrom(at.getType())) {
                            at.setType(String.class);
                        }
                        at.setEmpty(founds == 0);
                        solrAttributes.add(at);
                    } else {
                        if (LOGGER.isLoggable(Level.FINE)) {
                            LOGGER.log(Level.FINE, "Skipping attribute " + fty.getName()
                                    + " as we don't know how to map its type to a java object "
                                    + fty.getClassName());
                        }
                    }
                }
            }
            // Reorder fields: empty ones last
            List<BeanComparator> sortFields = Arrays.asList(new BeanComparator("empty"),
                    new BeanComparator("name"));
            ComparatorChain multiSort = new ComparatorChain(sortFields);
            Collections.sort(solrAttributes, multiSort);
        } catch (Exception ex) {
            LOGGER.log(Level.SEVERE, ex.getMessage(), ex);
        }
    }
    return solrAttributes;
}
From source file:org.geotools.data.solr.SolrDataStore.java
License:Open Source License
@Override
protected List<Name> createTypeNames() throws IOException {
    try {
        if (typeNames == null || typeNames.isEmpty()) {
            typeNames = new ArrayList<Name>();
            SolrQuery query = new SolrQuery();
            query.setQuery("*:*");
            query.addFacetField(field);
            query.setFacet(true);
            query.setFacetMinCount(1);
            query.setFacetSort(FacetParams.FACET_SORT_INDEX);
            query.setRows(0);
            query.setParam("omitHeader", true);
            QueryResponse rsp = solrServer.query(query);
            if (LOGGER.isLoggable(Level.FINE)) {
                LOGGER.log(Level.FINE, "SOLR query done: " + query.toString());
            }
            List<Count> uniqueFacetFields = rsp.getFacetFields().get(0).getValues();
            for (Count field : uniqueFacetFields) {
                typeNames.add(new NameImpl(namespaceURI, field.getName()));
            }
        }
    } catch (Exception ex) {
        LOGGER.log(Level.SEVERE, ex.getMessage(), ex);
    }
    return typeNames;
}
From source file:org.geotools.data.solr.SolrDataStore.java
License:Open Source License
/**
 * Builds the SolrJ query with support for a subset of fields, limit/offset, sorting,
 * OGC filter encoding and viewParams.<br>
 * The SOLR query always needs an order by the PK field to enable pagination and efficient
 * data retrieval.<br>
 * Currently only additional "q" and "fq" SOLR parameters can be passed using viewParams;
 * these conditions are ANDed with the others.
 *
 * @param featureType the feature type to query
 * @param q the OGC query to translate into a SOLR request
 *
 * @see {@link Hints#VIRTUAL_TABLE_PARAMETERS}
 */
protected SolrQuery select(SimpleFeatureType featureType, Query q) {
    SolrQuery query = new SolrQuery();
    query.setParam("omitHeader", true);
    try {
        // Column names
        if (q.getPropertyNames() != null) {
            for (String prop : q.getPropertyNames()) {
                query.addField(prop);
            }
        }
        query.setQuery("*:*");
        // Encode limit/offset, if necessary
        if (q.getStartIndex() != null && q.getStartIndex() >= 0) {
            query.setStart(q.getStartIndex());
        }
        if (q.getMaxFeatures() > 0) {
            query.setRows(q.getMaxFeatures());
        }
        // Sort
        ORDER naturalSortOrder = ORDER.asc;
        if (q.getSortBy() != null) {
            for (SortBy sort : q.getSortBy()) {
                if (sort.getPropertyName() != null) {
                    query.addSort(sort.getPropertyName().getPropertyName(),
                            sort.getSortOrder().equals(SortOrder.ASCENDING) ? ORDER.asc : ORDER.desc);
                } else {
                    naturalSortOrder = sort.getSortOrder().equals(SortOrder.ASCENDING) ? ORDER.asc
                            : ORDER.desc;
                }
            }
        }
        // Always add a natural sort by PK to support pagination
        query.addSort(getPrimaryKey(featureType.getTypeName()).getName(), naturalSortOrder);
        // Encode OGC filter
        FilterToSolr f2s = initializeFilterToSolr(featureType);
        String fq = this.field + ":" + featureType.getTypeName();
        Filter simplified = SimplifyingFilterVisitor.simplify(q.getFilter());
        String ffq = f2s.encodeToString(simplified);
        if (ffq != null && !ffq.isEmpty()) {
            fq = fq + " AND " + ffq;
        }
        query.setFilterQueries(fq);
        // Add viewParams
        addViewparams(q, query);
    } catch (Exception e) {
        LOGGER.log(Level.SEVERE, e.getMessage(), e);
    }
    return query;
}
From source file:org.geotools.data.solr.SolrDataStore.java
License:Open Source License
/**
 * Builds the SolrJ count query with support for limit/offset, OGC filter encoding and viewParams.<br>
 * Currently only additional "q" and "fq" SOLR parameters can be passed using viewParams;
 * these conditions are ANDed with the others.
 *
 * @param featureType the feature type to query
 * @param q the OGC query to translate into a SOLR request
 *
 * @see {@link Hints#VIRTUAL_TABLE_PARAMETERS}
 */
protected SolrQuery count(SimpleFeatureType featureType, Query q) {
    SolrQuery query = new SolrQuery();
    query.setParam("omitHeader", true);
    query.setQuery("*:*");
    query.setFields(this.getPrimaryKey(featureType.getName().getLocalPart()).getName());
    try {
        // Encode limit/offset, if necessary
        if (q.getStartIndex() != null && q.getStartIndex() >= 0) {
            query.setStart(q.getStartIndex());
        }
        query.setRows(0);
        // Encode OGC filter
        FilterToSolr f2s = initializeFilterToSolr(featureType);
        String fq = this.field + ":" + featureType.getTypeName();
        String ffq = f2s.encodeToString(q.getFilter());
        if (ffq != null && !ffq.isEmpty()) {
            fq = fq + " AND " + ffq;
        }
        query.setFilterQueries(fq);
        // Add viewParams parameters
        addViewparams(q, query);
    } catch (Exception e) {
        LOGGER.log(Level.SEVERE, e.getMessage(), e);
    }
    return query;
}
From source file:org.geotools.data.solr.SolrFeatureReader.java
License:Open Source License
private String getCursorMarkForStart(HttpSolrServer server, SolrQuery solrQuery) throws SolrServerException {
    Integer prevRows = solrQuery.getRows();
    solrQuery.setRows(solrQuery.getStart());
    solrQuery.setStart(0);
    solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, CursorMarkParams.CURSOR_MARK_START);
    QueryResponse rsp = server.query(solrQuery);
    if (this.solrDataStore.getLogger().isLoggable(Level.FINE)) {
        this.solrDataStore.getLogger().log(Level.FINE, "SOLR query done: " + solrQuery.toString());
    }
    String nextC = rsp.getNextCursorMark();
    solrQuery.setRows(prevRows);
    return nextC;
}
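The cursorMark mechanism used above is the efficient alternative to large setStart offsets for deep paging. A minimal sketch, assuming an HttpSolrServer named server, a uniqueKey field named "id" and a hypothetical page size of 500; cursor paging requires a sort that includes the uniqueKey field:

// Cursor-based deep paging: cheaper than advancing setStart through a large result set.
SolrQuery query = new SolrQuery("*:*");
query.setRows(500);                              // hypothetical page size
query.addSort("id", SolrQuery.ORDER.asc);        // sort must include the uniqueKey field

String cursorMark = CursorMarkParams.CURSOR_MARK_START;
boolean done = false;
while (!done) {
    query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark);
    QueryResponse rsp = server.query(query);
    for (SolrDocument doc : rsp.getResults()) {
        // process doc
    }
    String nextCursorMark = rsp.getNextCursorMark();
    done = cursorMark.equals(nextCursorMark);    // no progress means everything has been read
    cursorMark = nextCursorMark;
}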
From source file:org.gss_project.gss.server.ejb.ExternalAPIBean.java
License:Open Source License
/**
 * Performs the search on the solr server and returns the results.
 *
 * @param userId
 * @param query
 * @param luceneQuery
 * @return a List of FileHeader objects
 */
public SearchResult search(Long userId, String query, int start, boolean luceneQuery)
        throws ObjectNotFoundException {
    if (userId == null)
        throw new ObjectNotFoundException("No user specified");
    if (query == null)
        throw new ObjectNotFoundException("No query specified");
    final int maxRows = getConfiguration().getInt("searchResultsPerPage", 25);
    List<FileHeader> result = new ArrayList<FileHeader>();
    try {
        CommonsHttpSolrServer solr = new CommonsHttpSolrServer(getConfiguration().getString("solr.url"));
        List<Group> groups = dao.getGroupsContainingUser(userId);
        String escapedQuery = luceneQuery ? normalizeSearchQuery(query)
                : escapeCharacters(normalizeSearchQuery(query));
        String constructedQuery = escapedQuery + " AND (public: true OR ureaders: " + userId;
        if (!groups.isEmpty()) {
            constructedQuery += " OR (";
            for (int i = 0; i < groups.size(); i++) {
                Group g = groups.get(i);
                constructedQuery += "greaders :" + g.getId();
                if (i < groups.size() - 1)
                    constructedQuery += " OR ";
            }
            constructedQuery += ")";
        }
        constructedQuery += ")";
        SolrQuery solrQuery = new SolrQuery(constructedQuery);
        solrQuery.setRows(maxRows);
        if (start > 0)
            solrQuery.setStart(start);
        QueryResponse response = solr.query(solrQuery);
        SolrDocumentList results = response.getResults();
        if (results.getNumFound() > maxRows && start < 0) {
            solrQuery.setRows(Integer.valueOf((int) results.getNumFound()));
            response = solr.query(solrQuery);
            results = response.getResults();
        }
        for (SolrDocument d : results) {
            Long id = Long.valueOf((String) d.getFieldValue("id"));
            try {
                FileHeader f = dao.getEntityById(FileHeader.class, id);
                result.add(f);
            } catch (ObjectNotFoundException e) {
                logger.warn("Search result id " + id + " cannot be found", e);
            }
        }
        return new SearchResult(results.getNumFound(), result);
    } catch (MalformedURLException e) {
        logger.error(e);
        throw new EJBException(e);
    } catch (SolrServerException e) {
        logger.error(e);
        throw new EJBException(e);
    }
}