List of usage examples for org.apache.solr.client.solrj SolrQuery setFacet
public SolrQuery setFacet(boolean b)
From source file:org.dspace.app.webui.cris.servlet.ProfileResearcherNetworkServlet.java
/**
 * Counts, per record type, the documents relating two research-network nodes.
 *
 * Matches {@code from} and {@code to} against {@code patternRP}: identifiers that
 * match are searched on {@code focus_auth}, plain values on {@code focus_val}.
 * The counts are read from a facet on the {@code type} field.
 *
 * @param from identifier or value of the first node
 * @param to   identifier or value of the second node
 * @return map of type name to number of relating documents
 * @throws SearchServiceException if the underlying search fails
 */
public Map<String, Integer> getRelations(String from, String to) throws SearchServiceException {
    Map<String, Integer> result = new HashMap<String, Integer>();
    SolrQuery solrQuery = new SolrQuery();
    Matcher matcher = patternRP.matcher(from);
    String field1 = matcher.find() ? "focus_auth" : "focus_val";
    matcher = patternRP.matcher(to);
    String field2 = matcher.find() ? "focus_auth" : "focus_val";
    // Escape backslashes and double quotes so user-supplied values cannot break
    // out of the quoted phrase (Solr query injection / syntax errors).
    String safeFrom = from.replace("\\", "\\\\").replace("\"", "\\\"");
    String safeTo = to.replace("\\", "\\\\").replace("\"", "\\\"");
    solrQuery.setQuery(field1 + ":\"" + safeFrom + "\" AND " + field2 + ":\"" + safeTo + "\"");
    solrQuery.setFacet(true);
    solrQuery.addFacetField("type");
    solrQuery.setFacetLimit(Integer.MAX_VALUE);
    solrQuery.setFacetMinCount(1);
    solrQuery.setRows(0); // only facet counts are needed, skip document retrieval
    QueryResponse rsp = service.search(solrQuery);
    FacetField facets = rsp.getFacetField("type");
    for (Count facet : facets.getValues()) {
        result.put(facet.getName(), Integer.valueOf((int) facet.getCount()));
    }
    return result;
}
From source file:org.dspace.authority.AuthoritySolrServiceImpl.java
License:BSD License
/** * Retrieves all the metadata fields which are indexed in the authority control * @return a list of metadata fields/*from www . j a va 2s .c o m*/ */ public List<String> getAllIndexedMetadataFields() throws Exception { SolrQuery solrQuery = new SolrQuery(); solrQuery.setQuery("*:*"); solrQuery.setFacet(true); solrQuery.addFacetField("field"); QueryResponse response = getSolr().query(solrQuery); List<String> results = new ArrayList<String>(); FacetField facetField = response.getFacetField("field"); if (facetField != null) { List<FacetField.Count> values = facetField.getValues(); if (values != null) { for (FacetField.Count facetValue : values) { if (facetValue != null && facetValue.getName() != null) { results.add(facetValue.getName()); } } } } return results; }
From source file:org.dspace.statistics.SolrLogger.java
License:BSD License
/**
 * Runs a statistics query against the shared Solr core, optionally faceting by
 * date, by one field, and/or by a list of facet queries, and filtering out
 * spider/bot traffic according to configuration.
 *
 * @param query        the main Solr query string
 * @param filterQuery  optional extra filter query (fq); may be null
 * @param facetField   optional field to facet on; may be null
 * @param rows         number of result rows to request
 * @param max          facet limit; -1 leaves the Solr default
 * @param dateType     date unit (e.g. MONTH); non-null enables the facet.date params on "time"
 * @param dateStart    offset combined with dateType into "NOW/&lt;type&gt;&lt;start&gt;&lt;type&gt;S" for facet.date.start
 * @param dateEnd      offset combined with dateType into "NOW/&lt;type&gt;&lt;end&gt;&lt;type&gt;" for facet.date.end
 * @param facetQueries optional facet queries; faceting is switched on when the list is non-empty
 * @param sort         optional sort field; may be null
 * @param ascending    sort direction, used only when sort is non-null
 * @return the Solr response, or null when no Solr connection is available
 * @throws SolrServerException if the query fails (logged with the query text, then rethrown)
 *
 * NOTE(review): the bundle filter below appends "-bundleName:&lt;x&gt;" directly after
 * "-(bundleName:[* TO *]" with no space or operator before the first clause —
 * verify the generated fq parses as intended.
 */
public static QueryResponse query(String query, String filterQuery, String facetField, int rows, int max, String dateType, String dateStart, String dateEnd, List<String> facetQueries, String sort, boolean ascending) throws SolrServerException { if (solr == null) { return null; }// w w w .j a va2s . c om // System.out.println("QUERY"); SolrQuery solrQuery = new SolrQuery().setRows(rows).setQuery(query).setFacetMinCount(1); addAdditionalSolrYearCores(solrQuery); // Set the date facet if present if (dateType != null) { solrQuery.setParam("facet.date", "time"). // EXAMPLE: NOW/MONTH+1MONTH setParam("facet.date.end", "NOW/" + dateType + dateEnd + dateType) .setParam("facet.date.gap", "+1" + dateType). // EXAMPLE: NOW/MONTH-" + nbMonths + "MONTHS setParam("facet.date.start", "NOW/" + dateType + dateStart + dateType + "S").setFacet(true); } if (facetQueries != null) { for (int i = 0; i < facetQueries.size(); i++) { String facetQuery = facetQueries.get(i); solrQuery.addFacetQuery(facetQuery); } if (0 < facetQueries.size()) { solrQuery.setFacet(true); } } if (facetField != null) { solrQuery.addFacetField(facetField); } // Set the top x of if present if (max != -1) { solrQuery.setFacetLimit(max); } // A filter is used instead of a regular query to improve // performance and ensure the search result ordering will // not be influenced // Choose to filter by the Legacy spider IP list (may get too long to properly filter all IP's if (ConfigurationManager.getBooleanProperty("solr-statistics", "query.filter.spiderIp", false)) { solrQuery.addFilterQuery(getIgnoreSpiderIPs()); } // Choose to filter by isBot field, may be overriden in future // to allow views on stats based on bots. if (ConfigurationManager.getBooleanProperty("solr-statistics", "query.filter.isBot", true)) { solrQuery.addFilterQuery("-isBot:true"); } if (sort != null) { solrQuery.setSortField(sort, (ascending ? 
SolrQuery.ORDER.asc : SolrQuery.ORDER.desc)); } String bundles; if ((bundles = ConfigurationManager.getProperty("solr-statistics", "query.filter.bundles")) != null && 0 < bundles.length()) { /** * The code below creates a query that will allow only records which do not have a bundlename * (items, collections, ...) or bitstreams that have a configured bundle name */ StringBuffer bundleQuery = new StringBuffer(); //Also add the possibility that if no bundle name is there these results will also be returned ! bundleQuery.append("-(bundleName:[* TO *]"); String[] split = bundles.split(","); for (int i = 0; i < split.length; i++) { String bundle = split[i].trim(); bundleQuery.append("-bundleName:").append(bundle); if (i != split.length - 1) { bundleQuery.append(" AND "); } } bundleQuery.append(")"); solrQuery.addFilterQuery(bundleQuery.toString()); } if (filterQuery != null) { solrQuery.addFilterQuery(filterQuery); } QueryResponse response; try { // solr.set response = solr.query(solrQuery); } catch (SolrServerException e) { System.err.println("Error using query " + query); throw e; } return response; }
From source file:org.dspace.statistics.SolrLogger.java
License:BSD License
/**
 * Shards the single statistics core into one core per year ("statistics-" + year).
 *
 * Steps, as implemented below:
 *  1. Facet the core by a yearly range on "time", from the year 2000 up to but
 *     excluding the current year (FACET_RANGE_END is "NOW/YEAR+0YEARS").
 *  2. For each year bucket with at least one record, build a time filter query,
 *     page through the matching documents 10000 at a time via a raw HTTP GET
 *     with wt=csv, and write each page to a temp file under {dspace.dir}/temp.
 *  3. Create the year core, upload the CSV files through /update/csv with a
 *     commit, then delete the moved records from the source core and commit it.
 * The temp directory is deleted after all years are processed.
 *
 * NOTE(review): a new DefaultHttpClient is created per batch and never closed,
 * and the response entity stream is not explicitly closed — potential resource
 * leak on large migrations.
 */
public static void shardSolrIndex() throws IOException, SolrServerException { /*//from ww w . j av a 2s . c om Start by faceting by year so we can include each year in a separate core ! */ SolrQuery yearRangeQuery = new SolrQuery(); yearRangeQuery.setQuery("*:*"); yearRangeQuery.setRows(0); yearRangeQuery.setFacet(true); yearRangeQuery.add(FacetParams.FACET_RANGE, "time"); //We go back to 2000 the year 2000, this is a bit overkill but this way we ensure we have everything //The alternative would be to sort but that isn't recommended since it would be a very costly query ! yearRangeQuery.add(FacetParams.FACET_RANGE_START, "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS"); //Add the +0year to ensure that we DO NOT include the current year yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS"); yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR"); yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1)); //Create a temp directory to store our files in ! File tempDirectory = new File( ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator); tempDirectory.mkdirs(); QueryResponse queryResponse = solr.query(yearRangeQuery); //We only have one range query ! List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts(); for (RangeFacet.Count count : yearResults) { long totalRecords = count.getCount(); //Create a range query from this ! //We start with out current year DCDate dcStart = new DCDate(count.getValue()); Calendar endDate = Calendar.getInstance(); //Advance one year for the start of the next one ! 
endDate.setTime(dcStart.toDate()); endDate.add(Calendar.YEAR, 1); DCDate dcEndDate = new DCDate(endDate.getTime()); StringBuilder filterQuery = new StringBuilder(); filterQuery.append("time:(["); filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString())); filterQuery.append(" TO "); filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString())); filterQuery.append("]"); //The next part of the filter query excludes the content from midnight of the next year ! filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString())); filterQuery.append(")"); Map<String, String> yearQueryParams = new HashMap<String, String>(); yearQueryParams.put(CommonParams.Q, "*:*"); yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000)); yearQueryParams.put(CommonParams.FQ, filterQuery.toString()); yearQueryParams.put(CommonParams.WT, "csv"); //Start by creating a new core String coreName = "statistics-" + dcStart.getYear(); HttpSolrServer statisticsYearServer = createCore(solr, coreName); System.out.println("Moving: " + totalRecords + " into core " + coreName); log.info("Moving: " + totalRecords + " records into core " + coreName); List<File> filesToUpload = new ArrayList<File>(); for (int i = 0; i < totalRecords; i += 10000) { String solrRequestUrl = solr.getBaseURL() + "/select"; solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams); HttpGet get = new HttpGet(solrRequestUrl); HttpResponse response = new DefaultHttpClient().execute(get); InputStream csvInputstream = response.getEntity().getContent(); //Write the csv ouput to a file ! File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear() + "." 
+ i + ".csv"); FileUtils.copyInputStreamToFile(csvInputstream, csvFile); filesToUpload.add(csvFile); //Add 10000 & start over again yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000))); } for (File tempCsv : filesToUpload) { //Upload the data in the csv files to our new solr core ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest( "/update/csv"); contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8"); contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8"); statisticsYearServer.request(contentStreamUpdateRequest); } statisticsYearServer.commit(true, true); //Delete contents of this year from our year query ! solr.deleteByQuery(filterQuery.toString()); solr.commit(true, true); log.info("Moved " + totalRecords + " records into core: " + coreName); } FileUtils.deleteDirectory(tempDirectory); }
From source file:org.dspace.statistics.SolrLoggerServiceImpl.java
License:BSD License
/**
 * Runs a statistics query against the shared Solr core, optionally faceting by
 * date, by one field, and/or by a list of facet queries, and filtering out
 * spider/bot traffic according to configuration (configurationService-based
 * variant of the legacy SolrLogger implementation).
 *
 * @param query        the main Solr query string
 * @param filterQuery  optional extra filter query (fq); may be null
 * @param facetField   optional field to facet on; may be null
 * @param rows         number of result rows to request
 * @param max          facet limit; -1 leaves the Solr default
 * @param dateType     date unit (e.g. MONTH); non-null enables the facet.date params on "time"
 * @param dateStart    offset combined with dateType into "NOW/&lt;type&gt;&lt;start&gt;&lt;type&gt;S" for facet.date.start
 * @param dateEnd      offset combined with dateType into "NOW/&lt;type&gt;&lt;end&gt;&lt;type&gt;" for facet.date.end
 * @param facetQueries optional facet queries; faceting is switched on when the list is non-empty
 * @param sort         optional sort field; may be null
 * @param ascending    sort direction, used only when sort is non-null
 * @return the Solr response, or null when no Solr connection is available
 * @throws SolrServerException if the query fails (logged with the query text, then rethrown)
 *
 * NOTE(review): the bundle filter below appends "-bundleName:&lt;x&gt;" directly after
 * "-(bundleName:[* TO *]" with no space or operator before the first clause —
 * verify the generated fq parses as intended.
 */
@Override public QueryResponse query(String query, String filterQuery, String facetField, int rows, int max, String dateType, String dateStart, String dateEnd, List<String> facetQueries, String sort, boolean ascending) throws SolrServerException { if (solr == null) { return null; }//ww w. j a v a 2 s. c o m // System.out.println("QUERY"); SolrQuery solrQuery = new SolrQuery().setRows(rows).setQuery(query).setFacetMinCount(1); addAdditionalSolrYearCores(solrQuery); // Set the date facet if present if (dateType != null) { solrQuery.setParam("facet.date", "time"). // EXAMPLE: NOW/MONTH+1MONTH setParam("facet.date.end", "NOW/" + dateType + dateEnd + dateType) .setParam("facet.date.gap", "+1" + dateType). // EXAMPLE: NOW/MONTH-" + nbMonths + "MONTHS setParam("facet.date.start", "NOW/" + dateType + dateStart + dateType + "S").setFacet(true); } if (facetQueries != null) { for (int i = 0; i < facetQueries.size(); i++) { String facetQuery = facetQueries.get(i); solrQuery.addFacetQuery(facetQuery); } if (0 < facetQueries.size()) { solrQuery.setFacet(true); } } if (facetField != null) { solrQuery.addFacetField(facetField); } // Set the top x of if present if (max != -1) { solrQuery.setFacetLimit(max); } // A filter is used instead of a regular query to improve // performance and ensure the search result ordering will // not be influenced // Choose to filter by the Legacy spider IP list (may get too long to properly filter all IP's if (configurationService.getBooleanProperty("solr-statistics.query.filter.spiderIp", false)) { solrQuery.addFilterQuery(getIgnoreSpiderIPs()); } // Choose to filter by isBot field, may be overriden in future // to allow views on stats based on bots. if (configurationService.getBooleanProperty("solr-statistics.query.filter.isBot", true)) { solrQuery.addFilterQuery("-isBot:true"); } if (sort != null) { solrQuery.setSortField(sort, (ascending ? 
SolrQuery.ORDER.asc : SolrQuery.ORDER.desc)); } String[] bundles = configurationService.getArrayProperty("solr-statistics.query.filter.bundles"); if (bundles != null && bundles.length > 0) { /** * The code below creates a query that will allow only records which do not have a bundlename * (items, collections, ...) or bitstreams that have a configured bundle name */ StringBuffer bundleQuery = new StringBuffer(); //Also add the possibility that if no bundle name is there these results will also be returned ! bundleQuery.append("-(bundleName:[* TO *]"); for (int i = 0; i < bundles.length; i++) { String bundle = bundles[i].trim(); bundleQuery.append("-bundleName:").append(bundle); if (i != bundles.length - 1) { bundleQuery.append(" AND "); } } bundleQuery.append(")"); solrQuery.addFilterQuery(bundleQuery.toString()); } if (filterQuery != null) { solrQuery.addFilterQuery(filterQuery); } QueryResponse response; try { // solr.set response = solr.query(solrQuery); } catch (SolrServerException e) { System.err.println("Error using query " + query); throw e; } return response; }
From source file:org.dspace.statistics.SolrLoggerServiceImpl.java
License:BSD License
/**
 * Shards the single statistics core into one core per year ("statistics-" + year).
 *
 * Steps, as implemented below:
 *  1. Facet the core by a yearly range on "time", from the year 2000 up to but
 *     excluding the current year (FACET_RANGE_END is "NOW/YEAR+0YEARS").
 *  2. For each year bucket with at least one record, build a time filter query,
 *     page through the matching documents 10000 at a time via a raw HTTP GET
 *     with wt=csv, and write each page to a temp file under {dspace.dir}/temp.
 *  3. Create the year core, upload the CSV files through /update/csv (skipping
 *     the "_version_" field) with a commit, then delete the moved records from
 *     the source core and commit it.
 * The temp directory is deleted after all years are processed.
 *
 * NOTE(review): a new DefaultHttpClient is created per batch and never closed,
 * and the response entity stream is not explicitly closed — potential resource
 * leak on large migrations.
 */
@Override public void shardSolrIndex() throws IOException, SolrServerException { /*/*ww w.ja v a 2 s. c o m*/ Start by faceting by year so we can include each year in a separate core ! */ SolrQuery yearRangeQuery = new SolrQuery(); yearRangeQuery.setQuery("*:*"); yearRangeQuery.setRows(0); yearRangeQuery.setFacet(true); yearRangeQuery.add(FacetParams.FACET_RANGE, "time"); //We go back to 2000 the year 2000, this is a bit overkill but this way we ensure we have everything //The alternative would be to sort but that isn't recommended since it would be a very costly query ! yearRangeQuery.add(FacetParams.FACET_RANGE_START, "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS"); //Add the +0year to ensure that we DO NOT include the current year yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS"); yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR"); yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1)); //Create a temp directory to store our files in ! File tempDirectory = new File( configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator); tempDirectory.mkdirs(); QueryResponse queryResponse = solr.query(yearRangeQuery); //We only have one range query ! List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts(); for (RangeFacet.Count count : yearResults) { long totalRecords = count.getCount(); //Create a range query from this ! //We start with out current year DCDate dcStart = new DCDate(count.getValue()); Calendar endDate = Calendar.getInstance(); //Advance one year for the start of the next one ! 
endDate.setTime(dcStart.toDate()); endDate.add(Calendar.YEAR, 1); DCDate dcEndDate = new DCDate(endDate.getTime()); StringBuilder filterQuery = new StringBuilder(); filterQuery.append("time:(["); filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString())); filterQuery.append(" TO "); filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString())); filterQuery.append("]"); //The next part of the filter query excludes the content from midnight of the next year ! filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString())); filterQuery.append(")"); Map<String, String> yearQueryParams = new HashMap<String, String>(); yearQueryParams.put(CommonParams.Q, "*:*"); yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000)); yearQueryParams.put(CommonParams.FQ, filterQuery.toString()); yearQueryParams.put(CommonParams.WT, "csv"); //Start by creating a new core String coreName = "statistics-" + dcStart.getYear(); HttpSolrServer statisticsYearServer = createCore(solr, coreName); System.out.println("Moving: " + totalRecords + " into core " + coreName); log.info("Moving: " + totalRecords + " records into core " + coreName); List<File> filesToUpload = new ArrayList<File>(); for (int i = 0; i < totalRecords; i += 10000) { String solrRequestUrl = solr.getBaseURL() + "/select"; solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams); HttpGet get = new HttpGet(solrRequestUrl); HttpResponse response = new DefaultHttpClient().execute(get); InputStream csvInputstream = response.getEntity().getContent(); //Write the csv ouput to a file ! File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear() + "." 
+ i + ".csv"); FileUtils.copyInputStreamToFile(csvInputstream, csvFile); filesToUpload.add(csvFile); //Add 10000 & start over again yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000))); } for (File tempCsv : filesToUpload) { //Upload the data in the csv files to our new solr core ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest( "/update/csv"); contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8"); contentStreamUpdateRequest.setParam("skip", "_version_"); contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8"); statisticsYearServer.request(contentStreamUpdateRequest); } statisticsYearServer.commit(true, true); //Delete contents of this year from our year query ! solr.deleteByQuery(filterQuery.toString()); solr.commit(true, true); log.info("Moved " + totalRecords + " records into core: " + coreName); } FileUtils.deleteDirectory(tempDirectory); }
From source file:org.dspace.util.SolrImportExport.java
License:BSD License
// NOTE(review): SOLR_DATE_FORMAT / SOLR_DATE_FORMAT_NO_MS are used below for
// formatting and parsing; if they are shared SimpleDateFormat instances they
// are not thread-safe — confirm this method is only invoked single-threaded.
// NOTE(review): the per-month filter uses an inclusive upper bound
// ("[monthStart TO monthStart+1MONTH]"), so a document stamped exactly on the
// boundary could be exported into two adjacent month batches — verify.
/** * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents). * See #makeExportFilename for the file names that are generated. * * @param indexName The index to export. * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable. * @param solrUrl The solr URL for the index to export. Must not be null. * @param timeField The time field to use for sorting the export. Must not be null. * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported. * @throws SolrServerException if there is a problem with exporting the index. * @throws IOException if there is a problem creating the files or communicating with Solr. * @throws SolrImportExportException if there is a problem in communicating with Solr. *//*from w w w . ja va2 s. c om*/ public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen) throws SolrServerException, IOException, SolrImportExportException { if (StringUtils.isBlank(solrUrl)) { throw new SolrImportExportException( "Could not construct solr URL for index" + indexName + ", aborting export."); } if (!toDir.exists() || !toDir.canWrite()) { throw new SolrImportExportException("Target directory " + toDir + " doesn't exist or is not writable, aborting export of index " + indexName); } HttpSolrServer solr = new HttpSolrServer(solrUrl); SolrQuery query = new SolrQuery("*:*"); if (StringUtils.isNotBlank(fromWhen)) { String lastValueFilter = makeFilterQuery(timeField, fromWhen); if (StringUtils.isNotBlank(lastValueFilter)) { query.addFilterQuery(lastValueFilter); } } query.setRows(0); query.setGetFieldStatistics(timeField); Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo(); if (fieldInfo == null || !fieldInfo.containsKey(timeField)) { log.warn("Cannot get earliest 
date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen); return; } FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField); if (timeFieldInfo == null || timeFieldInfo.getMin() == null) { log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen); return; } Date earliestTimestamp = (Date) timeFieldInfo.getMin(); query.setGetFieldStatistics(false); query.clearSorts(); query.setRows(0); query.setFacet(true); query.add(FacetParams.FACET_RANGE, timeField); query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH"); query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH"); query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH"); query.setFacetMinCount(1); List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts(); for (RangeFacet.Count monthFacet : monthFacets) { Date monthStartDate; String monthStart = monthFacet.getValue(); try { monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart); } catch (java.text.ParseException e) { throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart, e); } int docsThisMonth = monthFacet.getCount(); SolrQuery monthQuery = new SolrQuery("*:*"); monthQuery.setRows(ROWS_PER_FILE); monthQuery.set("wt", "csv"); monthQuery.set("fl", "*"); monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]"); for (int i = 0; i < docsThisMonth; i += ROWS_PER_FILE) { monthQuery.setStart(i); URL url = new URL(solrUrl + "/select?" 
+ monthQuery.toString()); File file = new File(toDir.getCanonicalPath(), makeExportFilename(indexName, monthStartDate, docsThisMonth, i)); if (file.createNewFile()) { FileUtils.copyURLToFile(url, file); log.info("Exported batch " + i + " to " + file.getCanonicalPath()); } else { throw new SolrImportExportException("Could not create file " + file.getCanonicalPath() + " while exporting index " + indexName + ", month" + monthStart + ", batch " + i); } } } }
From source file:org.dspace.util.SolrUpgradePre6xStatistics.java
License:BSD License
/**
 * Runs the migration report facet query and prints a per-type breakdown of
 * record counts to stdout (tab-indented, thousands-separated).
 *
 * Facets the migration query on "type" (view events) and "scopeType" (search
 * events), maps the numeric ids to human-readable labels, groups unexpected
 * values into one "Unexpected Type & Full Site" line and any remaining
 * records into an "Other Records" line.
 *
 * @return the total number of records accounted for by the report
 * @throws SolrServerException if the Solr query fails
 */
private long runReportQuery() throws SolrServerException {
    SolrQuery sQ = new SolrQuery();
    sQ.setQuery(new StringBuilder(MIGQUERY).toString());
    sQ.setFacet(true);
    sQ.addFacetField("type");
    sQ.addFacetField("scopeType");
    QueryResponse sr = server.query(sQ);
    long total = 0;
    long unexpected = 0;
    for (FacetField ff : sr.getFacetFields()) {
        // "type" facet counts view events; "scopeType" counts search events.
        String s = ff.getName().equals("type") ? "View" : "Search";
        for (FacetField.Count count : ff.getValues()) {
            String name = count.getName();
            int id;
            try {
                id = Integer.parseInt(name);
            } catch (NumberFormatException e) {
                // Non-numeric facet values have been observed in the wild
                // (likely CSV import/export artifacts); group them with the
                // other unexpected values instead of crashing the report.
                unexpected += count.getCount();
                continue;
            }
            if (id == Constants.COMMUNITY) {
                name = "Community " + s;
            } else if (id == Constants.COLLECTION) {
                name = "Collection " + s;
            } else if (id == Constants.ITEM) {
                name = "Item " + s;
            } else if (id == Constants.BITSTREAM) {
                name = "Bitstream " + s; // fixed typo: was "Bistream"
            } else {
                /*
                 * In testing, some unexpected values appeared in the scopeType
                 * field (possibly a CSV import/export error). Group any
                 * unexpected values into one report line.
                 */
                unexpected += count.getCount();
                continue;
            }
            System.out.println(String.format("\t%,12d\t%s", count.getCount(), name));
            total += count.getCount();
        }
    }
    if (unexpected > 0) {
        System.out.println(String.format("\t%,12d\t%s", unexpected, "Unexpected Type & Full Site"));
        total += unexpected;
    }
    // Records not covered by either facet are reported as a single line.
    long rem = sr.getResults().getNumFound() - total;
    if (rem > 0) {
        System.out.println(String.format("\t%,12d\t%s", rem, "Other Records"));
        total += rem;
    }
    return total;
}
From source file:org.emonocot.persistence.dao.hibernate.SearchableDaoImpl.java
License:Open Source License
// NOTE(review): every facet ending in "_dt" gets a fixed date-range window
// (NOW/DAY-1YEARS to NOW/DAY, gap +1MONTH) — variable buckets are blocked on
// SOLR-2366, per the comment in the body. Confirm the fixed window is still
// the desired behaviour.
// NOTE(review): selectedFacets values are passed to prepareQuery and facet
// prefixes are applied verbatim; this method assumes they are already valid
// Solr syntax.
/** * @param query/*from www .j a v a 2s . c o m*/ * A lucene query * @param spatialQuery * A spatial query to filter the results by * @param pageSize * The maximum number of results to return * @param pageNumber * The offset (in pageSize chunks, 0-based) from the beginning of * the recordset * @param facets * The names of the facets you want to calculate * @param selectedFacets * A map of facets which you would like to restrict the search by * @param sort * A representation for the order results should be returned in * @param fetch * Set the fetch profile * @return a Page from the resultset * @throws SolrServerException */ public final Page<T> search(final String query, final String spatialQuery, final Integer pageSize, final Integer pageNumber, final String[] facets, Map<String, String> facetPrefixes, final Map<String, String> selectedFacets, final String sort, final String fetch) throws SolrServerException { SolrQuery solrQuery = prepareQuery(query, sort, pageSize, pageNumber, selectedFacets); solrQuery.set("spellcheck", "true"); solrQuery.set("spellcheck.collate", "true"); solrQuery.set("spellcheck.count", "1"); solrQuery.set("bq", "base.class_s:org.emonocot.model.Taxon^2.0"); // Filter the searchable objects out solrQuery.addFilterQuery("base.class_searchable_b:" + isSearchableObject()); if (spatialQuery != null && spatialQuery.trim().length() != 0) { solrQuery.addFilterQuery(spatialQuery); } if (facets != null && facets.length != 0) { solrQuery.setFacet(true); solrQuery.setFacetMinCount(1); solrQuery.setFacetSort(FacetParams.FACET_SORT_INDEX); for (String facet : facets) { if (facet.equals("base.class_s")) { solrQuery.setParam("f.base.class_s.facet.sort", FacetParams.FACET_SORT_COUNT); } if (facet.endsWith("_dt")) { /** * Is a date facet. Once Solr 4.2 is released, we can implement variable length buckets, but for now * stick with fixed buckets https://issues.apache.org/jira/browse/SOLR-2366 */ solrQuery.add("facet.range", facet); solrQuery.add("f." 
+ facet + ".facet.range.start", "NOW/DAY-1YEARS"); solrQuery.add("f." + facet + ".facet.range.end", "NOW/DAY"); solrQuery.add("f." + facet + ".facet.range.gap", "+1MONTH"); } else { solrQuery.addFacetField(facet); } includeMissing(solrQuery, facet); } if (facetPrefixes != null) { for (String facet : facetPrefixes.keySet()) { solrQuery.add("f." + facet + ".facet.prefix", facetPrefixes.get(facet)); } } } QueryResponse queryResponse = solrServer.query(solrQuery); List<T> results = new ArrayList<T>(); for (SolrDocument solrDocument : queryResponse.getResults()) { T object = loadObjectForDocument(solrDocument); enableProfilePostQuery(object, fetch); results.add(object); } Long totalResults = new Long(queryResponse.getResults().getNumFound()); Page<T> page = new DefaultPageImpl<T>(totalResults.intValue(), pageNumber, pageSize, results, queryResponse); if (selectedFacets != null) { page.setSelectedFacets(selectedFacets); } page.setSort(sort); return page; }
From source file:org.emonocot.persistence.dao.hibernate.SearchableDaoImpl.java
License:Open Source License
/**
 * Builds an OLAP-style CellSet by faceting the index along the requested row
 * and column levels of the given cube.
 *
 * Issues two Solr queries: a main query ("query") that produces the row/column
 * facets (and, when both rows and cols are given, a facet pivot "rows,cols"),
 * and a "totalQuery" whose aliased facets ("{!key=totalRows}"/"{!key=totalCols}")
 * yield the untruncated row/column counts used for paging. Three cases:
 * rows == null facets on the cube's default level; cols == null facets on rows
 * only; otherwise both dimensions plus the pivot. For multi-valued levels with
 * a higher level, a facet.prefix of "&lt;higherValue&gt;_" restricts the buckets.
 * Each selected facet becomes a filter query on both queries; an empty facet
 * value instead excludes documents having any value for that facet
 * ("-name:[* TO *]"). Additional requested facets are added unless they
 * duplicate the row/column/default level.
 *
 * @param rows          row-level field, or null to use the cube's default level
 * @param cols          column-level field, or null for a single-dimension result
 * @param firstCol      first column offset (currently unused for paging — see the
 *                      commented-out facet.offset block on cols)
 * @param maxCols       maximum number of columns; enables the totalCols facet
 * @param firstRow      facet.offset applied to the row facet
 * @param maxRows       facet.limit applied to the row facet; enables totalRows
 * @param selectedFacets facet name to value restrictions; may be null/empty
 * @param facets        additional facet fields to compute
 * @param cube          cube metadata providing levels and the default level
 * @return the assembled CellSet
 * @throws SolrServerException if either Solr query fails
 *
 * NOTE(review): when a level is multi-valued, selectedFacets.get(higher.getFacet())
 * is dereferenced without a null check — confirm callers always pre-select the
 * higher level in that case.
 */
public CellSet analyse(String rows, String cols, Integer firstCol, Integer maxCols, Integer firstRow, Integer maxRows, Map<String, String> selectedFacets, String[] facets, Cube cube) throws SolrServerException { SolrQuery query = new SolrQuery(); query.setQuery("*:*"); SolrQuery totalQuery = new SolrQuery(); totalQuery.setQuery("*:*"); // We're not interested in the results themselves query.setRows(1);//from w w w. j a v a2 s . c o m query.setStart(0); totalQuery.setRows(1); totalQuery.setStart(0); if (rows == null) { query.setFacet(true); query.setFacetMinCount(1); query.setFacetSort(FacetParams.FACET_SORT_INDEX); query.addFacetField(cube.getDefaultLevel()); includeMissing(query, cube.getDefaultLevel()); includeMissing(totalQuery, cube.getDefaultLevel()); if (maxRows != null) { totalQuery.setFacet(true); totalQuery.setFacetMinCount(1); totalQuery.addFacetField("{!key=totalRows}" + cube.getDefaultLevel()); query.add("f." + cube.getDefaultLevel() + ".facet.limit", maxRows.toString()); query.add("f." + cube.getDefaultLevel() + ".facet.mincount", "1"); if (firstRow != null) { query.add("f." + cube.getDefaultLevel() + ".facet.offset", firstRow.toString()); } } } else if (cols == null) { query.setFacet(true); query.setFacetMinCount(1); query.setFacetSort(FacetParams.FACET_SORT_INDEX); query.addFacetField(rows); includeMissing(query, rows); includeMissing(totalQuery, rows); if (maxRows != null) { totalQuery.setFacet(true); totalQuery.setFacetMinCount(1); totalQuery.addFacetField("{!key=totalRows}" + rows); query.add("f." + rows + ".facet.limit", maxRows.toString()); query.add("f." + rows + ".facet.mincount", "1"); if (firstRow != null) { query.add("f." + rows + ".facet.offset", firstRow.toString()); } } if (cube.getLevel(rows).isMultiValued() && cube.getLevel(rows).getHigher() != null) { Level higher = cube.getLevel(rows).getHigher(); totalQuery.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_"); query.add("f." 
+ rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_"); } } else { query.setFacet(true); query.setFacetMinCount(1); query.setFacetSort(FacetParams.FACET_SORT_INDEX); query.addFacetField(rows); includeMissing(query, rows); includeMissing(totalQuery, rows); if (maxRows != null) { totalQuery.setFacet(true); totalQuery.setFacetMinCount(1); totalQuery.addFacetField("{!key=totalRows}" + rows); query.add("f." + rows + ".facet.limit", maxRows.toString()); query.add("f." + rows + ".facet.mincount", "1"); if (firstRow != null) { query.add("f." + rows + ".facet.offset", firstRow.toString()); } } if (cube.getLevel(rows).isMultiValued() && cube.getLevel(rows).getHigher() != null) { Level higher = cube.getLevel(rows).getHigher(); totalQuery.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_"); query.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_"); } query.addFacetField(cols); includeMissing(query, cols); if (maxCols != null) { totalQuery.setFacet(true); totalQuery.setFacetMinCount(1); totalQuery.addFacetField("{!key=totalCols}" + cols); /** * Facet pivot does not behave the same way on columns - the limit is */ //query.add("f." + cols + ".facet.limit", maxCols.toString()); //query.add("f." + cols + ".facet.mincount", "1"); //if (firstCol != null) { // query.add("f." + cols + ".facet.offset", firstCol.toString()); //} } if (cube.getLevel(cols).isMultiValued() && cube.getLevel(cols).getHigher() != null) { Level higher = cube.getLevel(cols).getHigher(); totalQuery.add("f." + cols + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_"); query.add("f." 
+ cols + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_"); } query.addFacetPivotField(rows + "," + cols); } if (selectedFacets != null && !selectedFacets.isEmpty()) { for (String facetName : selectedFacets.keySet()) { String facetValue = selectedFacets.get(facetName); if (StringUtils.isNotEmpty(facetValue)) { totalQuery.addFilterQuery(facetName + ":" + selectedFacets.get(facetName)); query.addFilterQuery(facetName + ":" + selectedFacets.get(facetName)); } else {//Subtract/Exclude documents with any value for the facet totalQuery.addFilterQuery("-" + facetName + ":[* TO *]"); query.addFilterQuery("-" + facetName + ":[* TO *]"); } } } if (facets != null && facets.length != 0) { query.setFacet(true); query.setFacetMinCount(1); query.setFacetSort(FacetParams.FACET_SORT_INDEX); for (String facetName : facets) { if (rows != null && rows.equals(facetName)) { } else if (cols != null && cols.equals(facetName)) { } else if (rows == null && facetName.equals(cube.getDefaultLevel())) { } else { includeMissing(query, facetName); query.addFacetField(facetName); } } } QueryResponse response = solrServer.query(query); QueryResponse totalResponse = solrServer.query(totalQuery); FacetField totalRows = null; FacetField totalCols = null; if (totalResponse.getFacetField("totalRows") != null) { totalRows = totalResponse.getFacetField("totalRows"); } if (totalResponse.getFacetField("totalCols") != null) { totalCols = totalResponse.getFacetField("totalCols"); } CellSet cellSet = new CellSet(response, selectedFacets, query, rows, cols, firstRow, maxRows, firstCol, maxCols, totalRows, totalCols, cube); return cellSet; }