Example usage for org.apache.solr.client.solrj SolrQuery setRows

List of usage examples for org.apache.solr.client.solrj SolrQuery setRows

Introduction

This page collects example usages of org.apache.solr.client.solrj SolrQuery setRows.

Prototype

public SolrQuery setRows(Integer rows) 
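
For orientation, here is a minimal, self-contained sketch of the usual pagination idiom built on setRows together with setStart. The Solr URL and core name are placeholders, and the HttpSolrServer client matches the SolrJ era used by the examples below:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;

public class SetRowsExample {
    public static void main(String[] args) throws SolrServerException {
        // Placeholder URL; point this at a real core.
        HttpSolrServer solr = new HttpSolrServer("http://localhost:8983/solr/collection1");

        SolrQuery query = new SolrQuery("*:*");
        query.setStart(0); // offset of the first document to return
        query.setRows(50); // page size: return at most 50 documents

        QueryResponse response = solr.query(query);
        System.out.println("Total hits: " + response.getResults().getNumFound());

        // setRows(0) is a common idiom when only the hit count or facet
        // counts are needed, as several of the examples below show.
        query.setRows(0);
    }
}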

Usage

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

@Override
public void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit of overkill, but this way we ensure we have everything
    //The alternative would be to sort, but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the CSV output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setParam("skip", "_version_");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

@Override
public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
    Context context = new Context();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.addFilterQuery("type:" + Constants.BITSTREAM);
        //Only retrieve records which do not have a bundle name
        query.addFilterQuery("-bundleName:[* TO *]");
        query.setRows(0);
        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();

        File tempDirectory = new File(
                configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
        tempDirectory.mkdirs();
        List<File> tempCsvFiles = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            Map<String, String> params = new HashMap<String, String>();
            params.put(CommonParams.Q, "*:*");
            params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
            params.put(CommonParams.WT, "csv");
            params.put(CommonParams.ROWS, String.valueOf(10000));
            params.put(CommonParams.START, String.valueOf(i));

            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, params);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);

            InputStream csvOutput = response.getEntity().getContent();
            Reader csvReader = new InputStreamReader(csvOutput);
            List<String[]> rows = new CSVReader(csvReader).readAll();
            String[][] csvParsed = rows.toArray(new String[rows.size()][]);
            String[] header = csvParsed[0];
            //Attempt to find the bitstream id index !
            int idIndex = 0;
            for (int j = 0; j < header.length; j++) {
                if (header[j].equals("id")) {
                    idIndex = j;
                }
            }

            File tempCsv = new File(tempDirectory.getPath() + File.separatorChar + "temp." + i + ".csv");
            tempCsvFiles.add(tempCsv);
            CSVWriter csvp = new CSVWriter(new FileWriter(tempCsv));
            //csvp.setAlwaysQuote(false);

            //Write the header !
            csvp.writeNext((String[]) ArrayUtils.add(header, "bundleName"));
            Map<String, String> bitBundleCache = new HashMap<>();
            //Loop over each line (skip the headers though)!
            for (int j = 1; j < csvParsed.length; j++) {
                String[] csvLine = csvParsed[j];
                //Write the default line !
                String bitstreamId = csvLine[idIndex];
                //Attempt to retrieve our bundle name from the cache !
                String bundleName = bitBundleCache.get(bitstreamId);
                if (bundleName == null) {
                    //Nothing found retrieve the bitstream
                    Bitstream bitstream = bitstreamService.findByIdOrLegacyId(context, bitstreamId);
                    //Attempt to retrieve our bitstream !
                    if (bitstream != null) {
                        List<Bundle> bundles = bitstream.getBundles();
                        if (bundles != null && 0 < bundles.size()) {
                            Bundle bundle = bundles.get(0);
                            bundleName = bundle.getName();
                        } else {
                            //No bundle found, we are either a collection or a community logo, check for it !
                            DSpaceObject parentObject = bitstreamService.getParentObject(context, bitstream);
                            if (parentObject instanceof Collection) {
                                bundleName = "LOGO-COLLECTION";
                            } else if (parentObject instanceof Community) {
                                bundleName = "LOGO-COMMUNITY";
                            }

                        }
                        //Cache the bundle name
                        bitBundleCache.put(bitstream.getID().toString(), bundleName);
                        //Remove the bitstream from cache
                    }
                    //Check if we still don't have a bundle name
                    //If we don't have one & we do not need to delete the deleted bitstreams, ensure that a BITSTREAM_DELETED bundle name is given !
                    if (bundleName == null && !removeDeletedBitstreams) {
                        bundleName = "BITSTREAM_DELETED";
                    }
                }
                csvp.writeNext((String[]) ArrayUtils.add(csvLine, bundleName));
            }

            //Loop over our parsed csv
            csvp.flush();
            csvp.close();
        }

        //Add all the separate csv files
        for (File tempCsv : tempCsvFiles) {
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        //Now that all our new bitstream stats are in place, delete all the old ones !
        solr.deleteByQuery("-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
        //Commit everything to wrap up
        solr.commit(true, true);
        //Clean up our directory !
        FileUtils.deleteDirectory(tempDirectory);
    } catch (Exception e) {
        log.error("Error while updating the bitstream statistics", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file:org.dspace.util.SolrImportExport.java

License:BSD License

/**
 * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
 * See #makeExportFilename for the file names that are generated.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
 */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
        throws SolrServerException, IOException, SolrImportExportException {
    if (StringUtils.isBlank(solrUrl)) {
        throw new SolrImportExportException(
                "Could not construct solr URL for index" + indexName + ", aborting export.");
    }

    if (!toDir.exists() || !toDir.canWrite()) {
        throw new SolrImportExportException("Target directory " + toDir
                + " doesn't exist or is not writable, aborting export of index " + indexName);
    }

    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    SolrQuery query = new SolrQuery("*:*");
    if (StringUtils.isNotBlank(fromWhen)) {
        String lastValueFilter = makeFilterQuery(timeField, fromWhen);
        if (StringUtils.isNotBlank(lastValueFilter)) {
            query.addFilterQuery(lastValueFilter);
        }
    }

    query.setRows(0);
    query.setGetFieldStatistics(timeField);
    Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
    if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
    if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    Date earliestTimestamp = (Date) timeFieldInfo.getMin();

    query.setGetFieldStatistics(false);
    query.clearSorts();
    query.setRows(0);
    query.setFacet(true);
    query.add(FacetParams.FACET_RANGE, timeField);
    query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
    query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
    query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
    query.setFacetMinCount(1);

    List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();

    for (RangeFacet.Count monthFacet : monthFacets) {
        Date monthStartDate;
        String monthStart = monthFacet.getValue();
        try {
            monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
        } catch (java.text.ParseException e) {
            throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart,
                    e);
        }
        int docsThisMonth = monthFacet.getCount();

        SolrQuery monthQuery = new SolrQuery("*:*");
        monthQuery.setRows(ROWS_PER_FILE);
        monthQuery.set("wt", "csv");
        monthQuery.set("fl", "*");

        monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]");

        for (int i = 0; i < docsThisMonth; i += ROWS_PER_FILE) {
            monthQuery.setStart(i);
            URL url = new URL(solrUrl + "/select?" + monthQuery.toString());

            File file = new File(toDir.getCanonicalPath(),
                    makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
            if (file.createNewFile()) {
                FileUtils.copyURLToFile(url, file);
                log.info("Exported batch " + i + " to " + file.getCanonicalPath());
            } else {
                throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
                        + " while exporting index " + indexName + ", month" + monthStart + ", batch " + i);
            }
        }
    }
}
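
As a usage sketch (the index name, target directory, and Solr URL below are illustrative assumptions; "time" matches the statistics time field used elsewhere on this page):

File exportDir = new File("/tmp/solr-export"); // hypothetical target; must exist and be writeable
exportDir.mkdirs();
// Passing null for fromWhen exports all documents, per the javadoc above.
SolrImportExport.exportIndex("statistics", exportDir, "http://localhost:8983/solr/statistics", "time", null);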

From source file:org.dspace.util.SolrUpgradePre6xStatistics.java

License:BSD License

private int updateRecords(String query) throws SolrServerException, SQLException, IOException {
    int initNumProcessed = numProcessed;
    SolrQuery sQ = new SolrQuery();
    sQ.setQuery(query);
    sQ.setRows(batchSize);

    // Ensure that items are grouped by id
    // Sorting by id fails due to the presence of both id and string fields. The ord function
    // seems to help
    sQ.addSort("type", SolrQuery.ORDER.desc);
    sQ.addSort("scopeType", SolrQuery.ORDER.desc);
    sQ.addSort("ord(owningItem)", SolrQuery.ORDER.desc);
    sQ.addSort("id", SolrQuery.ORDER.asc);
    sQ.addSort("scopeId", SolrQuery.ORDER.asc);

    QueryResponse sr = server.query(sQ);
    SolrDocumentList sdl = sr.getResults();

    for (int i = 0; i < sdl.size() && (numProcessed < numRec); i++) {
        SolrDocument sd = sdl.get(i);
        SolrInputDocument input = ClientUtils.toSolrInputDocument(sd);
        input.remove("_version_");
        for (FIELD col : FIELD.values()) {
            mapField(input, col);
        }

        docs.add(input);
        ++numProcessed;
    }
    return numProcessed - initNumProcessed;
}

From source file:org.eclipse.rdf4j.sail.solr.SolrIndex.java

License:Open Source License

/**
 * Evaluates the given query and returns the results as a QueryResponse instance.
 *
 * @throws SolrServerException
 */
public QueryResponse search(SolrQuery query) throws SolrServerException, IOException {
    int nDocs;
    if (maxDocs > 0) {
        nDocs = maxDocs;
    } else {
        long docCount = client.query(query.setRows(0)).getResults().getNumFound();
        nDocs = Math.max((int) Math.min(docCount, Integer.MAX_VALUE), 1);
    }
    return client.query(query.setRows(nDocs));
}
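
This method illustrates a two-pass idiom: a first query with setRows(0) fetches no documents and serves only to read getNumFound(), and that count then sizes a second query via setRows(nDocs), so a single request returns every match (clamped to at most Integer.MAX_VALUE and at least 1).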

From source file:org.eclipse.recommenders.coordinates.maven.MavenCentralFingerprintSearchAdvisor.java

License:Open Source License

@Override
protected Optional<ProjectCoordinate> doSuggest(DependencyInfo dependencyInfo) {
    if (!dependencyInfo.getFile().isFile()) {
        return absent();
    }

    try {
        SolrQuery query = new SolrQuery();
        query.setQuery("1:\"" + Fingerprints.sha1(dependencyInfo.getFile()) + "\"");
        query.setRows(1);
        QueryResponse response = server.query(query);
        SolrDocumentList results = response.getResults();

        for (SolrDocument document : results) {
            if (!SUPPORTED_PACKAGINGS.contains(document.get(FIELD_PACKAGING))) {
                continue;
            }

            String groupId = (String) document.get(FIELD_GROUP_ID);
            String artifactId = (String) document.get(FIELD_ARTIFACT_ID);
            String version = (String) document.get(FIELD_VERSION);

            return tryNewProjectCoordinate(groupId, artifactId, canonicalizeVersion(version));
        }

        return absent();
    } catch (SolrServerException e) {
        LOG.error("Exception when querying Solr Server <{}>", SEARCH_MAVEN_ORG, e);
        return absent();
    }
}
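
Here setRows(1) restricts the response to the single best fingerprint match, so the loop over the results executes at most once and simply rejects the hit if its packaging is unsupported.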

From source file:org.emonocot.persistence.dao.hibernate.SearchableDaoImpl.java

License:Open Source License

public List<Match> autocomplete(final String query, Integer pageSize, Map<String, String> selectedFacets)
        throws SolrServerException {
    SolrQuery solrQuery = new SolrQuery();

    if (query != null && !query.trim().equals("")) {
        //String searchString = query.trim().replace(" ", "+");
        solrQuery.setQuery(query);
    } else {
        return new ArrayList<Match>();
    }

    // Filter the searchable objects out
    solrQuery.addFilterQuery("base.class_searchable_b:" + isSearchableObject());

    // Set additional result parameters
    //solrQuery.setRows(pageSize);
    int rows = 100;
    solrQuery.setRows(rows);

    if (selectedFacets != null && !selectedFacets.isEmpty()) {
        for (String facetName : selectedFacets.keySet()) {
            solrQuery.addFilterQuery(facetName + ":" + selectedFacets.get(facetName));
        }
    }

    solrQuery.set("defType", "edismax");
    solrQuery.set("qf", "autocomplete^3 autocompleteng");
    solrQuery.set("pf", "autocompletenge");
    solrQuery.set("fl", "autocomplete,id");
    solrQuery.setHighlight(true);
    solrQuery.set("hl.fl", "autocomplete");
    solrQuery.set("hl.snippets", 3);
    solrQuery.setHighlightSimplePre("<b>");
    solrQuery.setHighlightSimplePost("</b>");
    //solrQuery.setSortField("autocomplete", SolrQuery.ORDER.valueOf("desc"));
    /*
    solrQuery.set("group","true");
    solrQuery.set("group.field", "autocomplete");
     */

    QueryResponse queryResponse = solrServer.query(solrQuery);

    List<Match> results = new ArrayList<Match>();
    Map<String, Match> matchMap = new HashMap<String, Match>();

    for (SolrDocument solrDocument : queryResponse.getResults()) {
        Match match = new Match();
        String label = filter((String) solrDocument.get("autocomplete"));
        match.setLabel(label);
        match.setValue(label);
        matchMap.put((String) solrDocument.get("id"), match);
        results.add(match);
    }

    List<Match> distinctResults = removeDuplicates(results);

    List<Match> subResults1 = new ArrayList<Match>(); //ExactMatch
    List<Match> subResults2 = new ArrayList<Match>();

    for (Match item : distinctResults) {
        if ((item.getLabel().toLowerCase().startsWith(query.toLowerCase()))) {
            subResults1.add(item);
        } else {
            subResults2.add(item);
        }
    }

    if (subResults1.size() > 0) {
        Collections.sort(subResults1);
    }

    /*
    Collections.sort(subResults1, new Comparator() {
       @Override
       public int compare(Object matchOne, Object matchTwo) {
    //use instanceof to verify the references are indeed of the type in question
    return ((Match)matchOne).getLabel()
          .compareTo(((Match)matchTwo).getLabel());
       }
    });
    */

    subResults1.addAll(subResults2);
    List<Match> subResults = subResults1;

    List<Match> finalResults = new ArrayList<Match>();

    if (subResults.size() > 10) {
        finalResults = subResults.subList(0, 10);
    } else {
        finalResults = subResults;
    }

    //subResults = finalResults;

    /*
    for(GroupCommand groupCommand : queryResponse.getGroupResponse().getValues()) {
       for (Group group : groupCommand.getValues()) {
    for (SolrDocument solrDocument : group.getResult()) {
       Match match = new Match();
       String label = filter((String) solrDocument.get("autocomplete"));
       match.setLabel(label);
       match.setValue(label);
       matchMap.put((String) solrDocument.get("id"), match);
       results.add(match);
    }
       }
    }
    */
    for (String documentId : matchMap.keySet()) {
        if (queryResponse.getHighlighting().containsKey(documentId)) {
            Map<String, List<String>> highlightedTerms = queryResponse.getHighlighting().get(documentId);
            if (highlightedTerms.containsKey("autocomplete")) {
                matchMap.get(documentId).setLabel(highlightedTerms.get("autocomplete").get(0));
            }
        }
    }

    //return results;
    return finalResults;
}

From source file:org.emonocot.persistence.dao.hibernate.SearchableDaoImpl.java

License:Open Source License

public CellSet analyse(String rows, String cols, Integer firstCol, Integer maxCols, Integer firstRow,
        Integer maxRows, Map<String, String> selectedFacets, String[] facets, Cube cube)
        throws SolrServerException {
    SolrQuery query = new SolrQuery();
    query.setQuery("*:*");
    SolrQuery totalQuery = new SolrQuery();
    totalQuery.setQuery("*:*");

    // We're not interested in the results themselves
    query.setRows(1);
    query.setStart(0);
    totalQuery.setRows(1);
    totalQuery.setStart(0);

    if (rows == null) {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);
        query.addFacetField(cube.getDefaultLevel());
        includeMissing(query, cube.getDefaultLevel());
        includeMissing(totalQuery, cube.getDefaultLevel());
        if (maxRows != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalRows}" + cube.getDefaultLevel());

            query.add("f." + cube.getDefaultLevel() + ".facet.limit", maxRows.toString());
            query.add("f." + cube.getDefaultLevel() + ".facet.mincount", "1");
            if (firstRow != null) {
                query.add("f." + cube.getDefaultLevel() + ".facet.offset", firstRow.toString());
            }
        }
    } else if (cols == null) {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);
        query.addFacetField(rows);
        includeMissing(query, rows);
        includeMissing(totalQuery, rows);
        if (maxRows != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalRows}" + rows);
            query.add("f." + rows + ".facet.limit", maxRows.toString());
            query.add("f." + rows + ".facet.mincount", "1");
            if (firstRow != null) {
                query.add("f." + rows + ".facet.offset", firstRow.toString());
            }
        }
        if (cube.getLevel(rows).isMultiValued() && cube.getLevel(rows).getHigher() != null) {
            Level higher = cube.getLevel(rows).getHigher();
            totalQuery.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
            query.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
        }
    } else {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);
        query.addFacetField(rows);
        includeMissing(query, rows);
        includeMissing(totalQuery, rows);
        if (maxRows != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalRows}" + rows);
            query.add("f." + rows + ".facet.limit", maxRows.toString());
            query.add("f." + rows + ".facet.mincount", "1");
            if (firstRow != null) {
                query.add("f." + rows + ".facet.offset", firstRow.toString());
            }
        }
        if (cube.getLevel(rows).isMultiValued() && cube.getLevel(rows).getHigher() != null) {
            Level higher = cube.getLevel(rows).getHigher();
            totalQuery.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
            query.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
        }
        query.addFacetField(cols);
        includeMissing(query, cols);
        if (maxCols != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalCols}" + cols);
            /**
             * Facet pivot does not behave the same way on columns - the limit is
             */
            //query.add("f." + cols + ".facet.limit", maxCols.toString());
            //query.add("f." + cols + ".facet.mincount", "1");
            //if (firstCol != null) {
            //   query.add("f." + cols + ".facet.offset", firstCol.toString());
            //}
        }
        if (cube.getLevel(cols).isMultiValued() && cube.getLevel(cols).getHigher() != null) {
            Level higher = cube.getLevel(cols).getHigher();
            totalQuery.add("f." + cols + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
            query.add("f." + cols + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
        }
        query.addFacetPivotField(rows + "," + cols);
    }

    if (selectedFacets != null && !selectedFacets.isEmpty()) {
        for (String facetName : selectedFacets.keySet()) {
            String facetValue = selectedFacets.get(facetName);
            if (StringUtils.isNotEmpty(facetValue)) {
                totalQuery.addFilterQuery(facetName + ":" + selectedFacets.get(facetName));
                query.addFilterQuery(facetName + ":" + selectedFacets.get(facetName));
            } else {//Subtract/Exclude documents with any value for the facet
                totalQuery.addFilterQuery("-" + facetName + ":[* TO *]");
                query.addFilterQuery("-" + facetName + ":[* TO *]");
            }
        }
    }

    if (facets != null && facets.length != 0) {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);

        for (String facetName : facets) {
            if (rows != null && rows.equals(facetName)) {
            } else if (cols != null && cols.equals(facetName)) {
            } else if (rows == null && facetName.equals(cube.getDefaultLevel())) {
            } else {
                includeMissing(query, facetName);
                query.addFacetField(facetName);
            }
        }
    }

    QueryResponse response = solrServer.query(query);
    QueryResponse totalResponse = solrServer.query(totalQuery);
    FacetField totalRows = null;
    FacetField totalCols = null;
    if (totalResponse.getFacetField("totalRows") != null) {
        totalRows = totalResponse.getFacetField("totalRows");
    }

    if (totalResponse.getFacetField("totalCols") != null) {
        totalCols = totalResponse.getFacetField("totalCols");
    }

    CellSet cellSet = new CellSet(response, selectedFacets, query, rows, cols, firstRow, maxRows, firstCol,
            maxCols, totalRows, totalCols, cube);

    return cellSet;
}

From source file:org.emonocot.persistence.dao.hibernate.SearchableDaoImpl.java

License:Open Source License

/**
 * Prepares a {@link SolrQuery} with the parameters passed in
 * @param query
 * @param sort
 * @param pageSize
 * @param pageNumber
 * @param selectedFacets
 * @return A {@link SolrQuery} that can be customised before passing to a {@link SolrServer}
 */
protected SolrQuery prepareQuery(String query, String sort, Integer pageSize, Integer pageNumber,
        Map<String, String> selectedFacets) {
    SolrQuery solrQuery = new SolrQuery();

    if (query != null && !query.trim().equals("")) {
        String searchString = null;
        if (query.indexOf(":") != -1) {
            searchString = query;
        } else {
            // replace spaces with '+' so that we search on terms
            searchString = query.trim().replace(" ", "+");
            solrQuery.set("defType", "edismax");
            solrQuery.set("qf", "searchable.label_sort searchable.solrsummary_t");
        }
        solrQuery.setQuery(searchString);

    } else {
        solrQuery.set("defType", "edismax");
        solrQuery.set("qf", "searchable.label_sort searchable.solrsummary_t");
        solrQuery.setQuery("*:*");
    }

    if (sort != null && sort.length() != 0) {
        for (String singleSort : sort.split(",")) {
            if (singleSort.equals("_asc")) {
                //Do nothing
            } else if (singleSort.endsWith("_asc")) {
                String sortField = singleSort.substring(0, singleSort.length() - 4);
                solrQuery.addSortField(sortField, SolrQuery.ORDER.asc);
            } else if (singleSort.endsWith("_desc")) {
                String sortField = singleSort.substring(0, singleSort.length() - 5);
                solrQuery.addSortField(sortField, SolrQuery.ORDER.desc);
            }
        }
    }

    if (pageSize != null) {
        solrQuery.setRows(pageSize);
        if (pageNumber != null) {
            solrQuery.setStart(pageSize * pageNumber);
        }
    }

    if (selectedFacets != null && !selectedFacets.isEmpty()) {
        for (String facetName : selectedFacets.keySet()) {
            String facetValue = selectedFacets.get(facetName);
            if (StringUtils.isNotEmpty(facetValue)) {
                solrQuery.addFilterQuery(facetName + ":" + selectedFacets.get(facetName));
            } else {//Subtract/Exclude documents with any value for the facet
                solrQuery.addFilterQuery("-" + facetName + ":[* TO *]");
            }
        }
    }

    return solrQuery;
}

From source file:org.entando.entando.plugins.jpsolrclient.aps.system.services.content.Searcher.java

License:Open Source License

public List<String> extractAllContentsId() throws ApsSystemException {
    List<String> contentsId = null;
    try {
        SolrQuery solrQuery = new SolrQuery("id:*");
        solrQuery.setRows(10000);
        contentsId = this.executeQuery(solrQuery);
    } catch (Throwable t) {
        throw new ApsSystemException("Error extracting response", t);
    }
    return contentsId;
}