Example usage for org.apache.solr.common.params CommonParams FQ

Introduction

This page collects example usages of org.apache.solr.common.params.CommonParams.FQ.

Prototype

String FQ = "fq";

Document

Lucene query string(s) for filtering the results without affecting scoring
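
A minimal sketch of typical usage, shown before the full examples below; the filter value "type:city" is an illustrative placeholder, not taken from any particular example:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;

public class FilterQuerySketch {
    public static void main(String[] args) {
        // High-level SolrJ form: each call appends one fq parameter.
        SolrQuery query = new SolrQuery("*:*");
        query.addFilterQuery("type:city"); // placeholder filter

        // Low-level form using the CommonParams.FQ constant ("fq") directly.
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set(CommonParams.Q, "*:*");
        params.add(CommonParams.FQ, "type:city"); // filters results without affecting scoring
    }
}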

Usage

From source file:org.dspace.app.xmlui.aspect.discovery.json.JSONSolrSearcher.java

License:BSD License

public void generate() throws IOException, SAXException, ProcessingException {
    if (solrServerUrl == null) {
        return;
    }

    Map<String, String> params = new HashMap<String, String>();

    String solrRequestUrl = solrServerUrl + "/select";

    //Add our default parameters
    params.put(CommonParams.ROWS, "0");
    params.put(CommonParams.WT, "json");
    //We use json as our output type
    params.put("json.nl", "map");
    params.put("json.wrf", jsonWrf);
    params.put(FacetParams.FACET, Boolean.TRUE.toString());

    //Generate our json out of the given params
    try {
        params.put(CommonParams.Q, URLEncoder.encode(query, Constants.DEFAULT_ENCODING));
    } catch (UnsupportedEncodingException uee) {
        //Should never occur
        return;
    }

    params.put(FacetParams.FACET_LIMIT, String.valueOf(facetLimit));
    if (facetSort != null) {
        params.put(FacetParams.FACET_SORT, facetSort);
    }
    params.put(FacetParams.FACET_MINCOUNT, String.valueOf(facetMinCount));

    solrRequestUrl = AbstractDSpaceTransformer.generateURL(solrRequestUrl, params);
    if (facetFields != null || filterQueries != null) {
        StringBuilder urlBuilder = new StringBuilder(solrRequestUrl);
        if (facetFields != null) {

            //Add our facet fields
            for (String facetField : facetFields) {
                urlBuilder.append("&").append(FacetParams.FACET_FIELD).append("=");

                //This class can only be used for autocomplete facet fields
                if (!facetField.endsWith(".year") && !facetField.endsWith("_ac")) {
                    urlBuilder.append(URLEncoder.encode(facetField + "_ac", Constants.DEFAULT_ENCODING));
                } else {
                    urlBuilder.append(URLEncoder.encode(facetField, Constants.DEFAULT_ENCODING));
                }
            }

        }
        if (filterQueries != null) {
            for (String filterQuery : filterQueries) {
                urlBuilder.append("&").append(CommonParams.FQ).append("=")
                        .append(URLEncoder.encode(filterQuery, Constants.DEFAULT_ENCODING));
            }
        }

        solrRequestUrl = urlBuilder.toString();
    }

    try {
        GetMethod get = new GetMethod(solrRequestUrl);
        new HttpClient().executeMethod(get);
        String result = get.getResponseBodyAsString();
        if (result != null) {
            ByteArrayInputStream inputStream = new ByteArrayInputStream(result.getBytes("UTF-8"));

            byte[] buffer = new byte[8192];

            response.setHeader("Content-Length", String.valueOf(result.length()));
            int length;
            while ((length = inputStream.read(buffer)) > -1) {
                out.write(buffer, 0, length);
            }
            out.flush();
        }
    } catch (Exception e) {
        log.error("Error while getting json solr result for discovery search recommendation", e);
        e.printStackTrace();
    }

}
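
Note that this example appends each filter query to the request URL by hand as an fq parameter (via CommonParams.FQ) instead of going through a SolrJ query object.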

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0YEARS to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}
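
Here CommonParams.FQ carries the time-range filter for the year being sharded, so the repeated CSV exports keep the constant *:* query and vary only the start offset; the same filter string is then reused to delete the moved records.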

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

public static void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
    Context context = new Context();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.addFilterQuery("type:" + Constants.BITSTREAM);
        //Only retrieve records which do not have a bundle name
        query.addFilterQuery("-bundleName:[* TO *]");
        query.setRows(0);
        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();

        File tempDirectory = new File(
                ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
        tempDirectory.mkdirs();
        List<File> tempCsvFiles = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            Map<String, String> params = new HashMap<String, String>();
            params.put(CommonParams.Q, "*:*");
            params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
            params.put(CommonParams.WT, "csv");
            params.put(CommonParams.ROWS, String.valueOf(10000));
            params.put(CommonParams.START, String.valueOf(i));

            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, params);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);

            InputStream csvOutput = response.getEntity().getContent();
            Reader csvReader = new InputStreamReader(csvOutput);
            List<String[]> rows = new CSVReader(csvReader).readAll();
            String[][] csvParsed = rows.toArray(new String[rows.size()][]);
            String[] header = csvParsed[0];
            //Attempt to find the bitstream id index !
            int idIndex = 0;
            for (int j = 0; j < header.length; j++) {
                if (header[j].equals("id")) {
                    idIndex = j;
                }
            }

            File tempCsv = new File(tempDirectory.getPath() + File.separatorChar + "temp." + i + ".csv");
            tempCsvFiles.add(tempCsv);
            CSVWriter csvp = new CSVWriter(new FileWriter(tempCsv));
            //csvp.setAlwaysQuote(false);

            //Write the header !
            csvp.writeNext((String[]) ArrayUtils.add(header, "bundleName"));
            Map<Integer, String> bitBundleCache = new HashMap<Integer, String>();
            //Loop over each line (skip the headers though)!
            for (int j = 1; j < csvParsed.length; j++) {
                String[] csvLine = csvParsed[j];
                //Write the default line !
                int bitstreamId = Integer.parseInt(csvLine[idIndex]);
                //Attempt to retrieve our bundle name from the cache !
                String bundleName = bitBundleCache.get(bitstreamId);
                if (bundleName == null) {
                    //Nothing found, retrieve the bitstream
                    Bitstream bitstream = Bitstream.find(context, bitstreamId);
                    //Attempt to retrieve our bitstream !
                    if (bitstream != null) {
                        Bundle[] bundles = bitstream.getBundles();
                        if (bundles != null && 0 < bundles.length) {
                            Bundle bundle = bundles[0];
                            bundleName = bundle.getName();
                            context.removeCached(bundle, bundle.getID());
                        } else {
                            //No bundle found, we are either a collection or a community logo, check for it !
                            DSpaceObject parentObject = bitstream.getParentObject();
                            if (parentObject instanceof Collection) {
                                bundleName = "LOGO-COLLECTION";
                            } else if (parentObject instanceof Community) {
                                bundleName = "LOGO-COMMUNITY";
                            }
                            if (parentObject != null) {
                                context.removeCached(parentObject, parentObject.getID());
                            }

                        }
                        //Cache the bundle name
                        bitBundleCache.put(bitstream.getID(), bundleName);
                        //Remove the bitstream from cache
                        context.removeCached(bitstream, bitstreamId);
                    }
                    //Check if we still don't have a bundle name
                    //If we don't have one & we are not removing deleted bitstreams, ensure that a BITSTREAM_DELETED bundle name is given !
                    if (bundleName == null && !removeDeletedBitstreams) {
                        bundleName = "BITSTREAM_DELETED";
                    }
                }
                csvp.writeNext((String[]) ArrayUtils.add(csvLine, bundleName));
            }

            //Done with our parsed csv; flush and close the writer
            csvp.flush();
            csvp.close();
        }

        //Add all the separate csv files
        for (File tempCsv : tempCsvFiles) {
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        //Now that all our new bitstream stats are in place, delete all the old ones !
        solr.deleteByQuery("-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
        //Commit everything to wrap up
        solr.commit(true, true);
        //Clean up our directory !
        FileUtils.deleteDirectory(tempDirectory);
    } catch (Exception e) {
        log.error("Error while updating the bitstream statistics", e);
        throw e;
    } finally {
        context.abort();
    }
}
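
In this method the filter query does all the record selection: only bitstream hits without a bundleName are exported, annotated with a bundle name, re-imported, and finally removed with the same query string.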

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

@Override
public void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0YEARS to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setParam("skip", "_version_");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}
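
This SolrLoggerServiceImpl variant is nearly identical to the SolrLogger version above; the notable difference is the extra skip parameter, which drops the _version_ field when the CSV files are re-imported.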

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

@Override
public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
    Context context = new Context();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.addFilterQuery("type:" + Constants.BITSTREAM);
        //Only retrieve records which do not have a bundle name
        query.addFilterQuery("-bundleName:[* TO *]");
        query.setRows(0);
        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();

        File tempDirectory = new File(
                configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
        tempDirectory.mkdirs();
        List<File> tempCsvFiles = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            Map<String, String> params = new HashMap<String, String>();
            params.put(CommonParams.Q, "*:*");
            params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
            params.put(CommonParams.WT, "csv");
            params.put(CommonParams.ROWS, String.valueOf(10000));
            params.put(CommonParams.START, String.valueOf(i));

            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, params);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);

            InputStream csvOutput = response.getEntity().getContent();
            Reader csvReader = new InputStreamReader(csvOutput);
            List<String[]> rows = new CSVReader(csvReader).readAll();
            String[][] csvParsed = rows.toArray(new String[rows.size()][]);
            String[] header = csvParsed[0];
            //Attempt to find the bitstream id index !
            int idIndex = 0;
            for (int j = 0; j < header.length; j++) {
                if (header[j].equals("id")) {
                    idIndex = j;
                }
            }

            File tempCsv = new File(tempDirectory.getPath() + File.separatorChar + "temp." + i + ".csv");
            tempCsvFiles.add(tempCsv);
            CSVWriter csvp = new CSVWriter(new FileWriter(tempCsv));
            //csvp.setAlwaysQuote(false);

            //Write the header !
            csvp.writeNext((String[]) ArrayUtils.add(header, "bundleName"));
            Map<String, String> bitBundleCache = new HashMap<>();
            //Loop over each line (skip the headers though)!
            for (int j = 1; j < csvParsed.length; j++) {
                String[] csvLine = csvParsed[j];
                //Write the default line !
                String bitstreamId = csvLine[idIndex];
                //Attempt to retrieve our bundle name from the cache !
                String bundleName = bitBundleCache.get(bitstreamId);
                if (bundleName == null) {
                    //Nothing found, retrieve the bitstream
                    Bitstream bitstream = bitstreamService.findByIdOrLegacyId(context, bitstreamId);
                    //Attempt to retrieve our bitstream !
                    if (bitstream != null) {
                        List<Bundle> bundles = bitstream.getBundles();
                        if (bundles != null && 0 < bundles.size()) {
                            Bundle bundle = bundles.get(0);
                            bundleName = bundle.getName();
                        } else {
                            //No bundle found, we are either a collection or a community logo, check for it !
                            DSpaceObject parentObject = bitstreamService.getParentObject(context, bitstream);
                            if (parentObject instanceof Collection) {
                                bundleName = "LOGO-COLLECTION";
                            } else if (parentObject instanceof Community) {
                                bundleName = "LOGO-COMMUNITY";
                            }

                        }
                        //Cache the bundle name
                        bitBundleCache.put(bitstream.getID().toString(), bundleName);
                    }
                    //Check if we still don't have a bundle name
                    //If we don't have one & we are not removing deleted bitstreams, ensure that a BITSTREAM_DELETED bundle name is given !
                    if (bundleName == null && !removeDeletedBitstreams) {
                        bundleName = "BITSTREAM_DELETED";
                    }
                }
                csvp.writeNext((String[]) ArrayUtils.add(csvLine, bundleName));
            }

            //Done with our parsed csv; flush and close the writer
            csvp.flush();
            csvp.close();
        }

        //Add all the separate csv files
        for (File tempCsv : tempCsvFiles) {
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        //Now that all our new bitstream stats are in place, delete all the old ones !
        solr.deleteByQuery("-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
        //Commit everything to wrap up
        solr.commit(true, true);
        //Clean up our directory !
        FileUtils.deleteDirectory(tempDirectory);
    } catch (Exception e) {
        log.error("Error while updating the bitstream statistics", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file:org.opencms.main.OpenCmsSolrHandler.java

License:Open Source License

/**
 * @see org.opencms.main.I_CmsRequestHandler#handle(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.lang.String)
 */
public void handle(HttpServletRequest req, HttpServletResponse res, String name) throws IOException {

    final HANDLER_NAMES handlerName = HANDLER_NAMES.valueOf(name);
    if (handlerName != null) {
        try {
            Context context = initializeRequest(req, res);
            if ((context.m_params.get(CommonParams.Q) != null)
                    || (context.m_params.get(CommonParams.FQ) != null)) {
                switch (handlerName) {
                case SolrSelect:
                    context.m_index.select(res, context.m_cms, context.m_query, true);
                    break;
                case SolrSpell:
                    context.m_index.spellCheck(res, context.m_cms, context.m_query);
                    break;
                default:
                    break;
                }
            }
        } catch (Exception e) {
            res.setStatus(HttpServletResponse.SC_EXPECTATION_FAILED);
            String message = Messages.get().getBundle().key(Messages.GUI_SOLR_UNEXPECTED_ERROR_0);
            String formattedException = CmsException.getStackTraceAsString(e).replace("\n", "<br/>");
            res.getWriter().println(Messages.get().getBundle().key(Messages.GUI_SOLR_ERROR_HTML_1,
                    message + formattedException));
        }
    }
}
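
The handler only dispatches to the index when the request carries at least a q or an fq parameter; a request with neither is silently ignored.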

From source file:org.opencms.search.solr.CmsSolrQuery.java

License:Open Source License

/**
 * Ensures that the initial request parameters will overwrite the member values.<p>
 *
 * You can initialize the query with an HTTP request parameter then make some method calls
 * and finally re-ensure that the initial request parameters will overwrite the changes
 * made in the meanwhile.<p>
 */
public void ensureParameters() {

    // overwrite already set values with values from query String
    if ((m_queryParameters != null) && !m_queryParameters.isEmpty()) {
        for (Map.Entry<String, String[]> entry : m_queryParameters.entrySet()) {
            if (!entry.getKey().equals(CommonParams.FQ)) {
                // add or replace all parameters from the query String
                setParam(entry.getKey(), entry.getValue());
            } else {
                // special handling for filter queries
                replaceFilterQueries(entry.getValue());
            }
        }
    }
}
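
FQ receives special treatment here because a query may carry several filter queries at once: they are replaced as a set via replaceFilterQueries rather than overwritten individually like the other parameters.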

From source file:org.opensextant.extractors.geo.SolrGazetteer.java

License:Apache License

/**
 * Variation on placesAt().
 *
 * @param yx
 *            location
 * @param withinKM
 *            distance - required.
 * @param feature
 *            feature class
 * @return unsorted list of places near location
 * @throws SolrServerException
 *             on err
 */
public List<Place> placesAt(LatLon yx, int withinKM, String feature) throws SolrServerException {

    ModifiableSolrParams spatialQuery = createGeodeticLookupParams();
    spatialQuery.set(CommonParams.FQ, String.format("feat_class:%s", feature));

    // The point in question.
    spatialQuery.set("pt", GeodeticUtility.formatLatLon(yx));
    // Example: Find places within 50KM, but only first N rows returned.
    spatialQuery.set("d", withinKM);
    return SolrProxy.searchGazetteer(solr.getInternalSolrServer(), spatialQuery);
}
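
The FQ parameter narrows the spatial lookup to a single feature class; as the Document section above notes, this restricts the results without influencing how they are scored.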

From source file:org.opensextant.solrtexttagger.TaggerTest.java

License:Open Source License

@Test
public void testMultipleFilterQueries() throws Exception {
    baseParams.set("qt", "/tag");
    baseParams.set("overlaps", "ALL");

    // build up the corpus with some additional fields for filtering purposes
    deleteByQueryAndGetVersion("*:*", null);

    int i = 0;
    assertU(adoc("id", "" + i++, "name", N.London.getName(), "type", "city", "country", "UK"));
    assertU(adoc("id", "" + i++, "name", N.London_Business_School.getName(), "type", "school", "country",
            "UK"));
    assertU(adoc("id", "" + i++, "name", N.Boston.getName(), "type", "city", "country", "US"));
    assertU(adoc("id", "" + i++, "name", N.City_of_London.getName(), "type", "org", "country", "UK"));
    assertU(commit());

    // not calling buildNames so that we can bring along extra attributes for filtering
    NAMES = Arrays.stream(N.values()).map(N::getName).collect(Collectors.toList());

    // phrase that matches everything
    String doc = "City of London Business School in Boston";

    // first do no filtering
    ModifiableSolrParams p = new ModifiableSolrParams();
    p.add(CommonParams.Q, "*:*");
    assertTags(reqDoc(doc, p), tt(doc, "City of London", 0, N.City_of_London), tt(doc, "London", 0, N.London),
            tt(doc, "London Business School", 0, N.London_Business_School), tt(doc, "Boston", 0, N.Boston));

    // add a single fq
    p.add(CommonParams.FQ, "type:city");
    assertTags(reqDoc(doc, p), tt(doc, "London", 0, N.London), tt(doc, "Boston", 0, N.Boston));

    // add another fq
    p.add(CommonParams.FQ, "country:US");
    assertTags(reqDoc(doc, p), tt(doc, "Boston", 0, N.Boston));
}
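
The test also demonstrates that Solr intersects multiple fq parameters: adding type:city prunes the tags to the two cities, and stacking country:US on top leaves only Boston.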

From source file:org.phenotips.ontology.internal.solr.SolrQueryUtilsTest.java

License:Open Source License

@Test
public void testGetCacheKey() {
    ModifiableSolrParams input = new ModifiableSolrParams();
    input.set(CommonParams.Q, "some value");
    input.add(DisMaxParams.BQ, "text:stub*");
    input.add(DisMaxParams.BQ, "stub*");
    input.set(CommonParams.FQ, "is_a:HP\\:0000108");
    String output = SolrQueryUtils.getCacheKey(input);
    Assert.assertEquals("{q:[some value]\nbq:[text:stub*, stub*]\nfq:[is_a:HP\\:0000108]\n}", output);
}