Example usage for org.apache.solr.common.params CommonParams WT

Introduction

This page collects example usages of org.apache.solr.common.params.CommonParams.WT.

Prototype

String WT

Document

The response writer type, i.e. the format of the response (for example "json", "csv", "xml", or "javabin").
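
A minimal sketch of setting the parameter from SolrJ (assuming a standard SolrJ dependency; the writer types shown are stock Solr formats):

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;

//Ask for a JSON response via SolrQuery
SolrQuery query = new SolrQuery("*:*");
query.setParam(CommonParams.WT, "json");

//Or set it on raw parameters, e.g. for CSV output
ModifiableSolrParams params = new ModifiableSolrParams();
params.set(CommonParams.Q, "*:*");
params.set(CommonParams.WT, "csv");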

Usage

From source file: org.dspace.statistics.SolrLogger.java

License: BSD License

public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
     * Start by faceting by year so we can include each year in a separate core!
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything.
    //The alternative would be to sort, but that isn't recommended since it would be a very costly query!
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this!
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file!
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}
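
The generateURL helper called in the paging loop above is not shown on this page. A plausible minimal version (an assumption for illustration, not the actual DSpace helper) simply appends the URL-encoded parameters to the /select URL:

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Map;

//Hypothetical sketch of generateURL: append each parameter, URL-encoded,
//to the base request URL.
private static String generateURL(String baseURL, Map<String, String> parameters)
        throws UnsupportedEncodingException {
    StringBuilder result = new StringBuilder(baseURL);
    boolean first = true;
    for (Map.Entry<String, String> parameter : parameters.entrySet()) {
        result.append(first ? '?' : '&');
        first = false;
        result.append(parameter.getKey()).append('=')
                .append(URLEncoder.encode(parameter.getValue(), "UTF-8"));
    }
    return result.toString();
}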

From source file: org.dspace.statistics.SolrLogger.java

License: BSD License

public static void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
    Context context = new Context();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.addFilterQuery("type:" + Constants.BITSTREAM);
        //Only retrieve records which do not have a bundle name
        query.addFilterQuery("-bundleName:[* TO *]");
        query.setRows(0);
        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();

        File tempDirectory = new File(
                ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
        tempDirectory.mkdirs();
        List<File> tempCsvFiles = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            Map<String, String> params = new HashMap<String, String>();
            params.put(CommonParams.Q, "*:*");
            params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
            params.put(CommonParams.WT, "csv");
            params.put(CommonParams.ROWS, String.valueOf(10000));
            params.put(CommonParams.START, String.valueOf(i));

            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, params);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);

            InputStream csvOutput = response.getEntity().getContent();
            Reader csvReader = new InputStreamReader(csvOutput);
            List<String[]> rows = new CSVReader(csvReader).readAll();
            String[][] csvParsed = rows.toArray(new String[rows.size()][]);
            String[] header = csvParsed[0];
            //Attempt to find the bitstream id index !
            int idIndex = 0;
            for (int j = 0; j < header.length; j++) {
                if (header[j].equals("id")) {
                    idIndex = j;
                }
            }

            File tempCsv = new File(tempDirectory.getPath() + File.separatorChar + "temp." + i + ".csv");
            tempCsvFiles.add(tempCsv);
            CSVWriter csvp = new CSVWriter(new FileWriter(tempCsv));
            //csvp.setAlwaysQuote(false);

            //Write the header !
            csvp.writeNext((String[]) ArrayUtils.add(header, "bundleName"));
            Map<Integer, String> bitBundleCache = new HashMap<Integer, String>();
            //Loop over each line (skip the headers though)!
            for (int j = 1; j < csvParsed.length; j++) {
                String[] csvLine = csvParsed[j];
                //Write the default line !
                int bitstreamId = Integer.parseInt(csvLine[idIndex]);
                //Attempt to retrieve our bundle name from the cache !
                String bundleName = bitBundleCache.get(bitstreamId);
                if (bundleName == null) {
                    //Nothing found; retrieve the bitstream
                    Bitstream bitstream = Bitstream.find(context, bitstreamId);
                    //Attempt to retrieve our bitstream !
                    if (bitstream != null) {
                        Bundle[] bundles = bitstream.getBundles();
                        if (bundles != null && 0 < bundles.length) {
                            Bundle bundle = bundles[0];
                            bundleName = bundle.getName();
                            context.removeCached(bundle, bundle.getID());
                        } else {
                            //No bundle found, we are either a collection or a community logo, check for it !
                            DSpaceObject parentObject = bitstream.getParentObject();
                            if (parentObject instanceof Collection) {
                                bundleName = "LOGO-COLLECTION";
                            } else if (parentObject instanceof Community) {
                                bundleName = "LOGO-COMMUNITY";
                            }
                            if (parentObject != null) {
                                context.removeCached(parentObject, parentObject.getID());
                            }

                        }
                        //Cache the bundle name
                        bitBundleCache.put(bitstream.getID(), bundleName);
                        //Remove the bitstream from cache
                        context.removeCached(bitstream, bitstreamId);
                    }
                    //Check if we still don't have a bundle name:
                    //if deleted bitstreams are to be kept, tag the record with a BITSTREAM_DELETED bundle name!
                    if (bundleName == null && !removeDeletedBitstreams) {
                        bundleName = "BITSTREAM_DELETED";
                    }
                }
                csvp.writeNext((String[]) ArrayUtils.add(csvLine, bundleName));
            }

            //Done looping over the parsed csv; flush & close the writer
            csvp.flush();
            csvp.close();
        }

        //Add all the separate csv files
        for (File tempCsv : tempCsvFiles) {
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        //Now that all our new bitstream stats are in place, delete all the old ones !
        solr.deleteByQuery("-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
        //Commit everything to wrap up
        solr.commit(true, true);
        //Clean up our directory !
        FileUtils.deleteDirectory(tempDirectory);
    } catch (Exception e) {
        log.error("Error while updating the bitstream statistics", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file: org.dspace.statistics.SolrLogger.java

License: BSD License

/**
 * Export all SOLR usage statistics for viewing/downloading content to a flat text file.
 * The export is written as a series of CSV files in the DSpace temp directory.
 *
 * @throws Exception
 */
public static void exportHits() throws Exception {
    Context context = new Context();

    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");

        ModifiableSolrParams solrParams = new ModifiableSolrParams();
        solrParams.set(CommonParams.Q, "statistics_type:view OR (*:* AND -statistics_type:*)");
        solrParams.set(CommonParams.WT, "javabin");
        solrParams.set(CommonParams.ROWS, String.valueOf(10000));

        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();
        System.out.println("There are " + totalRecords + " usage events in SOLR for download/view.");

        for (int i = 0; i < totalRecords; i += 10000) {
            solrParams.set(CommonParams.START, String.valueOf(i));
            QueryResponse queryResponse = solr.query(solrParams);
            SolrDocumentList docs = queryResponse.getResults();

            File exportOutput = new File(
                    tempDirectory.getPath() + File.separatorChar + "usagestats_" + i + ".csv");
            exportOutput.delete();

            //export docs
            addDocumentsToFile(context, docs, exportOutput);
            System.out.println("Export hits [" + i + " - " + String.valueOf(i + 9999) + "] to "
                    + exportOutput.getCanonicalPath());
        }
    } catch (Exception e) {
        log.error("Error while exporting SOLR data", e);
        throw e;
    } finally {
        context.abort();
    }
}
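
Note the design choice here: unlike the sharding and reindexing examples, which fetch wt=csv over plain HTTP and re-upload the files, exportHits sets wt=javabin. The results are consumed in-process through SolrJ's QueryResponse and SolrDocumentList, and javabin is the compact binary format SolrJ decodes natively.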

From source file: org.dspace.statistics.SolrLoggerServiceImpl.java

License: BSD License

@Override
public void shardSolrIndex() throws IOException, SolrServerException {
    /*
     * Start by faceting by year so we can include each year in a separate core!
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything.
    //The alternative would be to sort, but that isn't recommended since it would be a very costly query!
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this!
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file!
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setParam("skip", "_version_");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file: org.dspace.statistics.SolrLoggerServiceImpl.java

License: BSD License

@Override
public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
    Context context = new Context();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.addFilterQuery("type:" + Constants.BITSTREAM);
        //Only retrieve records which do not have a bundle name
        query.addFilterQuery("-bundleName:[* TO *]");
        query.setRows(0);
        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();

        File tempDirectory = new File(
                configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
        tempDirectory.mkdirs();
        List<File> tempCsvFiles = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            Map<String, String> params = new HashMap<String, String>();
            params.put(CommonParams.Q, "*:*");
            params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
            params.put(CommonParams.WT, "csv");
            params.put(CommonParams.ROWS, String.valueOf(10000));
            params.put(CommonParams.START, String.valueOf(i));

            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, params);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);

            InputStream csvOutput = response.getEntity().getContent();
            Reader csvReader = new InputStreamReader(csvOutput);
            List<String[]> rows = new CSVReader(csvReader).readAll();
            String[][] csvParsed = rows.toArray(new String[rows.size()][]);
            String[] header = csvParsed[0];
            //Attempt to find the bitstream id index !
            int idIndex = 0;
            for (int j = 0; j < header.length; j++) {
                if (header[j].equals("id")) {
                    idIndex = j;
                }
            }

            File tempCsv = new File(tempDirectory.getPath() + File.separatorChar + "temp." + i + ".csv");
            tempCsvFiles.add(tempCsv);
            CSVWriter csvp = new CSVWriter(new FileWriter(tempCsv));
            //csvp.setAlwaysQuote(false);

            //Write the header !
            csvp.writeNext((String[]) ArrayUtils.add(header, "bundleName"));
            Map<String, String> bitBundleCache = new HashMap<>();
            //Loop over each line (skip the headers though)!
            for (int j = 1; j < csvParsed.length; j++) {
                String[] csvLine = csvParsed[j];
                //Write the default line !
                String bitstreamId = csvLine[idIndex];
                //Attempt to retrieve our bundle name from the cache !
                String bundleName = bitBundleCache.get(bitstreamId);
                if (bundleName == null) {
                    //Nothing found; retrieve the bitstream
                    Bitstream bitstream = bitstreamService.findByIdOrLegacyId(context, bitstreamId);
                    //Attempt to retrieve our bitstream !
                    if (bitstream != null) {
                        List<Bundle> bundles = bitstream.getBundles();
                        if (bundles != null && 0 < bundles.size()) {
                            Bundle bundle = bundles.get(0);
                            bundleName = bundle.getName();
                        } else {
                            //No bundle found, we are either a collection or a community logo, check for it !
                            DSpaceObject parentObject = bitstreamService.getParentObject(context, bitstream);
                            if (parentObject instanceof Collection) {
                                bundleName = "LOGO-COLLECTION";
                            } else if (parentObject instanceof Community) {
                                bundleName = "LOGO-COMMUNITY";
                            }

                        }
                        //Cache the bundle name
                        bitBundleCache.put(bitstream.getID().toString(), bundleName);
                    }
                    //Check if we still don't have a bundle name:
                    //if deleted bitstreams are to be kept, tag the record with a BITSTREAM_DELETED bundle name!
                    if (bundleName == null && !removeDeletedBitstreams) {
                        bundleName = "BITSTREAM_DELETED";
                    }
                }
                csvp.writeNext((String[]) ArrayUtils.add(csvLine, bundleName));
            }

            //Done looping over the parsed csv; flush & close the writer
            csvp.flush();
            csvp.close();
        }

        //Add all the separate csv files
        for (File tempCsv : tempCsvFiles) {
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        //Now that all our new bitstream stats are in place, delete all the old ones !
        solr.deleteByQuery("-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
        //Commit everything to wrap up
        solr.commit(true, true);
        //Clean up our directory !
        FileUtils.deleteDirectory(tempDirectory);
    } catch (Exception e) {
        log.error("Error while updating the bitstream statistics", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file: org.dspace.statistics.SolrLoggerServiceImpl.java

License: BSD License

@Override
public void exportHits() throws Exception {
    Context context = new Context();

    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");

        ModifiableSolrParams solrParams = new ModifiableSolrParams();
        solrParams.set(CommonParams.Q, "statistics_type:view OR (*:* AND -statistics_type:*)");
        solrParams.set(CommonParams.WT, "javabin");
        solrParams.set(CommonParams.ROWS, String.valueOf(10000));

        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();
        System.out.println("There are " + totalRecords + " usage events in SOLR for download/view.");

        for (int i = 0; i < totalRecords; i += 10000) {
            solrParams.set(CommonParams.START, String.valueOf(i));
            QueryResponse queryResponse = solr.query(solrParams);
            SolrDocumentList docs = queryResponse.getResults();

            File exportOutput = new File(
                    tempDirectory.getPath() + File.separatorChar + "usagestats_" + i + ".csv");
            exportOutput.delete();

            //export docs
            addDocumentsToFile(context, docs, exportOutput);
            System.out.println("Export hits [" + i + " - " + String.valueOf(i + 9999) + "] to "
                    + exportOutput.getCanonicalPath());
        }
    } catch (Exception e) {
        log.error("Error while exporting SOLR data", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file: org.eclipse.orion.internal.server.search.SearchServlet.java

License: Open Source License

private SolrQuery buildSolrQuery(HttpServletRequest req) {
    SolrQuery query = new SolrQuery();
    query.setParam(CommonParams.WT, "json");
    query.setParam(CommonParams.FL, "Id,Name,Length,Directory,LastModified,Location");
    query.setParam("hl", "true");
    String queryString = req.getParameter(CommonParams.Q);
    queryString += " AND " + ProtocolConstants.KEY_USER_NAME + ':' + req.getRemoteUser();
    query.setQuery(queryString);
    return query;
}
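
A hedged usage sketch for the query built above (solrBaseUrl, req, and the HTTP client setup are assumptions): because wt=json is set as a plain parameter, executing the query over raw HTTP returns a JSON body, whereas a SolrJ client would normally override the writer type with its own ResponseParser.

//Minimal sketch: run the built query over plain HTTP so wt=json takes effect.
//solrBaseUrl is an assumed configuration value; ClientUtils is
//org.apache.solr.client.solrj.util.ClientUtils.
SolrQuery query = buildSolrQuery(req);
String url = solrBaseUrl + "/select" + ClientUtils.toQueryString(query, false);
HttpGet get = new HttpGet(url);
HttpResponse httpResponse = new DefaultHttpClient().execute(get);
String json = EntityUtils.toString(httpResponse.getEntity(), "UTF-8");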

From source file: org.solbase.SolbaseDispatchFilter.java

License: Apache License

@SuppressWarnings({ "unused", "unchecked" })
private void handleAdminRequest(HttpServletRequest req, ServletResponse response, SolrRequestHandler handler,
        SolrQueryRequest solrReq) throws IOException {
    SolrQueryResponse solrResp = new SolrQueryResponse();
    final NamedList<Object> responseHeader = new SimpleOrderedMap<Object>();
    solrResp.add("responseHeader", responseHeader);
    NamedList<Object> toLog = solrResp.getToLog();
    toLog.add("webapp", req.getContextPath());
    toLog.add("path", solrReq.getContext().get("path"));
    toLog.add("params", "{" + solrReq.getParamString() + "}");
    handler.handleRequest(solrReq, solrResp);
    SolrCore.setResponseHeaderValues(handler, solrReq, solrResp);
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < toLog.size(); i++) {
        String name = toLog.getName(i);
        Object val = toLog.getVal(i);
        sb.append(name).append("=").append(val).append(" ");
    }
    QueryResponseWriter respWriter = SolrCore.DEFAULT_RESPONSE_WRITERS
            .get(solrReq.getParams().get(CommonParams.WT));
    if (respWriter == null)
        respWriter = SolrCore.DEFAULT_RESPONSE_WRITERS.get("standard");
    writeResponse(solrResp, response, respWriter, solrReq, Method.getMethod(req.getMethod()));
}
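
Both this dispatcher and the RequestProcesser example below resolve the QueryResponseWriter from the request's wt parameter and fall back to the "standard" writer (Solr's default XML writer) when no writer is registered under the requested name.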

From source file: org.vootoo.client.netty.NettySolrClient.java

License: Apache License

protected ResponseParser createRequest(SolrRequest request, ProtobufRequestSetter saveRequestSetter)
        throws SolrServerException, IOException {

    SolrParams params = request.getParams();
    Collection<ContentStream> streams = requestWriter.getContentStreams(request);//throw IOException
    String path = requestWriter.getPath(request);
    if (path == null || !path.startsWith("/")) {
        path = DEFAULT_PATH;
    }

    ResponseParser parser = request.getResponseParser();
    if (parser == null) {
        parser = this.parser;
    }

    // The parser's 'wt=' and 'version=' params are used instead of the original params.
    ModifiableSolrParams wparams = new ModifiableSolrParams(params);
    if (parser != null) {
        wparams.set(CommonParams.WT, parser.getWriterType());
        wparams.set(CommonParams.VERSION, parser.getVersion());
    }
    if (invariantParams != null) {
        wparams.add(invariantParams);
    }

    Integer timeout = wparams.getInt(CommonParams.TIME_ALLOWED);
    if (timeout == null) {
            //TIME_ALLOWED is mandatory; fall back to the default timeout
        timeout = defaultTimeout;
        wparams.set(CommonParams.TIME_ALLOWED, timeout);
    }

    saveRequestSetter.setTimeout(timeout);
    saveRequestSetter.setPath(path);
    saveRequestSetter.setSolrParams(wparams);

    if (request.getMethod() != null) {
        saveRequestSetter.setMethod(request.getMethod());
    }

    if (SolrRequest.METHOD.GET == request.getMethod()) {
        if (streams != null) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "GET can't send streams!");
        }
        return parser;
    }

    if (SolrRequest.METHOD.POST == request.getMethod() || SolrRequest.METHOD.PUT == request.getMethod()) {
        if (streams != null) {
            saveRequestSetter.setContentStreams(streams);
        }

        return parser;
    }

    throw new SolrServerException("Unsupported method: " + request.getMethod());
}
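
The writer type here comes from the client-side ResponseParser, which keeps the server's output format in sync with what the client can decode. A minimal sketch of that pairing, using SolrJ's stock BinaryResponseParser:

import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.client.solrj.impl.BinaryResponseParser;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;

//BinaryResponseParser decodes Solr's javabin format, so the matching
//writer type is "javabin".
ResponseParser parser = new BinaryResponseParser();
ModifiableSolrParams wparams = new ModifiableSolrParams();
wparams.set(CommonParams.WT, parser.getWriterType()); // "javabin"
wparams.set(CommonParams.VERSION, parser.getVersion()); // javabin protocol version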

From source file: org.vootoo.server.RequestProcesser.java

License: Apache License

protected void handleAdminRequest(SolrRequestHandler handler, SolrQueryRequest solrReq) throws IOException {
    SolrQueryResponse solrResp = new SolrQueryResponse();
    SolrCore.preDecorateResponse(solrReq, solrResp);
    handler.handleRequest(solrReq, solrResp);
    SolrCore.postDecorateResponse(handler, solrReq, solrResp);
    if (logger.isInfoEnabled() && solrResp.getToLog().size() > 0) {
        logger.info(solrResp.getToLogAsString("[admin] "));
    }
    QueryResponseWriter respWriter = SolrCore.DEFAULT_RESPONSE_WRITERS
            .get(solrReq.getParams().get(CommonParams.WT));
    if (respWriter == null)
        respWriter = SolrCore.DEFAULT_RESPONSE_WRITERS.get("standard");
    writeResponse(solrResp, respWriter, solrReq);
}