Example usage for org.apache.solr.common.params CommonParams ROWS

List of usage examples for org.apache.solr.common.params CommonParams ROWS

Introduction

On this page you can find example usage for org.apache.solr.common.params.CommonParams.ROWS.

Prototype

String ROWS

Document

The number of documents to return, starting at "start".
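
For orientation, a minimal sketch of the parameter in use with SolrJ; the query string and values are illustrative, not taken from the examples below:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.CommonParams;

SolrQuery query = new SolrQuery("*:*");
//CommonParams.ROWS is the string constant "rows": the page size,
//used together with CommonParams.START as the page offset
query.set(CommonParams.START, 0);
query.set(CommonParams.ROWS, 10);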

Usage

From source file:org.dice.solrenhancements.unsupervisedfeedback.DiceUnsupervisedFeedbackHandler.java

License:Apache License

@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrIndexSearcher searcher = req.getSearcher();
    SchemaField uniqueKeyField = searcher.getSchema().getUniqueKeyField();
    ModifiableSolrParams params = new ModifiableSolrParams(req.getParams());
    configureSolrParameters(req, params, uniqueKeyField.getName());

    // Set field flags
    ReturnFields returnFields = new SolrReturnFields(req);
    rsp.setReturnFields(returnFields);
    int flags = 0;
    if (returnFields.wantsScore()) {
        flags |= SolrIndexSearcher.GET_SCORES;
    }

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
    int maxDocumentsToMatch = params.getInt(UnsupervisedFeedbackParams.MAX_DOCUMENTS_TO_PROCESS,
            UnsupervisedFeedback.DEFAULT_MAX_NUM_DOCUMENTS_TO_PROCESS);
    String q = params.get(CommonParams.Q);
    Query query = null;
    SortSpec sortSpec = null;
    QParser parser = null;

    List<Query> targetFqFilters = null;
    List<Query> mltFqFilters = null;

    try {

        parser = QParser.getParser(q, defType, req);
        query = parser.getQuery();
        sortSpec = parser.getSort(true);

        targetFqFilters = getFilters(req, CommonParams.FQ);
        mltFqFilters = getFilters(req, UnsupervisedFeedbackParams.FQ);
    } catch (SyntaxError e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    UnsupervisedFeedbackHelper mlt = new UnsupervisedFeedbackHelper(params, searcher, uniqueKeyField, parser);

    // Hold on to the interesting terms if relevant
    UnsupervisedFeedbackParams.TermStyle termStyle = UnsupervisedFeedbackParams.TermStyle
            .get(params.get(UnsupervisedFeedbackParams.INTERESTING_TERMS));
    List<InterestingTerm> interesting = (termStyle == UnsupervisedFeedbackParams.TermStyle.NONE) ? null
            : new ArrayList<InterestingTerm>(mlt.uf.getMaxQueryTermsPerField());

    DocListAndSet uffDocs = null;

    // Parse Required Params
    // This will either have a single Reader or valid query
    Reader reader = null;
    try {
        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        // Find documents MoreLikeThis - either with a reader or a query
        // --------------------------------------------------------------------------------
        if (q == null) {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "Dice unsupervised feedback handler requires either a query (?q=) to find similar documents.");

        } else {

            uffDocs = expandQueryAndReExecute(rsp, params, maxDocumentsToMatch, flags, q, query, sortSpec,
                    targetFqFilters, mltFqFilters, searcher, mlt, interesting, uffDocs, start, rows);
        }

    } finally {
        if (reader != null) {
            reader.close();
        }
    }

    if (uffDocs == null) {
        uffDocs = new DocListAndSet(); // avoid NPE
    }
    rsp.add("response", uffDocs.docList);

    if (interesting != null) {
        addInterestingTerms(rsp, termStyle, interesting);
    }

    // maybe facet the results
    if (params.getBool(FacetParams.FACET, false)) {
        addFacet(req, rsp, params, uffDocs);
    }

    addDebugInfo(req, rsp, q, mltFqFilters, mlt, uffDocs);
}

From source file:org.dspace.app.xmlui.aspect.discovery.json.JSONSolrSearcher.java

License:BSD License

public void generate() throws IOException, SAXException, ProcessingException {
    if (solrServerUrl == null) {
        return;
    }

    Map<String, String> params = new HashMap<String, String>();

    String solrRequestUrl = solrServerUrl + "/select";

    //Add our default parameters
    params.put(CommonParams.ROWS, "0");
    params.put(CommonParams.WT, "json");
    //We use json as our output type
    params.put("json.nl", "map");
    params.put("json.wrf", jsonWrf);
    params.put(FacetParams.FACET, Boolean.TRUE.toString());

    //Generate our json out of the given params
    try {
        params.put(CommonParams.Q, URLEncoder.encode(query, Constants.DEFAULT_ENCODING));
    } catch (UnsupportedEncodingException uee) {
        //Should never occur
        return;
    }

    params.put(FacetParams.FACET_LIMIT, String.valueOf(facetLimit));
    if (facetSort != null) {
        params.put(FacetParams.FACET_SORT, facetSort);
    }
    params.put(FacetParams.FACET_MINCOUNT, String.valueOf(facetMinCount));

    solrRequestUrl = AbstractDSpaceTransformer.generateURL(solrRequestUrl, params);
    if (facetFields != null || filterQueries != null) {
        StringBuilder urlBuilder = new StringBuilder(solrRequestUrl);
        if (facetFields != null) {

            //Add our facet fields
            for (String facetField : facetFields) {
                urlBuilder.append("&").append(FacetParams.FACET_FIELD).append("=");

                //This class can only be used for autocomplete facet fields
                if (!facetField.endsWith(".year") && !facetField.endsWith("_ac")) {
                    urlBuilder.append(URLEncoder.encode(facetField + "_ac", Constants.DEFAULT_ENCODING));
                } else {
                    urlBuilder.append(URLEncoder.encode(facetField, Constants.DEFAULT_ENCODING));
                }
            }

        }
        if (filterQueries != null) {
            for (String filterQuery : filterQueries) {
                urlBuilder.append("&").append(CommonParams.FQ).append("=")
                        .append(URLEncoder.encode(filterQuery, Constants.DEFAULT_ENCODING));
            }
        }

        solrRequestUrl = urlBuilder.toString();
    }

    try {
        GetMethod get = new GetMethod(solrRequestUrl);
        new HttpClient().executeMethod(get);
        String result = get.getResponseBodyAsString();
        if (result != null) {
            //Content-Length must be the byte count of the UTF-8 body, not the character count
            byte[] resultBytes = result.getBytes("UTF-8");
            ByteArrayInputStream inputStream = new ByteArrayInputStream(resultBytes);

            byte[] buffer = new byte[8192];

            response.setHeader("Content-Length", String.valueOf(resultBytes.length));
            int length;
            while ((length = inputStream.read(buffer)) > -1) {
                out.write(buffer, 0, length);
            }
            out.flush();
        }
    } catch (Exception e) {
        log.error("Error while getting json solr result for discovery search recommendation", e);
        e.printStackTrace();
    }

}
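
Note the rows trick above: putting CommonParams.ROWS at "0" makes Solr return facet counts without any document payload, which is all this JSON endpoint needs. A minimal sketch of the same idea, with an illustrative map name:

Map<String, String> facetOnly = new HashMap<String, String>();
facetOnly.put(CommonParams.Q, "*:*");
facetOnly.put(CommonParams.ROWS, "0"); //no documents, only counts
facetOnly.put(FacetParams.FACET, Boolean.TRUE.toString());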

From source file:org.dspace.authority.SolrAuthority.java

License:BSD License

public Choices getMatches(String field, String text, int collection, int start, int limit, String locale,
        boolean bestMatch) {
    if (limit == 0)
        limit = 10;

    SolrQuery queryArgs = new SolrQuery();
    if (text == null || text.trim().equals("")) {
        queryArgs.setQuery("*:*");
    } else {
        String searchField = "value";
        String localSearchField = "";
        try {
            //A downside of the author data is that the locale is sometimes a number; make sure that this isn't one
            Integer.parseInt(locale);
            locale = null;
        } catch (NumberFormatException e) {
            //Everything is all right
        }
        if (locale != null && !"".equals(locale)) {
            localSearchField = searchField + "_" + locale;
        }

        String query = "(" + toQuery(searchField, text) + ") ";
        if (!localSearchField.equals("")) {
            query += " or (" + toQuery(localSearchField, text) + ")";
        }
        queryArgs.setQuery(query);
    }

    queryArgs.addFilterQuery("field:" + field);
    queryArgs.set(CommonParams.START, start);
    //We add one to our facet limit so that we know if there are more matches
    int maxNumberOfSolrResults = limit + 1;
    if (externalResults) {
        maxNumberOfSolrResults = ConfigurationManager.getIntProperty("xmlui.lookup.select.size", 12);
    }
    queryArgs.set(CommonParams.ROWS, maxNumberOfSolrResults);

    String sortField = "value";
    String localSortField = "";
    if (StringUtils.isNotBlank(locale)) {
        localSortField = sortField + "_" + locale;
        queryArgs.setSortField(localSortField, SolrQuery.ORDER.asc);
    } else {
        queryArgs.setSortField(sortField, SolrQuery.ORDER.asc);
    }

    Choices result;
    try {
        int max = 0;
        boolean hasMore = false;
        QueryResponse searchResponse = getSearchService().search(queryArgs);
        SolrDocumentList authDocs = searchResponse.getResults();
        ArrayList<Choice> choices = new ArrayList<Choice>();
        if (authDocs != null) {
            max = (int) searchResponse.getResults().getNumFound();
            int maxDocs = authDocs.size();
            if (limit < maxDocs)
                maxDocs = limit;
            List<AuthorityValue> alreadyPresent = new ArrayList<AuthorityValue>();
            for (int i = 0; i < maxDocs; i++) {
                SolrDocument solrDocument = authDocs.get(i);
                if (solrDocument != null) {
                    AuthorityValue val = AuthorityValue.fromSolr(solrDocument);

                    Map<String, String> extras = val.choiceSelectMap();
                    extras.put("insolr", val.getId());
                    choices.add(new Choice(val.getId(), val.getValue(), val.getValue(), extras));
                    alreadyPresent.add(val);
                }
            }

            if (externalResults && StringUtils.isNotBlank(text)) {
                int sizeFromSolr = alreadyPresent.size();
                int maxExternalResults = limit <= 10 ? Math.max(limit - sizeFromSolr, 2)
                        : Math.max(limit - 10 - sizeFromSolr, 2) + limit - 10;
                addExternalResults(text, choices, alreadyPresent, maxExternalResults);
            }

            // hasMore = (authDocs.size() == (limit + 1));
            hasMore = true;
        }

        int confidence;
        if (choices.size() == 0)
            confidence = Choices.CF_NOTFOUND;
        else if (choices.size() == 1)
            confidence = Choices.CF_UNCERTAIN;
        else
            confidence = Choices.CF_AMBIGUOUS;

        result = new Choices(choices.toArray(new Choice[choices.size()]), start,
                hasMore ? max : choices.size() + start, confidence, hasMore);
    } catch (Exception e) {
        log.error("Error while retrieving authority values {field: " + field + ", prefix:" + text + "}", e);
        result = new Choices(true);
    }

    return result;
}
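
The maxNumberOfSolrResults = limit + 1 idiom above (requesting one row more than the caller asked for) is what the commented-out hasMore check was meant to exploit: if the extra row comes back, more matches exist. A minimal sketch of that check, using the names from the method above:

queryArgs.set(CommonParams.ROWS, limit + 1); //fetch one extra row
SolrDocumentList authDocs = getSearchService().search(queryArgs).getResults();
boolean hasMore = authDocs.size() > limit;   //extra row present => more matches exist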

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort, but that isn't recommended since it would be a very costly query!
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file!
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

public static void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
    Context context = new Context();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.addFilterQuery("type:" + Constants.BITSTREAM);
        //Only retrieve records which do not have a bundle name
        query.addFilterQuery("-bundleName:[* TO *]");
        query.setRows(0);
        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();

        File tempDirectory = new File(
                ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
        tempDirectory.mkdirs();
        List<File> tempCsvFiles = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            Map<String, String> params = new HashMap<String, String>();
            params.put(CommonParams.Q, "*:*");
            params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
            params.put(CommonParams.WT, "csv");
            params.put(CommonParams.ROWS, String.valueOf(10000));
            params.put(CommonParams.START, String.valueOf(i));

            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, params);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);

            InputStream csvOutput = response.getEntity().getContent();
            Reader csvReader = new InputStreamReader(csvOutput);
            List<String[]> rows = new CSVReader(csvReader).readAll();
            String[][] csvParsed = rows.toArray(new String[rows.size()][]);
            String[] header = csvParsed[0];
            //Attempt to find the bitstream id index !
            int idIndex = 0;
            for (int j = 0; j < header.length; j++) {
                if (header[j].equals("id")) {
                    idIndex = j;
                }
            }

            File tempCsv = new File(tempDirectory.getPath() + File.separatorChar + "temp." + i + ".csv");
            tempCsvFiles.add(tempCsv);
            CSVWriter csvp = new CSVWriter(new FileWriter(tempCsv));
            //csvp.setAlwaysQuote(false);

            //Write the header !
            csvp.writeNext((String[]) ArrayUtils.add(header, "bundleName"));
            Map<Integer, String> bitBundleCache = new HashMap<Integer, String>();
            //Loop over each line (skip the headers though)!
            for (int j = 1; j < csvParsed.length; j++) {
                String[] csvLine = csvParsed[j];
                //Write the default line !
                int bitstreamId = Integer.parseInt(csvLine[idIndex]);
                //Attempt to retrieve our bundle name from the cache !
                String bundleName = bitBundleCache.get(bitstreamId);
                if (bundleName == null) {
                    //Nothing found, retrieve the bitstream
                    Bitstream bitstream = Bitstream.find(context, bitstreamId);
                    //Attempt to retrieve our bitstream !
                    if (bitstream != null) {
                        Bundle[] bundles = bitstream.getBundles();
                        if (bundles != null && 0 < bundles.length) {
                            Bundle bundle = bundles[0];
                            bundleName = bundle.getName();
                            context.removeCached(bundle, bundle.getID());
                        } else {
                            //No bundle found, we are either a collection or a community logo, check for it !
                            DSpaceObject parentObject = bitstream.getParentObject();
                            if (parentObject instanceof Collection) {
                                bundleName = "LOGO-COLLECTION";
                            } else if (parentObject instanceof Community) {
                                bundleName = "LOGO-COMMUNITY";
                            }
                            if (parentObject != null) {
                                context.removeCached(parentObject, parentObject.getID());
                            }

                        }
                        //Cache the bundle name
                        bitBundleCache.put(bitstream.getID(), bundleName);
                        //Remove the bitstream from cache
                        context.removeCached(bitstream, bitstreamId);
                    }
                    //Check if we still have no bundle name
                    //If so, and we are not removing deleted bitstreams, assign the BITSTREAM_DELETED bundle name
                    if (bundleName == null && !removeDeletedBitstreams) {
                        bundleName = "BITSTREAM_DELETED";
                    }
                }
                csvp.writeNext((String[]) ArrayUtils.add(csvLine, bundleName));
            }

            //Loop over our parsed csv
            csvp.flush();
            csvp.close();
        }

        //Add all the separate csv files
        for (File tempCsv : tempCsvFiles) {
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        //Now that all our new bitstream stats are in place, delete all the old ones !
        solr.deleteByQuery("-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
        //Commit everything to wrap up
        solr.commit(true, true);
        //Clean up our directory !
        FileUtils.deleteDirectory(tempDirectory);
    } catch (Exception e) {
        log.error("Error while updating the bitstream statistics", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

/**
 * Export all SOLR usage statistics for viewing/downloading content to a flat text file.
 * The file goes to a series of files in the temp directory.
 *
 * @throws Exception
 */
public static void exportHits() throws Exception {
    Context context = new Context();

    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");

        ModifiableSolrParams solrParams = new ModifiableSolrParams();
        solrParams.set(CommonParams.Q, "statistics_type:view OR (*:* AND -statistics_type:*)");
        solrParams.set(CommonParams.WT, "javabin");
        solrParams.set(CommonParams.ROWS, String.valueOf(10000));

        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();
        System.out.println("There are " + totalRecords + " usage events in SOLR for download/view.");

        for (int i = 0; i < totalRecords; i += 10000) {
            solrParams.set(CommonParams.START, String.valueOf(i));
            QueryResponse queryResponse = solr.query(solrParams);
            SolrDocumentList docs = queryResponse.getResults();

            File exportOutput = new File(
                    tempDirectory.getPath() + File.separatorChar + "usagestats_" + i + ".csv");
            exportOutput.delete();

            //export docs
            addDocumentsToFile(context, docs, exportOutput);
            System.out.println("Export hits [" + i + " - " + String.valueOf(i + 9999) + "] to "
                    + exportOutput.getCanonicalPath());
        }
    } catch (Exception e) {
        log.error("Error while exporting SOLR data", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

@Override
public void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort, but that isn't recommended since it would be a very costly query!
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file!
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setParam("skip", "_version_");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

@Override
public void reindexBitstreamHits(boolean removeDeletedBitstreams) throws Exception {
    Context context = new Context();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");
        query.addFilterQuery("type:" + Constants.BITSTREAM);
        //Only retrieve records which do not have a bundle name
        query.addFilterQuery("-bundleName:[* TO *]");
        query.setRows(0);
        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();

        File tempDirectory = new File(
                configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
        tempDirectory.mkdirs();
        List<File> tempCsvFiles = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            Map<String, String> params = new HashMap<String, String>();
            params.put(CommonParams.Q, "*:*");
            params.put(CommonParams.FQ, "-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
            params.put(CommonParams.WT, "csv");
            params.put(CommonParams.ROWS, String.valueOf(10000));
            params.put(CommonParams.START, String.valueOf(i));

            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, params);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);

            InputStream csvOutput = response.getEntity().getContent();
            Reader csvReader = new InputStreamReader(csvOutput);
            List<String[]> rows = new CSVReader(csvReader).readAll();
            String[][] csvParsed = rows.toArray(new String[rows.size()][]);
            String[] header = csvParsed[0];
            //Attempt to find the bitstream id index !
            int idIndex = 0;
            for (int j = 0; j < header.length; j++) {
                if (header[j].equals("id")) {
                    idIndex = j;
                }
            }

            File tempCsv = new File(tempDirectory.getPath() + File.separatorChar + "temp." + i + ".csv");
            tempCsvFiles.add(tempCsv);
            CSVWriter csvp = new CSVWriter(new FileWriter(tempCsv));
            //csvp.setAlwaysQuote(false);

            //Write the header !
            csvp.writeNext((String[]) ArrayUtils.add(header, "bundleName"));
            Map<String, String> bitBundleCache = new HashMap<>();
            //Loop over each line (skip the headers though)!
            for (int j = 1; j < csvParsed.length; j++) {
                String[] csvLine = csvParsed[j];
                //Write the default line !
                String bitstreamId = csvLine[idIndex];
                //Attempt to retrieve our bundle name from the cache !
                String bundleName = bitBundleCache.get(bitstreamId);
                if (bundleName == null) {
                    //Nothing found, retrieve the bitstream
                    Bitstream bitstream = bitstreamService.findByIdOrLegacyId(context, bitstreamId);
                    //Attempt to retrieve our bitstream !
                    if (bitstream != null) {
                        List<Bundle> bundles = bitstream.getBundles();
                        if (bundles != null && 0 < bundles.size()) {
                            Bundle bundle = bundles.get(0);
                            bundleName = bundle.getName();
                        } else {
                            //No bundle found, we are either a collection or a community logo, check for it !
                            DSpaceObject parentObject = bitstreamService.getParentObject(context, bitstream);
                            if (parentObject instanceof Collection) {
                                bundleName = "LOGO-COLLECTION";
                            } else if (parentObject instanceof Community) {
                                bundleName = "LOGO-COMMUNITY";
                            }

                        }
                        //Cache the bundle name
                        bitBundleCache.put(bitstream.getID().toString(), bundleName);
                        //Remove the bitstream from cache
                    }
                    //Check if we still have no bundle name
                    //If so, and we are not removing deleted bitstreams, assign the BITSTREAM_DELETED bundle name
                    if (bundleName == null && !removeDeletedBitstreams) {
                        bundleName = "BITSTREAM_DELETED";
                    }
                }
                csvp.writeNext((String[]) ArrayUtils.add(csvLine, bundleName));
            }

            //Loop over our parsed csv
            csvp.flush();
            csvp.close();
        }

        //Add all the separate csv files
        for (File tempCsv : tempCsvFiles) {
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            solr.request(contentStreamUpdateRequest);
        }

        //Now that all our new bitstream stats are in place, delete all the old ones !
        solr.deleteByQuery("-bundleName:[* TO *] AND type:" + Constants.BITSTREAM);
        //Commit everything to wrap up
        solr.commit(true, true);
        //Clean up our directory !
        FileUtils.deleteDirectory(tempDirectory);
    } catch (Exception e) {
        log.error("Error while updating the bitstream statistics", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

@Override
public void exportHits() throws Exception {
    Context context = new Context();

    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    try {
        //First of all retrieve the total number of records to be updated
        SolrQuery query = new SolrQuery();
        query.setQuery("*:*");

        ModifiableSolrParams solrParams = new ModifiableSolrParams();
        solrParams.set(CommonParams.Q, "statistics_type:view OR (*:* AND -statistics_type:*)");
        solrParams.set(CommonParams.WT, "javabin");
        solrParams.set(CommonParams.ROWS, String.valueOf(10000));

        addAdditionalSolrYearCores(query);
        long totalRecords = solr.query(query).getResults().getNumFound();
        System.out.println("There are " + totalRecords + " usage events in SOLR for download/view.");

        for (int i = 0; i < totalRecords; i += 10000) {
            solrParams.set(CommonParams.START, String.valueOf(i));
            QueryResponse queryResponse = solr.query(solrParams);
            SolrDocumentList docs = queryResponse.getResults();

            File exportOutput = new File(
                    tempDirectory.getPath() + File.separatorChar + "usagestats_" + i + ".csv");
            exportOutput.delete();

            //export docs
            addDocumentsToFile(context, docs, exportOutput);
            System.out.println("Export hits [" + i + " - " + String.valueOf(i + 9999) + "] to "
                    + exportOutput.getCanonicalPath());
        }
    } catch (Exception e) {
        log.error("Error while exporting SOLR data", e);
        throw e;
    } finally {
        context.abort();
    }
}

From source file:org.eclipse.orion.internal.server.search.IndexPurgeJob.java

License:Open Source License

private SolrQuery findAllQuery() {
    SolrQuery query = new SolrQuery();
    query.setParam(CommonParams.ROWS, Long.toString(PAGE_SIZE));
    query.setQuery("*:*"); //$NON-NLS-1$
    return query;
}
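
Here setParam(CommonParams.ROWS, Long.toString(PAGE_SIZE)) does the same job as SolrQuery's typed helper; a one-line equivalent, assuming PAGE_SIZE fits in an int:

query.setRows((int) PAGE_SIZE); //equivalent to setParam(CommonParams.ROWS, ...)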