Example usage for org.apache.solr.client.solrj SolrQuery add

List of usage examples for org.apache.solr.client.solrj SolrQuery add

Introduction

On this page you can find example usage of org.apache.solr.client.solrj SolrQuery.add.

Prototype

public ModifiableSolrParams add(String name, String... val) 

Document

Add the given values to any existing name
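
For orientation before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed sources) showing how add appends values to a parameter where set would replace them; the fl field and the timestamp_dt date field are only illustrative.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.params.FacetParams;

public class SolrQueryAddExample {
    public static void main(String[] args) {
        SolrQuery query = new SolrQuery("*:*");

        // add() appends to any values already set under the name,
        // whereas set() would overwrite them.
        query.add("fl", "id");
        query.add("fl", "score"); // fl now carries both values

        // add() is also handy for repeated parameters such as facet ranges.
        // timestamp_dt is a hypothetical date field used only for illustration.
        query.setFacet(true);
        query.add(FacetParams.FACET_RANGE, "timestamp_dt");
        query.add(FacetParams.FACET_RANGE_START, "NOW/YEAR-1YEAR");
        query.add(FacetParams.FACET_RANGE_END, "NOW/YEAR");
        query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");

        // Prints the accumulated parameters as a URL-style query string.
        System.out.println(query.toString());
    }
}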

Usage

From source file: org.codelibs.fess.service.SearchService.java

License: Apache License

public List<Map<String, Object>> getDocumentList(final String query, final int start, final int rows,
        final FacetInfo facetInfo, final GeoInfo geoInfo, final MoreLikeThisInfo mltInfo,
        final String[] responseFields, final String[] docValuesFields, final boolean forUser) {
    if (start > queryHelper.getMaxSearchResultOffset()) {
        throw new ResultOffsetExceededException("The result offset exceeds the maximum allowed.");
    }

    final long startTime = System.currentTimeMillis();

    final SolrGroup solrGroup = solrGroupManager.getSolrGroup(QueryType.QUERY);

    QueryResponse queryResponse = null;
    final SolrQuery solrQuery = new SolrQuery();
    final SearchQuery searchQuery = queryHelper.build(query, forUser);
    final String q = searchQuery.getQuery();
    if (StringUtil.isNotBlank(q)) {
        // fields
        solrQuery.setFields(responseFields);
        // query
        solrQuery.setQuery(q);
        solrQuery.setStart(start);
        solrQuery.setRows(rows);
        solrQuery.set("mm", searchQuery.getMinimumShouldMatch());
        solrQuery.set("defType", searchQuery.getDefType());
        for (final Map.Entry<String, String[]> entry : queryHelper.getQueryParamMap().entrySet()) {
            solrQuery.set(entry.getKey(), entry.getValue());
        }
        // filter query
        if (searchQuery.hasFilterQueries()) {
            solrQuery.addFilterQuery(searchQuery.getFilterQueries());
        }
        // sort
        final SortField[] sortFields = searchQuery.getSortFields();
        if (sortFields.length != 0) {
            for (final SortField sortField : sortFields) {
                solrQuery.addSort(sortField.getField(),
                        Constants.DESC.equals(sortField.getOrder()) ? SolrQuery.ORDER.desc
                                : SolrQuery.ORDER.asc);
            }
        } else if (queryHelper.hasDefaultSortFields()) {
            for (final SortField sortField : queryHelper.getDefaultSortFields()) {
                solrQuery.addSort(sortField.getField(),
                        Constants.DESC.equals(sortField.getOrder()) ? SolrQuery.ORDER.desc
                                : SolrQuery.ORDER.asc);
            }
        }
        // highlighting
        if (queryHelper.getHighlightingFields() != null && queryHelper.getHighlightingFields().length != 0) {
            for (final String hf : queryHelper.getHighlightingFields()) {
                solrQuery.addHighlightField(hf);
            }
            solrQuery.setHighlightSnippets(queryHelper.getHighlightSnippetSize());
        }
        // shards
        if (queryHelper.getShards() != null) {
            solrQuery.setParam("shards", queryHelper.getShards());
        }
        // geo
        if (geoInfo != null && geoInfo.isAvailable()) {
            solrQuery.addFilterQuery(geoInfo.toGeoQueryString());
            final String additionalGeoQuery = queryHelper.getAdditionalGeoQuery();
            if (StringUtil.isNotBlank(additionalGeoQuery)) {
                solrQuery.addFilterQuery(additionalGeoQuery);
            }
        }
        // facets
        if (facetInfo != null) {
            solrQuery.setFacet(true);
            if (facetInfo.field != null) {
                for (final String f : facetInfo.field) {
                    if (queryHelper.isFacetField(f)) {
                        solrQuery.addFacetField(f);
                    } else {
                        throw new FessSolrQueryException("EFESS0002", new Object[] { f });
                    }
                }
            }
            if (facetInfo.query != null) {
                for (final String fq : facetInfo.query) {
                    final String facetQuery = queryHelper.buildFacetQuery(fq);
                    if (StringUtil.isNotBlank(facetQuery)) {
                        solrQuery.addFacetQuery(facetQuery);
                    } else {
                        throw new FessSolrQueryException("EFESS0003", new Object[] { fq, facetQuery });
                    }
                }
            }
            if (facetInfo.limit != null) {
                solrQuery.setFacetLimit(Integer.parseInt(facetInfo.limit));
            }
            if (facetInfo.minCount != null) {
                solrQuery.setFacetMinCount(Integer.parseInt(facetInfo.minCount));
            }
            if (facetInfo.missing != null) {
                solrQuery.setFacetMissing(Boolean.parseBoolean(facetInfo.missing));
            }
            if (facetInfo.prefix != null) {
                solrQuery.setFacetPrefix(facetInfo.prefix);
            }
            if (facetInfo.sort != null && queryHelper.isFacetSortValue(facetInfo.sort)) {
                solrQuery.setFacetSort(facetInfo.sort);
            }
        }
        // mlt
        if (mltInfo != null) {
            final String mltField = queryHelper.getMoreLikeThisField(mltInfo.field);
            if (mltField != null) {
                solrQuery.set("mlt", true);
                if (mltInfo.count != null) {
                    solrQuery.set("mlt.count", Integer.parseInt(mltInfo.count));
                }
                solrQuery.set("mlt.fl", mltField);
            }
        }

        if (queryHelper.getTimeAllowed() >= 0) {
            solrQuery.setTimeAllowed(queryHelper.getTimeAllowed());
        }
        final Set<Entry<String, String[]>> paramSet = queryHelper.getRequestParameterSet();
        if (!paramSet.isEmpty()) {
            for (final Map.Entry<String, String[]> entry : paramSet) {
                solrQuery.set(entry.getKey(), entry.getValue());
            }
        }

        if (docValuesFields != null) {
            for (final String docValuesField : docValuesFields) {
                solrQuery.add(Constants.DCF, docValuesField);
            }
        }

        queryResponse = solrGroup.query(solrQuery, SolrRequest.METHOD.POST);
    }
    final long execTime = System.currentTimeMillis() - startTime;

    final QueryResponseList queryResponseList = ComponentUtil.getQueryResponseList();
    queryResponseList.init(queryResponse, rows);
    queryResponseList.setSearchQuery(q);
    queryResponseList.setSolrQuery(solrQuery.toString());
    queryResponseList.setExecTime(execTime);
    return queryResponseList;
}

From source file: org.dspace.discovery.SolrServiceImpl.java

License: BSD License

protected SolrQuery resolveToSolrQuery(Context context, DiscoverQuery discoveryQuery,
        boolean includeUnDiscoverable) {
    SolrQuery solrQuery = new SolrQuery();

    String query = "*:*";
    if (discoveryQuery.getQuery() != null) {
        query = discoveryQuery.getQuery();
    }

    solrQuery.setQuery(query);
    if (discoveryQuery.isSpellCheck()) {
        solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
        solrQuery.setParam(SpellingParams.SPELLCHECK_COLLATE, Boolean.TRUE);
        solrQuery.setParam("spellcheck", Boolean.TRUE);
    }

    if (!includeUnDiscoverable) {
        solrQuery.addFilterQuery("NOT(withdrawn:true)");
        solrQuery.addFilterQuery("NOT(discoverable:false)");
    }

    for (int i = 0; i < discoveryQuery.getFilterQueries().size(); i++) {
        String filterQuery = discoveryQuery.getFilterQueries().get(i);
        solrQuery.addFilterQuery(filterQuery);
    }
    if (discoveryQuery.getDSpaceObjectFilter() != -1) {
        solrQuery.addFilterQuery("search.resourcetype:" + discoveryQuery.getDSpaceObjectFilter());
    }

    for (int i = 0; i < discoveryQuery.getFieldPresentQueries().size(); i++) {
        String filterQuery = discoveryQuery.getFieldPresentQueries().get(i);
        solrQuery.addFilterQuery(filterQuery + ":[* TO *]");
    }

    if (discoveryQuery.getStart() != -1) {
        solrQuery.setStart(discoveryQuery.getStart());
    }

    if (discoveryQuery.getMaxResults() != -1) {
        solrQuery.setRows(discoveryQuery.getMaxResults());
    }

    if (discoveryQuery.getSortField() != null) {
        SolrQuery.ORDER order = SolrQuery.ORDER.asc;
        if (discoveryQuery.getSortOrder().equals(DiscoverQuery.SORT_ORDER.desc))
            order = SolrQuery.ORDER.desc;

        solrQuery.addSortField(discoveryQuery.getSortField(), order);
    }

    for (String property : discoveryQuery.getProperties().keySet()) {
        List<String> values = discoveryQuery.getProperties().get(property);
        solrQuery.add(property, values.toArray(new String[values.size()]));
    }

    List<DiscoverFacetField> facetFields = discoveryQuery.getFacetFields();
    if (0 < facetFields.size()) {
        //Only add facet information if there are any facets
        for (DiscoverFacetField facetFieldConfig : facetFields) {
            String field = transformFacetField(facetFieldConfig, facetFieldConfig.getField(), false);
            solrQuery.addFacetField(field);

            // Setting the facet limit in this fashion ensures that each facet can have its own max
            solrQuery.add("f." + field + "." + FacetParams.FACET_LIMIT,
                    String.valueOf(facetFieldConfig.getLimit()));
            String facetSort;
            if (DiscoveryConfigurationParameters.SORT.COUNT.equals(facetFieldConfig.getSortOrder())) {
                facetSort = FacetParams.FACET_SORT_COUNT;
            } else {
                facetSort = FacetParams.FACET_SORT_INDEX;
            }
            solrQuery.add("f." + field + "." + FacetParams.FACET_SORT, facetSort);
            if (facetFieldConfig.getOffset() != -1) {
                solrQuery.setParam("f." + field + "." + FacetParams.FACET_OFFSET,
                        String.valueOf(facetFieldConfig.getOffset()));
            }
            if (facetFieldConfig.getPrefix() != null) {
                solrQuery.setFacetPrefix(field, facetFieldConfig.getPrefix());
            }
        }

        List<String> facetQueries = discoveryQuery.getFacetQueries();
        for (String facetQuery : facetQueries) {
            solrQuery.addFacetQuery(facetQuery);
        }

        if (discoveryQuery.getFacetMinCount() != -1) {
            solrQuery.setFacetMinCount(discoveryQuery.getFacetMinCount());
        }

        solrQuery.setParam(FacetParams.FACET_OFFSET, String.valueOf(discoveryQuery.getFacetOffset()));
    }

    if (0 < discoveryQuery.getHitHighlightingFields().size()) {
        solrQuery.setHighlight(true);
        solrQuery.add(HighlightParams.USE_PHRASE_HIGHLIGHTER, Boolean.TRUE.toString());
        for (DiscoverHitHighlightingField highlightingField : discoveryQuery.getHitHighlightingFields()) {
            solrQuery.addHighlightField(highlightingField.getField() + "_hl");
            solrQuery.add("f." + highlightingField.getField() + "_hl." + HighlightParams.FRAGSIZE,
                    String.valueOf(highlightingField.getMaxChars()));
            solrQuery.add("f." + highlightingField.getField() + "_hl." + HighlightParams.SNIPPETS,
                    String.valueOf(highlightingField.getMaxSnippets()));
        }

    }

    //Add any configured search plugins !
    List<SolrServiceSearchPlugin> solrServiceSearchPlugins = new DSpace().getServiceManager()
            .getServicesByType(SolrServiceSearchPlugin.class);
    for (SolrServiceSearchPlugin searchPlugin : solrServiceSearchPlugins) {
        searchPlugin.additionalSearchParameters(context, discoveryQuery, solrQuery);
    }
    return solrQuery;
}

From source file: org.dspace.statistics.SolrLogger.java

License: BSD License

public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file: org.dspace.statistics.SolrLogger.java

License: BSD License

private static void addAdditionalSolrYearCores(SolrQuery solrQuery) {
    //Only add if needed
    if (0 < statisticYearCores.size()) {
        //The shards are a comma separated list of the urls to the cores
        solrQuery.add(ShardParams.SHARDS, StringUtils.join(statisticYearCores.iterator(), ","));
    }

}

From source file: org.dspace.statistics.SolrLoggerServiceImpl.java

License: BSD License

@Override
public void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill, but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setParam("skip", "_version_");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file: org.dspace.statistics.SolrLoggerServiceImpl.java

License: BSD License

protected void addAdditionalSolrYearCores(SolrQuery solrQuery) {
    //Only add if needed
    if (0 < statisticYearCores.size()) {
        //The shards are a comma separated list of the urls to the cores
        solrQuery.add(ShardParams.SHARDS, StringUtils.join(statisticYearCores.iterator(), ","));
    }

}

From source file: org.dspace.util.SolrImportExport.java

License: BSD License

/**
 * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
 * See #makeExportFilename for the file names that are generated.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
     */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
        throws SolrServerException, IOException, SolrImportExportException {
    if (StringUtils.isBlank(solrUrl)) {
        throw new SolrImportExportException(
                "Could not construct solr URL for index " + indexName + ", aborting export.");
    }

    if (!toDir.exists() || !toDir.canWrite()) {
        throw new SolrImportExportException("Target directory " + toDir
                + " doesn't exist or is not writable, aborting export of index " + indexName);
    }

    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    SolrQuery query = new SolrQuery("*:*");
    if (StringUtils.isNotBlank(fromWhen)) {
        String lastValueFilter = makeFilterQuery(timeField, fromWhen);
        if (StringUtils.isNotBlank(lastValueFilter)) {
            query.addFilterQuery(lastValueFilter);
        }
    }

    query.setRows(0);
    query.setGetFieldStatistics(timeField);
    Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
    if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
    if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    Date earliestTimestamp = (Date) timeFieldInfo.getMin();

    query.setGetFieldStatistics(false);
    query.clearSorts();
    query.setRows(0);
    query.setFacet(true);
    query.add(FacetParams.FACET_RANGE, timeField);
    query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
    query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
    query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
    query.setFacetMinCount(1);

    List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();

    for (RangeFacet.Count monthFacet : monthFacets) {
        Date monthStartDate;
        String monthStart = monthFacet.getValue();
        try {
            monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
        } catch (java.text.ParseException e) {
            throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart,
                    e);
        }
        int docsThisMonth = monthFacet.getCount();

        SolrQuery monthQuery = new SolrQuery("*:*");
        monthQuery.setRows(ROWS_PER_FILE);
        monthQuery.set("wt", "csv");
        monthQuery.set("fl", "*");

        monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]");

        for (int i = 0; i < docsThisMonth; i += ROWS_PER_FILE) {
            monthQuery.setStart(i);
            URL url = new URL(solrUrl + "/select?" + monthQuery.toString());

            File file = new File(toDir.getCanonicalPath(),
                    makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
            if (file.createNewFile()) {
                FileUtils.copyURLToFile(url, file);
                log.info("Exported batch " + i + " to " + file.getCanonicalPath());
            } else {
                throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
                        + " while exporting index " + indexName + ", month" + monthStart + ", batch " + i);
            }
        }
    }
}

From source file: org.easyrec.plugin.profilesolr.impl.SolrSimilarityServiceImpl.java

License: Open Source License

@Override
public void matchProfiles(SolrSimilarityConfigurationInt configuration, SolrSimilarityStatistics stats) {

    List<Item> users = solrItemDAO.getUserItems(configuration);
    int count = 0;
    for (Item user : users) {
        Object parsedProfile = configuration.getConfiguration().jsonProvider().parse(user.getProfileData());
        String query = "";
        for (QueryFieldConfiguration fc : configuration.getQueryFields()) {
            List<Object> fields = fc.getJsonPath().read(parsedProfile, configuration.getConfiguration());//Configuration.builder().options(Option.ALWAYS_RETURN_LIST,Option.DEFAULT_PATH_LEAF_TO_NULL).build());
            for (Object field : fields) {
                if (field instanceof String) {
                    //field = (String) field;
                    if (fc.getBoost() != null)
                        field = (String) field + '^' + fc.getBoost();
                    //field = field.substring(field.lastIndexOf("[") + 2, field.length() - 2);
                    query += field + " ";
                } else {
                    stats.setException(
                            "The following path does not return simple String values and is ignored! Check your settings: "
                                    + fc.getJsonPath().getPath());
                }
            }

        }
        //logger.info("Query:" + query);
        SolrQuery sQuery = new SolrQuery(query);
        sQuery.addFilterQuery("tenant_i:" + configuration.getTenantId());
        sQuery.add("fl", "tenant_i,type_i,id_i,score");
        try {
            QueryResponse response = solrClient.query(sQuery);

            List<SolrItem> list = response.getBeans(SolrItem.class);
            if (!list.isEmpty()) {
                List<ItemAssocVO<Integer, Integer>> itemAssocs = new ArrayList<>();
                Integer userId = idMappingDAO.lookupOnly(user.getItemId());

                for (SolrItem item : list) {
                    ItemAssocVO<Integer, Integer> rule = new ItemAssocVO<>(configuration.getTenantId(),
                            new ItemVO<>(user.getTenantId(), userId, configuration.getItemTypeUser()),
                            configuration.getAssocType(), (double) item.getScore(),
                            new ItemVO<>(item.getTenant(), item.getId(), item.getItemType()),
                            typeMappingService.getIdOfSourceType(configuration.getTenantId(),
                                    SolrSimilarityGenerator.ID.toString() + "/"
                                            + SolrSimilarityGenerator.VERSION),
                            "", typeMappingService.getIdOfViewType(configuration.getTenantId(),
                                    TypeMappingService.VIEW_TYPE_COMMUNITY),
                            true, stats.getStartDate());

                    itemAssocs.add(rule);
                }
                count += itemAssocs.size();
                for (ItemAssocVO<Integer, Integer> itemAssoc : itemAssocs) {
                    itemAssocDAO.insertOrUpdateItemAssoc(itemAssoc);
                }

            }

        } catch (SolrServerException ex) {
            logger.debug("An Excption occured quering solr.", ex);
        } catch (IOException ex) {
            logger.debug("An Excption occured quering solr.", ex);
        }
    }
    stats.setSizeRules(count);
    stats.setNumberOfRulesCreated(count);

}

From source file: org.emonocot.persistence.dao.hibernate.SearchableDaoImpl.java

License: Open Source License

/**
     * @param query
 *            A lucene query
 * @param spatialQuery
 *            A spatial query to filter the results by
 * @param pageSize
 *            The maximum number of results to return
 * @param pageNumber
 *            The offset (in pageSize chunks, 0-based) from the beginning of
 *            the recordset
 * @param facets
 *            The names of the facets you want to calculate
 * @param selectedFacets
 *            A map of facets which you would like to restrict the search by
 * @param sort
 *            A representation for the order results should be returned in
 * @param fetch
 *            Set the fetch profile
 * @return a Page from the resultset
 * @throws SolrServerException
 */
public final Page<T> search(final String query, final String spatialQuery, final Integer pageSize,
        final Integer pageNumber, final String[] facets, Map<String, String> facetPrefixes,
        final Map<String, String> selectedFacets, final String sort, final String fetch)
        throws SolrServerException {
    SolrQuery solrQuery = prepareQuery(query, sort, pageSize, pageNumber, selectedFacets);
    solrQuery.set("spellcheck", "true");
    solrQuery.set("spellcheck.collate", "true");
    solrQuery.set("spellcheck.count", "1");
    solrQuery.set("bq", "base.class_s:org.emonocot.model.Taxon^2.0");

    // Filter the searchable objects out
    solrQuery.addFilterQuery("base.class_searchable_b:" + isSearchableObject());

    if (spatialQuery != null && spatialQuery.trim().length() != 0) {
        solrQuery.addFilterQuery(spatialQuery);
    }

    if (facets != null && facets.length != 0) {
        solrQuery.setFacet(true);
        solrQuery.setFacetMinCount(1);
        solrQuery.setFacetSort(FacetParams.FACET_SORT_INDEX);

        for (String facet : facets) {
            if (facet.equals("base.class_s")) {
                solrQuery.setParam("f.base.class_s.facet.sort", FacetParams.FACET_SORT_COUNT);
            }
            if (facet.endsWith("_dt")) {
                /**
                 * Is a date facet. Once Solr 4.2 is released, we can implement variable length buckets, but for now
                 * stick with fixed buckets https://issues.apache.org/jira/browse/SOLR-2366
                 */

                solrQuery.add("facet.range", facet);
                solrQuery.add("f." + facet + ".facet.range.start", "NOW/DAY-1YEARS");
                solrQuery.add("f." + facet + ".facet.range.end", "NOW/DAY");
                solrQuery.add("f." + facet + ".facet.range.gap", "+1MONTH");
            } else {
                solrQuery.addFacetField(facet);
            }
            includeMissing(solrQuery, facet);
        }
        if (facetPrefixes != null) {
            for (String facet : facetPrefixes.keySet()) {
                solrQuery.add("f." + facet + ".facet.prefix", facetPrefixes.get(facet));
            }
        }
    }
    QueryResponse queryResponse = solrServer.query(solrQuery);

    List<T> results = new ArrayList<T>();
    for (SolrDocument solrDocument : queryResponse.getResults()) {
        T object = loadObjectForDocument(solrDocument);
        enableProfilePostQuery(object, fetch);
        results.add(object);
    }

    Long totalResults = new Long(queryResponse.getResults().getNumFound());
    Page<T> page = new DefaultPageImpl<T>(totalResults.intValue(), pageNumber, pageSize, results,
            queryResponse);
    if (selectedFacets != null) {
        page.setSelectedFacets(selectedFacets);
    }
    page.setSort(sort);

    return page;
}

From source file: org.emonocot.persistence.dao.hibernate.SearchableDaoImpl.java

License: Open Source License

public CellSet analyse(String rows, String cols, Integer firstCol, Integer maxCols, Integer firstRow,
        Integer maxRows, Map<String, String> selectedFacets, String[] facets, Cube cube)
        throws SolrServerException {
    SolrQuery query = new SolrQuery();
    query.setQuery("*:*");
    SolrQuery totalQuery = new SolrQuery();
    totalQuery.setQuery("*:*");

    // We're not interested in the results themselves
    query.setRows(1);
    query.setStart(0);
    totalQuery.setRows(1);
    totalQuery.setStart(0);

    if (rows == null) {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);
        query.addFacetField(cube.getDefaultLevel());
        includeMissing(query, cube.getDefaultLevel());
        includeMissing(totalQuery, cube.getDefaultLevel());
        if (maxRows != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalRows}" + cube.getDefaultLevel());

            query.add("f." + cube.getDefaultLevel() + ".facet.limit", maxRows.toString());
            query.add("f." + cube.getDefaultLevel() + ".facet.mincount", "1");
            if (firstRow != null) {
                query.add("f." + cube.getDefaultLevel() + ".facet.offset", firstRow.toString());
            }
        }
    } else if (cols == null) {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);
        query.addFacetField(rows);
        includeMissing(query, rows);
        includeMissing(totalQuery, rows);
        if (maxRows != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalRows}" + rows);
            query.add("f." + rows + ".facet.limit", maxRows.toString());
            query.add("f." + rows + ".facet.mincount", "1");
            if (firstRow != null) {
                query.add("f." + rows + ".facet.offset", firstRow.toString());
            }
        }
        if (cube.getLevel(rows).isMultiValued() && cube.getLevel(rows).getHigher() != null) {
            Level higher = cube.getLevel(rows).getHigher();
            totalQuery.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
            query.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
        }
    } else {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);
        query.addFacetField(rows);
        includeMissing(query, rows);
        includeMissing(totalQuery, rows);
        if (maxRows != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalRows}" + rows);
            query.add("f." + rows + ".facet.limit", maxRows.toString());
            query.add("f." + rows + ".facet.mincount", "1");
            if (firstRow != null) {
                query.add("f." + rows + ".facet.offset", firstRow.toString());
            }
        }
        if (cube.getLevel(rows).isMultiValued() && cube.getLevel(rows).getHigher() != null) {
            Level higher = cube.getLevel(rows).getHigher();
            totalQuery.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
            query.add("f." + rows + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
        }
        query.addFacetField(cols);
        includeMissing(query, cols);
        if (maxCols != null) {
            totalQuery.setFacet(true);
            totalQuery.setFacetMinCount(1);
            totalQuery.addFacetField("{!key=totalCols}" + cols);
            /**
             * Facet pivot does not behave the same way on columns - the limit is
             */
            //query.add("f." + cols + ".facet.limit", maxCols.toString());
            //query.add("f." + cols + ".facet.mincount", "1");
            //if (firstCol != null) {
            //   query.add("f." + cols + ".facet.offset", firstCol.toString());
            //}
        }
        if (cube.getLevel(cols).isMultiValued() && cube.getLevel(cols).getHigher() != null) {
            Level higher = cube.getLevel(cols).getHigher();
            totalQuery.add("f." + cols + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
            query.add("f." + cols + ".facet.prefix", selectedFacets.get(higher.getFacet()) + "_");
        }
        query.addFacetPivotField(rows + "," + cols);
    }

    if (selectedFacets != null && !selectedFacets.isEmpty()) {
        for (String facetName : selectedFacets.keySet()) {
            String facetValue = selectedFacets.get(facetName);
            if (StringUtils.isNotEmpty(facetValue)) {
                totalQuery.addFilterQuery(facetName + ":" + selectedFacets.get(facetName));
                query.addFilterQuery(facetName + ":" + selectedFacets.get(facetName));
            } else {//Subtract/Exclude documents with any value for the facet
                totalQuery.addFilterQuery("-" + facetName + ":[* TO *]");
                query.addFilterQuery("-" + facetName + ":[* TO *]");
            }
        }
    }

    if (facets != null && facets.length != 0) {
        query.setFacet(true);
        query.setFacetMinCount(1);
        query.setFacetSort(FacetParams.FACET_SORT_INDEX);

        for (String facetName : facets) {
            if (rows != null && rows.equals(facetName)) {
            } else if (cols != null && cols.equals(facetName)) {
            } else if (rows == null && facetName.equals(cube.getDefaultLevel())) {
            } else {
                includeMissing(query, facetName);
                query.addFacetField(facetName);
            }
        }
    }

    QueryResponse response = solrServer.query(query);
    QueryResponse totalResponse = solrServer.query(totalQuery);
    FacetField totalRows = null;
    FacetField totalCols = null;
    if (totalResponse.getFacetField("totalRows") != null) {
        totalRows = totalResponse.getFacetField("totalRows");
    }

    if (totalResponse.getFacetField("totalCols") != null) {
        totalCols = totalResponse.getFacetField("totalCols");
    }

    CellSet cellSet = new CellSet(response, selectedFacets, query, rows, cols, firstRow, maxRows, firstCol,
            maxCols, totalRows, totalCols, cube);

    return cellSet;
}