Example usage for org.apache.solr.client.solrj SolrQuery clearSorts

List of usage examples for org.apache.solr.client.solrj SolrQuery clearSorts

Introduction

On this page you can find example usage for org.apache.solr.client.solrj SolrQuery clearSorts.

Prototype

public SolrQuery clearSorts() 

Source Link

Document

Clears current sort information.
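
A minimal, self-contained sketch of the typical call pattern: clearSorts() removes any sort clauses previously added to a SolrQuery so that a different sort (or the default relevance ordering) can be applied. The field names used here ("price_f", "id") are placeholders for illustration only and do not come from the projects quoted below.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrQuery.ORDER;

public class ClearSortsSketch {
    public static void main(String[] args) {
        // build a query with an explicit sort clause
        final SolrQuery query = new SolrQuery("*:*");
        query.addSort("price_f", ORDER.asc);

        // drop all previously configured sort clauses ...
        query.clearSorts();

        // ... and optionally start over with a different sort
        query.addSort("id", ORDER.desc);
        query.setRows(10);

        // print the resulting query parameters for inspection
        System.out.println(query);
    }
}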

Usage

From source file:net.yacy.cora.federate.solr.connector.AbstractSolrConnector.java

License:Open Source License

public static SolrQuery getSolrQuery(final String querystring, final String sort, final int offset,
        final int count, final String... fields) {
    // construct query
    final SolrQuery params = new SolrQuery();
    //if (count < 2 && querystring.startsWith("{!raw f=")) {
    //    params.setQuery("*:*");
    //    params.addFilterQuery(querystring);
    //} else {
    params.setQuery(querystring);
    //}
    params.clearSorts();
    if (sort != null) {
        params.set(CommonParams.SORT, sort);
    }
    params.setRows(count);
    params.setStart(offset);
    params.setFacet(false);
    if (fields != null && fields.length > 0)
        params.setFields(fields);
    params.setIncludeScore(false);
    params.setParam("defType", "edismax");
    params.setParam(DisMaxParams.QF, CollectionSchema.text_t.getSolrFieldName() + "^1.0");
    return params;
}

From source file:net.yacy.cora.federate.solr.connector.AbstractSolrConnector.java

License:Open Source License

/**
 * check if a given document, identified by url hash as document id exists
 * @param id the url hash and document id
 * @return metadata if any entry in solr exists, null otherwise
 * @throws IOException
 */
@Override
public LoadTimeURL getLoadTimeURL(String id) throws IOException {
    // construct raw query
    final SolrQuery params = new SolrQuery();
    //params.setQuery(CollectionSchema.id.getSolrFieldName() + ":\"" + id + "\"");
    String q = "{!cache=false raw f=" + CollectionSchema.id.getSolrFieldName() + "}" + id;
    params.setQuery(q);
    params.setRows(1);
    params.setStart(0);
    params.setFacet(false);
    params.clearSorts();
    params.setFields(CollectionSchema.id.getSolrFieldName(), CollectionSchema.sku.getSolrFieldName(),
            CollectionSchema.load_date_dt.getSolrFieldName());
    params.setIncludeScore(false);

    // query the server
    final SolrDocumentList sdl = getDocumentListByParams(params);
    if (sdl == null || sdl.getNumFound() <= 0)
        return null;
    SolrDocument doc = sdl.iterator().next();
    LoadTimeURL md = getLoadTimeURL(doc);
    return md;
}

From source file:net.yacy.cora.federate.solr.connector.AbstractSolrConnector.java

License:Open Source License

/**
 * get the number of results when this query is done.
 * This should only be called if the actual result is never used, and only the count is interesting
 * @param querystring
 * @return the number of results for this query
 */
@Override
public long getCountByQuery(String querystring) throws IOException {
    // construct query
    final SolrQuery params = new SolrQuery();
    params.setQuery(querystring);
    params.setRows(0); // essential to just get count
    params.setStart(0);
    params.setFacet(false);
    params.clearSorts();
    params.setFields(CollectionSchema.id.getSolrFieldName());
    params.setIncludeScore(false);

    // query the server
    final SolrDocumentList sdl = getDocumentListByParams(params);
    return sdl == null ? 0 : sdl.getNumFound();
}

From source file:net.yacy.cora.federate.solr.connector.AbstractSolrConnector.java

License:Open Source License

/**
 * get facets of the index: a list of lists with values that are most common in a specific field
 * @param query a query which is performed to get the facets
 * @param fields the field names which are selected as facet
 * @param maxresults the maximum size of the resulting maps
 * @return a map with key = facet field name, value = an ordered map of field values for that field
 * @throws IOException
 */
@Override
public LinkedHashMap<String, ReversibleScoreMap<String>> getFacets(String query, int maxresults,
        final String... fields) throws IOException {
    // construct query
    assert fields.length > 0;
    final SolrQuery params = new SolrQuery();
    params.setQuery(query);
    params.setRows(0);
    params.setStart(0);
    params.setFacet(true);
    params.setFacetMinCount(1); // there are many 0-count facets in the uninverted index cache
    params.setFacetLimit(maxresults);
    params.setFacetSort(FacetParams.FACET_SORT_COUNT);
    params.setParam(FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc /*FACET_METHOD_fcs*/);
    params.setFields(fields);
    params.clearSorts();
    params.setIncludeScore(false);
    for (String field : fields)
        params.addFacetField(field);

    // query the server
    QueryResponse rsp = getResponseByParams(params);
    LinkedHashMap<String, ReversibleScoreMap<String>> facets = new LinkedHashMap<String, ReversibleScoreMap<String>>(
            fields.length);
    for (String field : fields) {
        FacetField facet = rsp.getFacetField(field);
        ReversibleScoreMap<String> result = new ClusteredScoreMap<String>(UTF8.insensitiveUTF8Comparator);
        List<Count> values = facet.getValues();
        if (values == null)
            continue;
        for (Count ff : values)
            if (ff.getCount() > 0)
                result.set(ff.getName(), (int) ff.getCount());
        facets.put(field, result);
    }
    return facets;
}

From source file:net.yacy.cora.federate.solr.connector.AbstractSolrConnector.java

License:Open Source License

@Override
public SolrDocument getDocumentById(final String id, final String... fields) throws IOException {
    assert id.length() == Word.commonHashLength : "wrong id: " + id;
    final SolrQuery query = new SolrQuery();
    // construct query
    StringBuilder sb = new StringBuilder(23);
    sb.append("{!cache=false raw f=").append(CollectionSchema.id.getSolrFieldName()).append('}').append(id);
    query.setQuery(sb.toString());
    //query.setQuery("*:*");
    //query.addFilterQuery(sb.toString());
    query.clearSorts();
    query.setRows(1);
    query.setStart(0);
    if (fields != null && fields.length > 0)
        query.setFields(fields);
    query.setIncludeScore(false);

    // query the server
    try {
        final SolrDocumentList docs = getDocumentListByParams(query);
        if (docs == null || docs.isEmpty())
            return null;
        SolrDocument doc = docs.get(0);
        return doc;
    } catch (final Throwable e) {
        clearCaches(); // clear the caches in case this was caused by an OOM condition
        throw new IOException(e.getMessage(), e);
    }
}

From source file:org.dspace.util.SolrImportExport.java

License:BSD License

/**
 * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
 * See #makeExportFilename for the file names that are generated.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
 */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
        throws SolrServerException, IOException, SolrImportExportException {
    if (StringUtils.isBlank(solrUrl)) {
        throw new SolrImportExportException(
                "Could not construct solr URL for index" + indexName + ", aborting export.");
    }

    if (!toDir.exists() || !toDir.canWrite()) {
        throw new SolrImportExportException("Target directory " + toDir
                + " doesn't exist or is not writable, aborting export of index " + indexName);
    }

    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    SolrQuery query = new SolrQuery("*:*");
    if (StringUtils.isNotBlank(fromWhen)) {
        String lastValueFilter = makeFilterQuery(timeField, fromWhen);
        if (StringUtils.isNotBlank(lastValueFilter)) {
            query.addFilterQuery(lastValueFilter);
        }
    }

    query.setRows(0);
    query.setGetFieldStatistics(timeField);
    Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
    if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
    if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    Date earliestTimestamp = (Date) timeFieldInfo.getMin();

    query.setGetFieldStatistics(false);
    query.clearSorts();
    query.setRows(0);
    query.setFacet(true);
    query.add(FacetParams.FACET_RANGE, timeField);
    query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
    query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
    query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
    query.setFacetMinCount(1);

    List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();

    for (RangeFacet.Count monthFacet : monthFacets) {
        Date monthStartDate;
        String monthStart = monthFacet.getValue();
        try {
            monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
        } catch (java.text.ParseException e) {
            throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart,
                    e);
        }
        int docsThisMonth = monthFacet.getCount();

        SolrQuery monthQuery = new SolrQuery("*:*");
        monthQuery.setRows(ROWS_PER_FILE);
        monthQuery.set("wt", "csv");
        monthQuery.set("fl", "*");

        monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]");

        for (int i = 0; i < docsThisMonth; i += ROWS_PER_FILE) {
            monthQuery.setStart(i);
            URL url = new URL(solrUrl + "/select?" + monthQuery.toString());

            File file = new File(toDir.getCanonicalPath(),
                    makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
            if (file.createNewFile()) {
                FileUtils.copyURLToFile(url, file);
                log.info("Exported batch " + i + " to " + file.getCanonicalPath());
            } else {
                throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
                        + " while exporting index " + indexName + ", month" + monthStart + ", batch " + i);
            }
        }
    }
}

From source file:org.zenoss.zep.index.impl.solr.SolrEventIndexBackend.java

License:Open Source License

private SolrQuery buildSolrQuery(EventFilter filter, EventFilter exclusionFilter, Integer limit, Integer offset,
        List<EventSort> sortList, SolrFieldFilter fieldFilter) throws ZepException {
    final String query = buildQuery(filter, exclusionFilter);
    SolrQuery solrQuery = new SolrQuery().setQuery(query);

    if (limit != null && limit < MAX_RESULTS && limit > 0)
        solrQuery.setRows(limit);
    else
        solrQuery.setRows(MAX_RESULTS);

    if (offset != null)
        solrQuery.setStart(offset);

    if (sortList == null)
        solrQuery.clearSorts();
    else if (sortList.isEmpty())
        solrQuery.addSort(SortClause.desc(IndexConstants.FIELD_LAST_SEEN_TIME));
    else
        for (EventSort sort : sortList)
            for (SortClause clause : createSortClauses(sort))
                solrQuery.addSort(clause);

    if (fieldFilter != null) {
        switch (fieldFilter) {
        case DEFAULTS:
            break;
        case JUST_UUID:
            solrQuery.setFields(IndexConstants.FIELD_UUID);
            break;
        case UUID_LAST_SEEN_AND_PROTOBUF:
            solrQuery.setFields(IndexConstants.FIELD_UUID, IndexConstants.FIELD_LAST_SEEN_TIME,
                    IndexConstants.FIELD_PROTOBUF);
            break;
        case SEARCH_EVENT_TAG_SEVERITIES:
            solrQuery.setFields(IndexConstants.FIELD_ELEMENT_IDENTIFIER,
                    IndexConstants.FIELD_ELEMENT_SUB_IDENTIFIER, IndexConstants.FIELD_SEVERITY,
                    IndexConstants.FIELD_STATUS, IndexConstants.FIELD_TAGS, IndexConstants.FIELD_COUNT);
            break;
        default:
            throw new IllegalStateException("Unexpected fieldFilter: " + fieldFilter);
        }
    }
    solrQuery.setIncludeScore(false);
    solrQuery.setHighlight(false);
    solrQuery.setTerms(false);
    solrQuery.setFacet(false);

    return solrQuery;
}