Example usage for org.apache.solr.common.params FacetParams FACET_RANGE

List of usage examples for org.apache.solr.common.params FacetParams FACET_RANGE

Introduction

In this page you can find the example usage for org.apache.solr.common.params FacetParams FACET_RANGE.

Prototype

String FACET_RANGE

To view the source code for org.apache.solr.common.params FacetParams FACET_RANGE, click the Source Link below.

Click Source Link

Document

Any numerical field whose terms the user wants to enumerate over facet constraint counts for selected ranges.

Usage

From source file:fi.nationallibrary.ndl.solr.request.RangeFieldFacets.java

License:Apache License

/**
 * Builds range-facet counts for every field requested via the
 * {@code facet.range} request parameter.
 *
 * @return a NamedList holding one entry per requested range facet; empty when
 *         no facet.range fields were requested
 * @throws IOException propagated from the per-field facet computation
 * @throws ParseException propagated from the per-field facet computation
 */
public NamedList getFacetRangeCounts() throws IOException, ParseException {
    final NamedList result = new SimpleOrderedMap();
    final String[] rangeFields = params.getParams(FacetParams.FACET_RANGE);

    // No facet.range parameters -> nothing to compute.
    if (rangeFields == null || rangeFields.length == 0) {
        return result;
    }

    // Each requested field appends its own counts to the shared result list.
    for (String field : rangeFields) {
        getFacetRangeCounts(field, result);
    }

    return result;
}

From source file:fi.nationallibrary.ndl.solr.request.RangeFieldFacets.java

License:Apache License

/**
 * Computes range-facet counts for a single {@code facet.range} field and
 * appends them to {@code resOuter}.
 * <p>
 * When the field's type is a {@link RangeField}, faceting is performed on its
 * sub-field/sub-type; otherwise the field itself is used. A type-specific
 * RangeEndpointCalculator is selected based on that (sub-)field's type.
 *
 * @param facetRange the facet.range parameter value naming the field to facet
 * @param resOuter   accumulator the counts are added to, keyed by the facet key
 * @throws IOException    propagated from index access
 * @throws ParseException if the range parameters cannot be parsed
 */
void getFacetRangeCounts(String facetRange, NamedList resOuter) throws IOException, ParseException {

    final IndexSchema schema = searcher.getSchema();

    // Side effect: parseParams populates the facetValue and key fields read below.
    parseParams(FacetParams.FACET_RANGE, facetRange);
    String f = facetValue;

    SchemaField rootSf = schema.getField(f);
    FieldType rootFt = rootSf.getType();

    // RangeField types facet on their underlying sub-field and sub-type.
    final SchemaField sf;
    final FieldType ft;
    if (rootFt instanceof RangeField) {
        sf = ((RangeField) rootFt).getSubField(rootSf);
        ft = ((RangeField) rootFt).getSubType();
    } else {
        sf = rootSf;
        ft = rootFt;
    }

    RangeEndpointCalculator calc = null;

    // Select the endpoint calculator matching the concrete numeric/date type.
    if (ft instanceof TrieField) {
        final TrieField trie = (TrieField) ft;

        switch (trie.getType()) {
        case FLOAT:
            calc = new FloatRangeEndpointCalculator(sf);
            break;
        case DOUBLE:
            calc = new DoubleRangeEndpointCalculator(sf);
            break;
        case INTEGER:
            calc = new IntegerRangeEndpointCalculator(sf);
            break;
        case LONG:
            calc = new LongRangeEndpointCalculator(sf);
            break;
        default:
            // NOTE(review): "tried" in the message below looks like a typo for "trie".
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "Unable to range facet on tried field of unexpected type:" + f);
        }
    } else if (ft instanceof DateField) {
        calc = new DateRangeEndpointCalculator(sf, NOW);
    } else if (ft instanceof SortableIntField) {
        calc = new IntegerRangeEndpointCalculator(sf);
    } else if (ft instanceof SortableLongField) {
        calc = new LongRangeEndpointCalculator(sf);
    } else if (ft instanceof SortableFloatField) {
        calc = new FloatRangeEndpointCalculator(sf);
    } else if (ft instanceof SortableDoubleField) {
        calc = new DoubleRangeEndpointCalculator(sf);
    } else {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on field:" + sf);
    }

    // Counts are computed against the root field, stored under the parsed key.
    resOuter.add(key, getFacetRangeCounts(rootSf, calc));
}

From source file:org.apache.jackrabbit.core.query.lucene.FacetHandler.java

License:Open Source License

/**
 * Parses the option string embedded in a rep:facet(...) column key and adds
 * the corresponding Solr facet parameters to {@code parameters}.
 * <p>
 * Options are URL-query-style "name=value" pairs separated by '&amp;'. The
 * "nodetype" and any "query" options are remembered and handled after the
 * loop; everything else is added directly as a facet parameter.
 *
 * @param facetFunctionPrefix prefix of the facet function in the column key
 * @param parameters          target list the facet parameters are added to
 * @param counter             index appended to the property name to keep
 *                            parameter names unique per facet
 * @param column              column entry whose key carries the facet options
 * @throws RepositoryException if node-type / property-definition lookup fails
 */
private void extractFacetParameters(String facetFunctionPrefix, NamedList<Object> parameters, int counter,
        Map.Entry<String, PropertyValue> column) throws RepositoryException {
    // first extract options from rep:facet() from column key
    final String key = column.getKey();
    final String facetOptions = key.substring(key.indexOf(facetFunctionPrefix) + facetFunctionPrefix.length(),
            key.lastIndexOf(")"));

    // remember nodetype and query values if encountered so that we can process them once the whole facet is parsed
    String nodeType = null;
    List<String> unparsedQueries = null;

    // loop invariants
    final String columnPropertyName = column.getValue().getPropertyName();
    final String propertyName = columnPropertyName + SimpleJahiaJcrFacets.PROPNAME_INDEX_SEPARATOR + counter;

    // assert the facet type by checking whether the options String contains date or range; otherwise the type is field
    // NOTE(review): contains("date")/contains("range") is a substring heuristic and
    // could misfire if an option name or value merely contains those words.
    final boolean isQuery = facetOptions.contains(FacetParams.FACET_QUERY);
    String facetType = FacetParams.FACET_FIELD; // default facet type
    if (isQuery) {
        facetType = FacetParams.FACET_QUERY;
    } else if (facetOptions.contains("date")) {
        facetType = FacetParams.FACET_DATE;
    } else if (facetOptions.contains("range")) {
        facetType = FacetParams.FACET_RANGE;
    }
    parameters.add(facetType, propertyName);

    // populate parameters
    // each parameter name/value pair is separated from the next one by & so split on this
    final String[] paramPairs = StringUtils.split(facetOptions, "&");
    for (String paramPair : paramPairs) {
        // for each pair, extract the name and value separated by =
        int separator = paramPair.indexOf('=');
        if (separator >= 0) { // todo: what should we do if a pair doesn't have an equal sign in it?
            final String paramName = paramPair.substring(0, separator);
            final String paramValue = paramPair.substring(separator + 1);

            // some parameters need to be specially processed and not be added as others so process them and exit current iteration when encountered
            if (paramName.equals("nodetype")) {
                nodeType = paramValue; // remember node type value for later processing
                continue;
            } else if (paramName.contains("query")) {
                if (unparsedQueries == null) {
                    unparsedQueries = new LinkedList<String>();
                }
                unparsedQueries.add(paramValue); // remember query value for later processing
                continue;
            }

            // create full parameter name and add its value to the parameters
            String facetOption = getFacetOption(paramName);
            parameters.add(getFullParameterName(propertyName, facetOption), paramValue);
        }
    }

    // node type parameter
    if (StringUtils.isEmpty(nodeType)) {
        // if we didn't have a node type specified in the given options, extract it from the selector name and create the associated parameter
        nodeType = getNodeTypeFromSelector(column.getValue().getSelectorName(), columnPropertyName);
    }

    // only add node type parameter if we're not dealing with a query
    if (!isQuery) {
        parameters.add(getFullParameterName(propertyName, getFacetOption("nodetype")), nodeType);
    }

    // deal with embedded query if needed; at this point, nodeType will have been either extracted or asserted from selector name
    if (unparsedQueries != null) {
        ExtendedPropertyDefinition epd = NodeTypeRegistry.getInstance().getNodeType(nodeType)
                .getPropertyDefinition(columnPropertyName);

        for (String unparsedQuery : unparsedQueries) {
            // A query with no unescaped ':' has no field prefix -> prepend the index field name.
            if (unparsedQuery.split("(?<!\\\\):").length == 1 && !columnPropertyName.equals("rep:facet()")) {
                if (epd != null) {
                    String fieldNameInIndex = getFieldNameInIndex(propertyName, epd, "");
                    unparsedQuery = QueryParser.escape(fieldNameInIndex) + ":" + unparsedQuery;
                }
            }
            parameters.add(getFullParameterName(propertyName, "query"), unparsedQuery);
        }
    }
}

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

/**
 * Shards the statistics Solr index into one core per year.
 * <p>
 * Facets the source index by year on the "time" field, then for each year:
 * creates a "statistics-YYYY" core, exports that year's documents as CSV in
 * batches of 10000 via the /select handler, uploads the CSV files into the
 * new core, and finally deletes the year's documents from the source core.
 *
 * @throws IOException         if temp-file handling or the HTTP export fails
 * @throws SolrServerException if a Solr request fails
 */
public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
     * Start by faceting by year so we can include each year in a separate core!
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    // NOTE(review): mkdirs() return value is ignored; failure surfaces later as an I/O error.
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        // Export in batches of 10000; the first iteration uses Solr's default START (0).
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            // NOTE(review): a new DefaultHttpClient is created per batch and never
            // closed, leaking connections; the response entity stream is also not
            // explicitly closed. Consider one shared, closeable client.
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file:org.dspace.statistics.SolrLoggerServiceImpl.java

License:BSD License

/**
 * Shards the statistics Solr index into one core per year.
 * <p>
 * Facets the source index by year on the "time" field, then for each year:
 * creates a "statistics-YYYY" core, exports that year's documents as CSV in
 * batches of 10000 via the /select handler, uploads the CSV files into the
 * new core (skipping the _version_ field), and finally deletes the year's
 * documents from the source core.
 *
 * @throws IOException         if temp-file handling or the HTTP export fails
 * @throws SolrServerException if a Solr request fails
 */
@Override
public void shardSolrIndex() throws IOException, SolrServerException {
    /*
     * Start by faceting by year so we can include each year in a separate core!
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000; this is a bit overkill but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    // NOTE(review): mkdirs() return value is ignored; failure surfaces later as an I/O error.
    File tempDirectory = new File(
            configurationService.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    //We only have one range query !
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
    for (RangeFacet.Count count : yearResults) {
        long totalRecords = count.getCount();

        //Create a range query from this !
        //We start with our current year
        DCDate dcStart = new DCDate(count.getValue());
        Calendar endDate = Calendar.getInstance();
        //Advance one year for the start of the next one !
        endDate.setTime(dcStart.toDate());
        endDate.add(Calendar.YEAR, 1);
        DCDate dcEndDate = new DCDate(endDate.getTime());

        StringBuilder filterQuery = new StringBuilder();
        filterQuery.append("time:([");
        filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
        filterQuery.append(" TO ");
        filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append("]");
        //The next part of the filter query excludes the content from midnight of the next year !
        filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
        filterQuery.append(")");

        Map<String, String> yearQueryParams = new HashMap<String, String>();
        yearQueryParams.put(CommonParams.Q, "*:*");
        yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
        yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
        yearQueryParams.put(CommonParams.WT, "csv");

        //Start by creating a new core
        String coreName = "statistics-" + dcStart.getYear();
        HttpSolrServer statisticsYearServer = createCore(solr, coreName);

        System.out.println("Moving: " + totalRecords + " into core " + coreName);
        log.info("Moving: " + totalRecords + " records into core " + coreName);

        List<File> filesToUpload = new ArrayList<File>();
        // Export in batches of 10000; the first iteration uses Solr's default START (0).
        for (int i = 0; i < totalRecords; i += 10000) {
            String solrRequestUrl = solr.getBaseURL() + "/select";
            solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

            HttpGet get = new HttpGet(solrRequestUrl);
            // NOTE(review): a new DefaultHttpClient is created per batch and never
            // closed, leaking connections; the response entity stream is also not
            // explicitly closed. Consider one shared, closeable client.
            HttpResponse response = new DefaultHttpClient().execute(get);
            InputStream csvInputstream = response.getEntity().getContent();
            //Write the csv output to a file !
            File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                    + "." + i + ".csv");
            FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
            filesToUpload.add(csvFile);

            //Add 10000 & start over again
            yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
        }

        for (File tempCsv : filesToUpload) {
            //Upload the data in the csv files to our new solr core
            ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                    "/update/csv");
            contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
            // Skip the internal Solr _version_ field so the target core assigns its own.
            contentStreamUpdateRequest.setParam("skip", "_version_");
            contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

            statisticsYearServer.request(contentStreamUpdateRequest);
        }
        statisticsYearServer.commit(true, true);

        //Delete contents of this year from our year query !
        solr.deleteByQuery(filterQuery.toString());
        solr.commit(true, true);

        log.info("Moved " + totalRecords + " records into core: " + coreName);
    }

    FileUtils.deleteDirectory(tempDirectory);
}

From source file:org.dspace.util.SolrImportExport.java

License:BSD License

/**
 * Exports documents from the given index to the specified target directory in batches of #ROWS_PER_FILE, starting at fromWhen (or all documents).
 * See #makeExportFilename for the file names that are generated.
 *
 * @param indexName The index to export.
 * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
 * @param solrUrl The solr URL for the index to export. Must not be null.
 * @param timeField The time field to use for sorting the export. Must not be null.
 * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
 * @throws SolrServerException if there is a problem with exporting the index.
 * @throws IOException if there is a problem creating the files or communicating with Solr.
 * @throws SolrImportExportException if there is a problem in communicating with Solr.
 */
public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
        throws SolrServerException, IOException, SolrImportExportException {
    if (StringUtils.isBlank(solrUrl)) {
        throw new SolrImportExportException(
                "Could not construct solr URL for index" + indexName + ", aborting export.");
    }

    if (!toDir.exists() || !toDir.canWrite()) {
        throw new SolrImportExportException("Target directory " + toDir
                + " doesn't exist or is not writable, aborting export of index " + indexName);
    }

    // NOTE(review): this HttpSolrServer is never shut down before returning.
    HttpSolrServer solr = new HttpSolrServer(solrUrl);

    SolrQuery query = new SolrQuery("*:*");
    // Restrict the export window when a fromWhen value was supplied.
    if (StringUtils.isNotBlank(fromWhen)) {
        String lastValueFilter = makeFilterQuery(timeField, fromWhen);
        if (StringUtils.isNotBlank(lastValueFilter)) {
            query.addFilterQuery(lastValueFilter);
        }
    }

    // First pass: use field statistics only to find the earliest timestamp.
    query.setRows(0);
    query.setGetFieldStatistics(timeField);
    Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
    if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
    if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
        log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField
                + ", from " + fromWhen);
        return;
    }
    Date earliestTimestamp = (Date) timeFieldInfo.getMin();

    // Second pass: range-facet by month from the earliest timestamp to next month,
    // so each month becomes one export batch group.
    query.setGetFieldStatistics(false);
    query.clearSorts();
    query.setRows(0);
    query.setFacet(true);
    query.add(FacetParams.FACET_RANGE, timeField);
    query.add(FacetParams.FACET_RANGE_START, SOLR_DATE_FORMAT.format(earliestTimestamp) + "/MONTH");
    query.add(FacetParams.FACET_RANGE_END, "NOW/MONTH+1MONTH");
    query.add(FacetParams.FACET_RANGE_GAP, "+1MONTH");
    query.setFacetMinCount(1);

    List<RangeFacet.Count> monthFacets = solr.query(query).getFacetRanges().get(0).getCounts();

    for (RangeFacet.Count monthFacet : monthFacets) {
        Date monthStartDate;
        String monthStart = monthFacet.getValue();
        try {
            monthStartDate = SOLR_DATE_FORMAT_NO_MS.parse(monthStart);
        } catch (java.text.ParseException e) {
            throw new SolrImportExportException("Could not read start of month batch as date: " + monthStart,
                    e);
        }
        int docsThisMonth = monthFacet.getCount();

        SolrQuery monthQuery = new SolrQuery("*:*");
        monthQuery.setRows(ROWS_PER_FILE);
        monthQuery.set("wt", "csv");
        monthQuery.set("fl", "*");

        // Solr date-math: the upper bound is the facet value plus one month.
        monthQuery.addFilterQuery(timeField + ":[" + monthStart + " TO " + monthStart + "+1MONTH]");

        // Export the month in ROWS_PER_FILE-sized pages, one file per page.
        for (int i = 0; i < docsThisMonth; i += ROWS_PER_FILE) {
            monthQuery.setStart(i);
            URL url = new URL(solrUrl + "/select?" + monthQuery.toString());

            File file = new File(toDir.getCanonicalPath(),
                    makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
            // createNewFile() is false when the file already exists -> treat as an error
            // rather than overwriting a previous export.
            if (file.createNewFile()) {
                FileUtils.copyURLToFile(url, file);
                log.info("Exported batch " + i + " to " + file.getCanonicalPath());
            } else {
                throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
                        + " while exporting index " + indexName + ", month" + monthStart + ", batch " + i);
            }
        }
    }
}

From source file:org.jahia.services.search.facets.SimpleJahiaJcrFacets.java

License:Open Source License

/**
 * Returns a list of value constraints and the associated facet counts for
 * each facet range specified in the request parameters.
 * <p>
 * A failure on one field is logged and recorded via addException instead of
 * aborting the remaining fields.
 *
 * @return a NamedList with one entry per requested range facet; empty when no
 *         facet.range fields were requested
 * @see FacetParams#FACET_RANGE
 */
public NamedList<Object> getFacetRangeCounts() {
    final NamedList<Object> result = new SimpleOrderedMap<Object>();
    final String[] rangeFields = params.getParams(FacetParams.FACET_RANGE);

    // No facet.range parameters -> nothing to compute.
    if (rangeFields == null || rangeFields.length == 0) {
        return result;
    }

    for (String field : rangeFields) {
        try {
            getFacetRangeCounts(field, result);
        } catch (Exception e) {
            // Best-effort: record the failure and continue with the other fields.
            String message = "Exception during facet.range of " + field;
            SolrException.logOnce(SolrCore.log, message, e);
            addException(message, e);
        }
    }

    return result;
}

From source file:org.jahia.services.search.facets.SimpleJahiaJcrFacets.java

License:Open Source License

/**
 * Computes range-facet counts for a single {@code facet.range} field and
 * appends them to {@code resOuter}.
 * <p>
 * The JCR property definition is resolved from the "f.&lt;field&gt;.facet.nodetype"
 * parameter, a synthetic SchemaField is built for the corresponding index
 * field, and a type-specific RangeEndpointCalculator is selected from the
 * resulting field type.
 *
 * @param facetRange the facet.range parameter value naming the field to facet
 * @param resOuter   accumulator the counts are added to, keyed by the facet key
 * @throws IOException         propagated from index access
 * @throws ParseException      if the range parameters cannot be parsed
 * @throws RepositoryException if node-type / property-definition lookup fails
 */
void getFacetRangeCounts(String facetRange, NamedList<Object> resOuter)
        throws IOException, ParseException, RepositoryException {

    // Side effect: parseParams populates the facetValue and key fields read below.
    parseParams(FacetParams.FACET_RANGE, facetRange);
    String f = facetValue;

    // Strip the counter suffix to recover the JCR property name, then resolve
    // its definition from the node type named in the per-field facet params.
    String fieldName = StringUtils.substringBeforeLast(f, PROPNAME_INDEX_SEPARATOR);
    ExtendedPropertyDefinition epd = NodeTypeRegistry.getInstance()
            .getNodeType(params.get("f." + f + ".facet.nodetype")).getPropertyDefinition(fieldName);
    String fieldNameInIndex = getFieldNameInIndex(f, fieldName, epd, params.getFieldParam(f, "facet.locale"));
    SchemaField sf = new SchemaField(fieldNameInIndex, getType(epd));
    final FieldType ft = sf.getType();

    RangeEndpointCalculator<? extends Comparable> calc = null;

    // Select the endpoint calculator matching the concrete numeric/date type.
    if (ft instanceof TrieField) {
        final TrieField trie = (TrieField) ft;

        switch (trie.getType()) {
        case FLOAT:
            calc = new FloatRangeEndpointCalculator(sf);
            break;
        case DOUBLE:
            calc = new DoubleRangeEndpointCalculator(sf);
            break;
        case INTEGER:
            calc = new IntegerRangeEndpointCalculator(sf);
            break;
        case LONG:
            calc = new LongRangeEndpointCalculator(sf);
            break;
        default:
            // NOTE(review): "tried" in the message below looks like a typo for "trie".
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "Unable to range facet on tried field of unexpected type:" + f);
        }
    } else if (ft instanceof DateField) {
        calc = new DateRangeEndpointCalculator(sf, NOW);
    } else if (ft instanceof SortableIntField) {
        calc = new IntegerRangeEndpointCalculator(sf);
    } else if (ft instanceof SortableLongField) {
        calc = new LongRangeEndpointCalculator(sf);
    } else if (ft instanceof SortableFloatField) {
        calc = new FloatRangeEndpointCalculator(sf);
    } else if (ft instanceof SortableDoubleField) {
        calc = new DoubleRangeEndpointCalculator(sf);
    } else {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unable to range facet on field:" + sf);
    }

    // Counts are stored under the key parsed from the request parameters.
    resOuter.add(key, getFacetRangeCounts(sf, f, calc));
}