Example usage for org.apache.solr.common.params CommonParams START

List of usage examples for org.apache.solr.common.params CommonParams START

Introduction

In this page you can find the example usage for org.apache.solr.common.params CommonParams START.

Prototype

String START

To view the source code for org.apache.solr.common.params CommonParams START, click the Source Link below.

Click Source Link

Document

Zero-based offset of the first matching document to retrieve.

Usage

From source file:net.yacy.server.serverObjects.java

License:Open Source License

/**
 * Converts this parameter map into Solr query parameters, filling in defaults
 * for the default search field, start offset and row count when they are
 * absent, and optionally enabling faceting on the given schema fields.
 *
 * @param facets schema fields to facet on; null or empty means no faceting
 * @return the backing multi-map of Solr parameters
 */
public MultiMapSolrParams toSolrParams(CollectionSchema[] facets) {
    // Ensure the required query parameters exist, supplying defaults where missing.
    if (!this.containsKey(CommonParams.DF)) {
        // default search field: the full-text field
        this.put(CommonParams.DF, CollectionSchema.text_t.getSolrFieldName());
    }
    if (!this.containsKey(CommonParams.START)) {
        this.put(CommonParams.START, "0"); // start at the first result
    }
    if (!this.containsKey(CommonParams.ROWS)) {
        this.put(CommonParams.ROWS, "10"); // return ten results by default
    }

    boolean facetingRequested = facets != null && facets.length > 0;
    if (facetingRequested) {
        // Switch faceting on and register one facet.field entry per schema field.
        this.remove("facet");
        this.put("facet", "true");
        for (CollectionSchema facet : facets) {
            this.add("facet.field", facet.getSolrFieldName());
        }
    }
    return this.map;
}

From source file:opennlp.tools.similarity.apps.solr.IterativeSearchRequestHandler.java

License:Apache License

/**
 * Runs the query plus iterative sub-handlers, then slices the accumulated
 * result list down to the requested [start, start+rows) window and writes
 * it to the response.
 *
 * @param req incoming Solr request (supplies q, fq, start, rows)
 * @param rsp response to which the sliced result list is added
 * @throws Exception if searching or a sub-handler invocation fails
 */
public void handleRequestBody1(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {

    // extract params from request
    SolrParams params = req.getParams();
    String q = params.get(CommonParams.Q);
    String[] fqs = params.getParams(CommonParams.FQ);
    int start = 0;
    try {
        start = Integer.parseInt(params.get(CommonParams.START));
    } catch (Exception e) {
        /* missing or malformed START: keep default 0 */ }
    int rows = 0;
    try {
        rows = Integer.parseInt(params.get(CommonParams.ROWS));
    } catch (Exception e) {
        /* missing or malformed ROWS: keep default 0 */ }
    //SolrPluginUtils.setReturnFields(req, rsp);

    // build initial data structures

    SolrDocumentList results = new SolrDocumentList();
    SolrIndexSearcher searcher = req.getSearcher();
    Map<String, SchemaField> fields = req.getSchema().getFields();
    int ndocs = start + rows;
    Filter filter = buildFilter(fqs, req);
    Set<Integer> alreadyFound = new HashSet<Integer>();

    // invoke the various sub-handlers in turn and return results
    doSearch1(results, searcher, q, filter, ndocs, req, fields, alreadyFound);

    // ... more sub-handler calls here ...

    // build and write response
    float maxScore = 0.0F;
    int numFound = 0;
    List<SolrDocument> slice = new ArrayList<SolrDocument>();
    for (Iterator<SolrDocument> it = results.iterator(); it.hasNext();) {
        SolrDocument sdoc = it.next();
        // FIX: a document may carry no "score" field; unboxing a null Float in
        // the comparison below used to throw a NullPointerException.
        Float score = (Float) sdoc.getFieldValue("score");
        if (score != null && maxScore < score) {
            maxScore = score;
        }
        // keep only the documents inside the requested paging window
        if (numFound >= start && numFound < start + rows) {
            slice.add(sdoc);
        }
        numFound++;
    }
    results.clear();
    results.addAll(slice);
    results.setNumFound(numFound);
    results.setMaxScore(maxScore);
    results.setStart(start);
    rsp.add("response", results);

}

From source file:org.alfresco.solr.component.spellcheck.AlfrescoSpellCheckCollator.java

License:Open Source License

/**
 * Builds candidate spell-check collations for the original query and, when a
 * QueryComponent is available, verifies each candidate against the index to
 * estimate how many hits it would return.
 *
 * @param result           spelling suggestions produced by the spell checker
 * @param originalQuery    the user's original query string
 * @param ultimateResponse response builder of the in-flight request; its params
 *                         and context are temporarily modified for verification
 * @return collations, each carrying its query string, hit estimate and rank
 */
public List<AlfrescoSpellCheckCollation> collate(SpellingResult result, String originalQuery,
        ResponseBuilder ultimateResponse) {
    List<AlfrescoSpellCheckCollation> collations = new ArrayList<>();

    // Find the QueryComponent so candidate collations can be re-run against the index.
    QueryComponent queryComponent = null;
    if (ultimateResponse.components != null) {
        for (SearchComponent sc : ultimateResponse.components) {
            if (sc instanceof QueryComponent) {
                queryComponent = (QueryComponent) sc;
                break;
            }
        }
    }

    boolean verifyCandidateWithQuery = true;
    int maxTries = maxCollationTries;
    int maxNumberToIterate = maxTries;
    if (maxTries < 1) {
        // No verification tries configured: emit up to maxCollations unverified candidates.
        maxTries = 1;
        maxNumberToIterate = maxCollations;
        verifyCandidateWithQuery = false;
    }
    if (queryComponent == null && verifyCandidateWithQuery) {
        LOG.info(
                "Could not find an instance of QueryComponent. Disabling collation verification against the index.");
        maxTries = 1;
        verifyCandidateWithQuery = false;
    }
    docCollectionLimit = docCollectionLimit > 0 ? docCollectionLimit : 0;
    // maxDocId is needed to extrapolate hit counts when collection terminates early.
    int maxDocId = -1;
    if (verifyCandidateWithQuery && docCollectionLimit > 0) {
        IndexReader reader = ultimateResponse.req.getSearcher().getIndexReader();
        maxDocId = reader.maxDoc();
    }

    // Alfresco passes the real (AFTS) query via a JSON payload in the request context.
    JSONObject alfrescoJSON = (JSONObject) ultimateResponse.req.getContext().get(AbstractQParser.ALFRESCO_JSON);
    String originalAftsQuery = alfrescoJSON != null ? alfrescoJSON.getString("query")
            : ultimateResponse.getQueryString();

    int tryNo = 0;
    int collNo = 0;
    PossibilityIterator possibilityIter = new PossibilityIterator(result.getSuggestions(), maxNumberToIterate,
            maxCollationEvaluations, suggestionsMayOverlap);
    while (tryNo < maxTries && collNo < maxCollations && possibilityIter.hasNext()) {
        PossibilityIterator.RankedSpellPossibility possibility = possibilityIter.next();
        String collationQueryStr = getCollation(originalQuery, possibility.corrections);
        int hits = 0;
        String aftsQuery = null;

        if (verifyCandidateWithQuery) {
            tryNo++;
            SolrQueryRequest req = ultimateResponse.req;
            SolrParams origParams = req.getParams();
            ModifiableSolrParams params = new ModifiableSolrParams(origParams);
            Iterator<String> origParamIterator = origParams.getParameterNamesIterator();
            int pl = SpellingParams.SPELLCHECK_COLLATE_PARAM_OVERRIDE.length();
            // Apply any "spellcheck.collateParam.X=..." overrides as plain "X=..." params;
            // an empty value means "remove X" for the verification query.
            while (origParamIterator.hasNext()) {
                String origParamName = origParamIterator.next();
                if (origParamName.startsWith(SpellingParams.SPELLCHECK_COLLATE_PARAM_OVERRIDE)
                        && origParamName.length() > pl) {
                    String[] val = origParams.getParams(origParamName);
                    if (val.length == 1 && val[0].length() == 0) {
                        params.set(origParamName.substring(pl), (String[]) null);
                    } else {
                        params.set(origParamName.substring(pl), val);
                    }
                }
            }
            // we don't set the 'q' param, as we'll pass the query via JSON.
            // params.set(CommonParams.Q, collationQueryStr);
            params.remove(CommonParams.START);
            params.set(CommonParams.ROWS, "" + docCollectionLimit);
            // we don't want any stored fields
            params.set(CommonParams.FL, "id");
            // we'll sort by doc id to ensure no scoring is done.
            params.set(CommonParams.SORT, "_docid_ asc");
            // If a dismax query, don't add unnecessary clauses for scoring
            params.remove(DisMaxParams.TIE);
            params.remove(DisMaxParams.PF);
            params.remove(DisMaxParams.PF2);
            params.remove(DisMaxParams.PF3);
            params.remove(DisMaxParams.BQ);
            params.remove(DisMaxParams.BF);
            // Collate testing does not support Grouping (see SOLR-2577)
            params.remove(GroupParams.GROUP);

            boolean useQStr = true;

            if (alfrescoJSON != null) {
                try {
                    // Substitute the corrected query into the original AFTS query text.
                    aftsQuery = originalAftsQuery.replaceAll(Pattern.quote(originalQuery),
                            Matcher.quoteReplacement(collationQueryStr));
                    alfrescoJSON.put("query", aftsQuery);
                    req.getContext().put(AbstractQParser.ALFRESCO_JSON, alfrescoJSON);
                    useQStr = false;
                } catch (JSONException e) {
                    LOG.warn("Exception trying to get/set the query from/to ALFRESCO_JSON.]" + e);
                }
            } else {
                aftsQuery = collationQueryStr;
            }
            req.setParams(params);
            // creating a request here... make sure to close it!
            ResponseBuilder checkResponse = new ResponseBuilder(req, new SolrQueryResponse(),
                    Arrays.<SearchComponent>asList(queryComponent));
            checkResponse.setQparser(ultimateResponse.getQparser());
            checkResponse.setFilters(ultimateResponse.getFilters());
            checkResponse.components = Arrays.<SearchComponent>asList(queryComponent);
            if (useQStr) {
                checkResponse.setQueryString(collationQueryStr);
            }
            try {
                queryComponent.prepare(checkResponse);
                if (docCollectionLimit > 0) {
                    // Allow the collector to stop early once enough docs are seen.
                    int f = checkResponse.getFieldFlags();
                    checkResponse.setFieldFlags(f |= SolrIndexSearcher.TERMINATE_EARLY);
                }
                queryComponent.process(checkResponse);
                hits = (Integer) checkResponse.rsp.getToLog().get("hits");
            } catch (EarlyTerminatingCollectorException etce) {
                assert (docCollectionLimit > 0);
                assert 0 < etce.getNumberScanned();
                assert 0 < etce.getNumberCollected();

                if (etce.getNumberScanned() == maxDocId) {
                    hits = etce.getNumberCollected();
                } else {
                    // Extrapolate the total hit count from the fraction of the index scanned.
                    hits = (int) (((float) (maxDocId * etce.getNumberCollected()))
                            / (float) etce.getNumberScanned());
                }
            } catch (Exception e) {
                LOG.warn(
                        "Exception trying to re-query to check if a spell check possibility would return any hits."
                                + e);
            } finally {
                checkResponse.req.close();
            }
        }
        // Keep the candidate if it produced hits, or if verification is disabled.
        if (hits > 0 || !verifyCandidateWithQuery) {
            collNo++;
            AlfrescoSpellCheckCollation collation = new AlfrescoSpellCheckCollation();
            collation.setCollationQuery(aftsQuery);
            collation.setCollationQueryString(collationQueryStr);
            collation.setHits(hits);
            collation.setInternalRank(
                    suggestionsMayOverlap ? ((possibility.rank * 1000) + possibility.index) : possibility.rank);

            NamedList<String> misspellingsAndCorrections = new NamedList<>();
            for (SpellCheckCorrection corr : possibility.corrections) {
                misspellingsAndCorrections.add(corr.getOriginal().toString(), corr.getCorrection());
            }
            collation.setMisspellingsAndCorrections(misspellingsAndCorrections);
            collations.add(collation);
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("Collation: " + aftsQuery
                    + (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : ""));
        }
    }
    return collations;
}

From source file:org.alfresco.solr.query.AbstractQParser.java

License:Open Source License

/**
 * Builds the sort specification for this query, honouring local params over
 * global params, mapping Alfresco property names in the sort string to index
 * field names, and applying the start/rows paging offsets.
 *
 * @param useGlobalParams whether to fall back to global request params when a
 *                        value is absent from the local params
 * @return the parsed sort spec with offset and count applied
 * @throws SyntaxError if the underlying query cannot be parsed
 */
@Override
public SortSpec getSortSpec(boolean useGlobalParams) throws SyntaxError {

    getQuery(); // ensure query is parsed first

    String sortStr = null;
    String startS = null;
    String rowsS = null;

    if (localParams != null) {
        sortStr = localParams.get(CommonParams.SORT);
        startS = localParams.get(CommonParams.START);
        rowsS = localParams.get(CommonParams.ROWS);

        // if any of these parameters are present, don't go back to the global params
        if (sortStr != null || startS != null || rowsS != null) {
            useGlobalParams = false;
        }
    }

    if (useGlobalParams) {
        if (sortStr == null) {
            sortStr = params.get(CommonParams.SORT);
        }
        if (startS == null) {
            startS = params.get(CommonParams.START);
        }
        if (rowsS == null) {
            rowsS = params.get(CommonParams.ROWS);
        }
    }

    int start = startS != null ? Integer.parseInt(startS) : 0;
    int rows = rowsS != null ? Integer.parseInt(rowsS) : 10;

    // Fix sort fields here: tokenize the sort string on whitespace/commas and
    // map each property token to its indexed field name. Separators are copied
    // through unchanged so the overall sort syntax is preserved.
    if (sortStr != null) {
        StringBuilder builder = new StringBuilder();
        StringBuilder propertyBuilder = null; // non-null while inside a property token
        char c;
        for (int i = 0; i < sortStr.length(); i++) {
            c = sortStr.charAt(i);
            if (propertyBuilder == null) {
                if (!Character.isWhitespace(c) && (c != ',')) {
                    // start of a new property token
                    propertyBuilder = new StringBuilder();
                    propertyBuilder.append(c);
                } else {
                    builder.append(c);
                }
            } else {
                if (Character.isWhitespace(c) || (c == ',')) {
                    // end of the token: map it to the sortable index field
                    String toAppend = AlfrescoSolrDataModel.getInstance()
                            .mapProperty(propertyBuilder.toString(), FieldUse.SORT, getReq());
                    builder.append(toAppend);
                    builder.append(c);
                    propertyBuilder = null;
                } else {
                    propertyBuilder.append(c);
                }
            }
        }
        // flush a trailing token that ran to the end of the string
        if (propertyBuilder != null) {
            String toAppend = AlfrescoSolrDataModel.getInstance().mapProperty(propertyBuilder.toString(),
                    FieldUse.SORT, getReq());
            builder.append(toAppend);
        }
        sortStr = builder.toString();
    }

    // Normalise the legacy "ID" sort field to the lowercase "id" field.
    if (sortStr != null) {
        sortStr = sortStr.replaceAll("^ID(\\s)", "id$1");
        sortStr = sortStr.replaceAll("(\\s)ID(\\s)", "$1id$2");
    }
    SortSpec sort = SortSpecParsing.parseSortSpec(sortStr, req);

    sort.setOffset(start);
    sort.setCount(rows);
    return sort;
}

From source file:org.alfresco.solr.tracker.CascadeTrackerTest.java

License:Open Source License

/**
 * After updating the test data hierarchy (folders and file), the test checks that the cascade tracker properly
 * reflects the changes in the index.
 */
@Test
public void solrTracking_folderUpdate_shouldReIndexFolderAndChildren() throws Exception {
    // Update the folder inside a new transaction
    Transaction txn = getTransaction(0, 1);

    folderMetaData.getProperties().put(ContentModel.PROP_CASCADE_TX,
            new StringPropertyValue(Long.toString(txn.getId())));
    folderMetaData.getProperties().put(ContentModel.PROP_NAME, new StringPropertyValue("folder2"));
    folderNode.setTxnId(txn.getId());
    folderMetaData.setTxnId(txn.getId());

    // Change the ancestor on the file just to see if it's been updated
    NodeRef nodeRef = new NodeRef(new StoreRef("workspace", "SpacesStore"), createGUID());
    childFolderMetaData.setAncestors(ancestors(nodeRef));
    fileMetaData.setAncestors(ancestors(nodeRef));

    upsertData(txn, singletonList(folderNode), singletonList(folderMetaData));

    // Check that the ancestor has been changed and indexed (cascade re-index of both children)
    TermQuery query = new TermQuery(new Term(QueryConstants.FIELD_ANCESTOR, nodeRef.toString()));
    waitForDocCount(query, 2, MAX_WAIT_TIME);

    // Child folder and grandchild document must be updated
    // This is the same query as before but instead of using a Lucene query, it uses the /afts endpoint (request handler)
    ModifiableSolrParams params = new ModifiableSolrParams()
            .add(CommonParams.Q, QueryConstants.FIELD_ANCESTOR + ":\"" + nodeRef.toString() + "\"")
            .add(CommonParams.QT, "/afts").add(CommonParams.START, "0").add(CommonParams.ROWS, "6")
            .add(CommonParams.SORT, "id asc").add(CommonParams.FQ, "{!afts}AUTHORITY_FILTER_FROM_JSON");

    // JSON body supplies locales, templates and authorities for the AFTS request handler
    SolrServletRequest req = areq(params,
            "{\"locales\":[\"en\"], \"templates\": [{\"name\":\"t1\", \"template\":\"%cm:content\"}], \"authorities\": [ \"mike\"], \"tenants\": [ \"\" ]}");

    // Expect exactly the child folder and the file, in id order
    assertQ(req, "*[count(//doc)=2]", "//result/doc[1]/long[@name='DBID'][.='" + childFolderNode.getId() + "']",
            "//result/doc[2]/long[@name='DBID'][.='" + fileNode.getId() + "']");
}

From source file:org.dfdeshom.solr.mlt.MoreLikeThisHandler.java

License:Apache License

/**
 * Handles a MoreLikeThis request. The seed document comes either from a query
 * (?q=) whose first match is used, or from a posted ContentStream whose text
 * is analysed directly. Optionally reports interesting terms, facets and
 * debug information.
 *
 * @param req the Solr request (q, fq, start, rows, mlt.* parameters)
 * @param rsp the response; receives "response", and optionally "match",
 *            "interestingTerms", "facet_counts" and "debug" sections
 * @throws Exception on search failure; SyntaxError in q/fq is rethrown as a
 *                   BAD_REQUEST SolrException
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrParams params = req.getParams();

    // Set field flags
    ReturnFields returnFields = new SolrReturnFields(req);
    rsp.setReturnFields(returnFields);
    int flags = 0;
    if (returnFields.wantsScore()) {
        flags |= SolrIndexSearcher.GET_SCORES;
    }

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
    String q = params.get(CommonParams.Q);
    Query query = null;
    SortSpec sortSpec = null;
    List<Query> filters = null;
    QParser parser = null;

    try {
        // NOTE: sortSpec stays null when no q is given (ContentStream mode).
        if (q != null) {
            parser = QParser.getParser(q, defType, req);
            query = parser.getQuery();
            sortSpec = parser.getSort(true);
        }

        String[] fqs = req.getParams().getParams(CommonParams.FQ);
        if (fqs != null && fqs.length != 0) {
            filters = new ArrayList<Query>();
            for (String fq : fqs) {
                if (fq != null && fq.trim().length() != 0) {
                    QParser fqp = QParser.getParser(fq, null, req);
                    filters.add(fqp.getQuery());
                }
            }
        }
    } catch (SyntaxError e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    SolrIndexSearcher searcher = req.getSearcher();

    MoreLikeThisHelper mlt = new MoreLikeThisHelper(params, searcher);

    // Hold on to the interesting terms if relevant
    TermStyle termStyle = TermStyle.get(params.get(MoreLikeThisParams.INTERESTING_TERMS));
    List<InterestingTerm> interesting = (termStyle == TermStyle.NONE) ? null
            : new ArrayList<InterestingTerm>(mlt.mlt.getMaxQueryTerms());

    DocListAndSet mltDocs = null;

    // Parse Required Params
    // This will either have a single Reader or valid query
    Reader reader = null;
    try {
        if (q == null || q.trim().length() < 1) {
            Iterable<ContentStream> streams = req.getContentStreams();
            if (streams != null) {
                Iterator<ContentStream> iter = streams.iterator();
                if (iter.hasNext()) {
                    reader = iter.next().getReader();
                }
                if (iter.hasNext()) {
                    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                            "MoreLikeThis does not support multiple ContentStreams");
                }
            }
        }

        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        // Find documents MoreLikeThis - either with a reader or a query
        // --------------------------------------------------------------------------------
        if (reader != null) {
            // FIX: in ContentStream mode q is null, so sortSpec was never
            // assigned and sortSpec.getSort() threw a NullPointerException.
            // Pass a null sort (unsorted) when no sort spec exists.
            mltDocs = mlt.getMoreLikeThis(reader, sortSpec == null ? null : sortSpec.getSort(), start, rows,
                    filters, interesting, flags);
        } else if (q != null) {
            // Matching options
            boolean includeMatch = params.getBool(MoreLikeThisParams.MATCH_INCLUDE, true);
            int matchOffset = params.getInt(MoreLikeThisParams.MATCH_OFFSET, 0);

            // Find the base match
            DocList match = searcher.getDocList(query, null, null, matchOffset, 1, flags); // only get the first one...
            if (includeMatch) {
                rsp.add("match", match);
            }

            // This is an iterator, but we only handle the first match
            DocIterator iterator = match.iterator();
            if (iterator.hasNext()) {
                // do a MoreLikeThis query for each document in results
                int id = iterator.nextDoc();
                mltDocs = mlt.getMoreLikeThis(parser, id, sortSpec.getSort(), start, rows, filters, interesting,
                        flags);
            }
        } else {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "MoreLikeThis requires either a query (?q=) or text to find similar documents.");
        }

    } finally {
        if (reader != null) {
            reader.close();
        }
    }

    if (mltDocs == null) {
        mltDocs = new DocListAndSet(); // avoid NPE
    }
    rsp.add("response", mltDocs.docList);

    if (interesting != null) {
        if (termStyle == TermStyle.DETAILS) {
            // detailed form: term plus its boost
            NamedList<Float> it = new NamedList<Float>();
            for (InterestingTerm t : interesting) {
                it.add(t.term.toString(), t.boost);
            }
            rsp.add("interestingTerms", it);
        } else {
            // compact form: just the term text
            List<String> it = new ArrayList<String>(interesting.size());
            for (InterestingTerm t : interesting) {
                it.add(t.term.text());
            }
            rsp.add("interestingTerms", it);
        }
    }

    // maybe facet the results
    if (params.getBool(FacetParams.FACET, false)) {
        if (mltDocs.docSet == null) {
            rsp.add("facet_counts", null);
        } else {
            SimpleFacets f = new SimpleFacets(req, mltDocs.docSet, params);
            rsp.add("facet_counts", f.getFacetCounts());
        }
    }
    boolean dbg = req.getParams().getBool(CommonParams.DEBUG_QUERY, false);

    boolean dbgQuery = false, dbgResults = false;
    if (!dbg) { // if it's true, we are doing everything anyway.
        String[] dbgParams = req.getParams().getParams(CommonParams.DEBUG);
        if (dbgParams != null) {
            for (int i = 0; i < dbgParams.length; i++) {
                if (dbgParams[i].equals(CommonParams.QUERY)) {
                    dbgQuery = true;
                } else if (dbgParams[i].equals(CommonParams.RESULTS)) {
                    dbgResults = true;
                }
            }
        }
    } else {
        dbgQuery = true;
        dbgResults = true;
    }
    // Copied from StandardRequestHandler... perhaps it should be added to doStandardDebug?
    if (dbg) {
        try {
            NamedList<Object> dbgInfo = SolrPluginUtils.doStandardDebug(req, q, mlt.getRawMLTQuery(),
                    mltDocs.docList, dbgQuery, dbgResults);
            if (null != dbgInfo) {
                if (null != filters) {
                    dbgInfo.add("filter_queries", req.getParams().getParams(CommonParams.FQ));
                    List<String> fqs = new ArrayList<String>(filters.size());
                    for (Query fq : filters) {
                        fqs.add(QueryParsing.toString(fq, req.getSchema()));
                    }
                    dbgInfo.add("parsed_filter_queries", fqs);
                }
                rsp.add("debug", dbgInfo);
            }
        } catch (Exception e) {
            SolrException.log(SolrCore.log, "Exception during debug", e);
            rsp.add("exception_during_debug", SolrException.toStr(e));
        }
    }
}

From source file:org.dice.solrenhancements.morelikethis.DiceMoreLikeThisHandler.java

License:Apache License

/**
 * Handles a Dice MoreLikeThis request. The seed content comes either from a
 * query (?q=) or from a posted ContentStream; matching documents are returned
 * along with (optionally) interesting terms, facets and debug info.
 *
 * @param req the Solr request (q, fq, mlt.fq, start, rows, mlt.* parameters)
 * @param rsp the response; receives "response" plus optional sections
 * @throws Exception on search failure; SyntaxError in q/fq is rethrown as a
 *                   BAD_REQUEST SolrException
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    // set and override parameters
    SolrIndexSearcher searcher = req.getSearcher();
    SchemaField uniqueKeyField = searcher.getSchema().getUniqueKeyField();
    ModifiableSolrParams params = new ModifiableSolrParams(req.getParams());
    configureSolrParameters(req, params, uniqueKeyField.getName());

    // Set field flags
    ReturnFields returnFields = new SolrReturnFields(req);
    rsp.setReturnFields(returnFields);
    int flags = 0;
    if (returnFields.wantsScore()) {
        flags |= SolrIndexSearcher.GET_SCORES;
    }
    // note: set in configureSolrParameters
    String defType = params.get(QueryParsing.DEFTYPE, EDISMAX);
    String q = params.get(CommonParams.Q);
    Query query = null;
    SortSpec sortSpec = null;
    QParser parser = null;

    List<Query> targetFqFilters = null;
    List<Query> mltFqFilters = null;

    try {
        if (q != null) {
            parser = QParser.getParser(q, defType, req);
            query = parser.getQuery();
            sortSpec = parser.getSort(true);
        } else {
            // no q: still build a parser so a default sort spec is available
            parser = QParser.getParser(null, defType, req);
            sortSpec = parser.getSort(true);
        }

        // fq filters restrict the seed match; mlt.fq filters restrict the MLT results
        targetFqFilters = getFilters(req, CommonParams.FQ);
        mltFqFilters = getFilters(req, MoreLikeThisParams.FQ);
    } catch (SyntaxError e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    MoreLikeThisHelper mlt = new MoreLikeThisHelper(params, searcher, uniqueKeyField, parser);

    // Hold on to the interesting terms if relevant
    MoreLikeThisParams.TermStyle termStyle = MoreLikeThisParams.TermStyle
            .get(params.get(MoreLikeThisParams.INTERESTING_TERMS));

    MLTResult mltResult = null;
    DocListAndSet mltDocs = null;

    // Parse Required Params
    // This will either have a single Reader or valid query
    Reader reader = null;
    try {
        int start = params.getInt(CommonParams.START, 0);
        int rows = params.getInt(CommonParams.ROWS, 10);

        // for use when passed a content stream
        if (q == null || q.trim().length() < 1) {
            reader = getContentStreamReader(req, reader);
        }
        // Find documents MoreLikeThis - either with a reader or a query
        // --------------------------------------------------------------------------------
        if (reader != null) {
            // this will only be initialized if used with a content stream (see above)
            mltResult = mlt.getMoreLikeThisFromContentSteam(reader, start, rows, mltFqFilters, flags,
                    sortSpec.getSort());
        } else if (q != null) {
            // Matching options
            mltResult = getMoreLikeTheseFromQuery(rsp, params, flags, q, query, sortSpec, targetFqFilters,
                    mltFqFilters, searcher, mlt, start, rows);
        } else {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "MoreLikeThis requires either a query (?q=) or text to find similar documents.");
        }
        if (mltResult != null) {
            mltDocs = mltResult.getDoclist();
        }

    } finally {
        if (reader != null) {
            reader.close();
        }
    }

    if (mltDocs == null) {
        mltDocs = new DocListAndSet(); // avoid NPE
    }
    rsp.add("response", mltDocs.docList);

    if (mltResult != null && termStyle != MoreLikeThisParams.TermStyle.NONE) {
        addInterestingTerms(rsp, termStyle, mltResult);
    }

    // maybe facet the results
    if (params.getBool(FacetParams.FACET, false)) {
        addFacet(req, rsp, params, mltDocs);
    }

    addDebugInfo(req, rsp, q, mltFqFilters, mlt, mltResult);
}

From source file:org.dice.solrenhancements.unsupervisedfeedback.DiceUnsupervisedFeedbackHandler.java

License:Apache License

/**
 * Handles an unsupervised (pseudo-relevance) feedback request: runs the
 * original query, expands it with interesting terms drawn from the top
 * matching documents, and re-executes the expanded query.
 *
 * @param req the Solr request; the q parameter is required
 * @param rsp the response; receives "response" plus optional
 *            "interestingTerms" and facet sections
 * @throws Exception on search failure; a missing q or a SyntaxError is
 *                   rethrown as a BAD_REQUEST SolrException
 */
@Override
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    SolrIndexSearcher searcher = req.getSearcher();
    SchemaField uniqueKeyField = searcher.getSchema().getUniqueKeyField();
    ModifiableSolrParams params = new ModifiableSolrParams(req.getParams());
    configureSolrParameters(req, params, uniqueKeyField.getName());

    // Set field flags
    ReturnFields returnFields = new SolrReturnFields(req);
    rsp.setReturnFields(returnFields);
    int flags = 0;
    if (returnFields.wantsScore()) {
        flags |= SolrIndexSearcher.GET_SCORES;
    }

    String defType = params.get(QueryParsing.DEFTYPE, QParserPlugin.DEFAULT_QTYPE);
    int maxDocumentsToMatch = params.getInt(UnsupervisedFeedbackParams.MAX_DOCUMENTS_TO_PROCESS,
            UnsupervisedFeedback.DEFAULT_MAX_NUM_DOCUMENTS_TO_PROCESS);
    String q = params.get(CommonParams.Q);

    // FIX: reject a missing query up front. The original code parsed the null
    // query string first and only checked q == null afterwards.
    if (q == null) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "Dice unsupervised feedback handler requires either a query (?q=) to find similar documents.");
    }

    Query query = null;
    SortSpec sortSpec = null;
    QParser parser = null;

    List<Query> targetFqFilters = null;
    List<Query> mltFqFilters = null;

    try {
        parser = QParser.getParser(q, defType, req);
        query = parser.getQuery();
        sortSpec = parser.getSort(true);

        // fq filters restrict the original match; uf.fq filters restrict the feedback query
        targetFqFilters = getFilters(req, CommonParams.FQ);
        mltFqFilters = getFilters(req, UnsupervisedFeedbackParams.FQ);
    } catch (SyntaxError e) {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
    }

    UnsupervisedFeedbackHelper mlt = new UnsupervisedFeedbackHelper(params, searcher, uniqueKeyField, parser);

    // Hold on to the interesting terms if relevant
    UnsupervisedFeedbackParams.TermStyle termStyle = UnsupervisedFeedbackParams.TermStyle
            .get(params.get(UnsupervisedFeedbackParams.INTERESTING_TERMS));
    List<InterestingTerm> interesting = (termStyle == UnsupervisedFeedbackParams.TermStyle.NONE) ? null
            : new ArrayList<InterestingTerm>(mlt.uf.getMaxQueryTermsPerField());

    int start = params.getInt(CommonParams.START, 0);
    int rows = params.getInt(CommonParams.ROWS, 10);

    // Expand the query using feedback from the top documents and run it again.
    // FIX: the original wrapped this call in try/finally to close a Reader that
    // was declared but never assigned; that dead resource handling is removed.
    DocListAndSet uffDocs = expandQueryAndReExecute(rsp, params, maxDocumentsToMatch, flags, q, query, sortSpec,
            targetFqFilters, mltFqFilters, searcher, mlt, interesting, null, start, rows);

    if (uffDocs == null) {
        uffDocs = new DocListAndSet(); // avoid NPE
    }
    rsp.add("response", uffDocs.docList);

    if (interesting != null) {
        addInterestingTerms(rsp, termStyle, interesting);
    }

    // maybe facet the results
    if (params.getBool(FacetParams.FACET, false)) {
        addFacet(req, rsp, params, uffDocs);
    }

    addDebugInfo(req, rsp, q, mltFqFilters, mlt, uffDocs);
}

From source file:org.dspace.authority.SolrAuthority.java

License:BSD License

/**
 * Look up authority choices for a metadata field, matching {@code text} as a prefix.
 * Queries the authority Solr core on the "value" field (and a locale-specific
 * "value_&lt;locale&gt;" field when a non-numeric locale is supplied), optionally
 * augments the hits with external (non-Solr) suggestions, and wraps everything in
 * a {@link Choices} result for the choice-authority UI.
 *
 * @param field     authority field name used as a Solr filter ("field:&lt;name&gt;")
 * @param text      user-typed prefix; blank matches all records
 * @param collection unused here, kept for interface compatibility
 * @param start     zero-based offset of the first choice to return
 * @param limit     page size; 0 means the default of 10
 * @param locale    optional locale suffix; numeric values are treated as absent
 * @param bestMatch unused here, kept for interface compatibility
 * @return the matching choices, or an error {@code Choices} if the search failed
 */
public Choices getMatches(String field, String text, int collection, int start, int limit, String locale,
        boolean bestMatch) {
    // Default the page size when the caller did not specify one.
    if (limit == 0) {
        limit = 10;
    }

    SolrQuery queryArgs = new SolrQuery();
    if (text == null || text.trim().equals("")) {
        // No prefix given: match every authority record.
        queryArgs.setQuery("*:*");
    } else {
        String searchField = "value";
        String localSearchField = "";
        try {
            // A downside of the authors is that the locale is sometimes a number;
            // make sure it isn't one before using it as a field suffix.
            Integer.parseInt(locale);
            locale = null;
        } catch (NumberFormatException e) {
            // Everything is all right: the locale is not numeric, keep it.
        }
        if (locale != null && !"".equals(locale)) {
            localSearchField = searchField + "_" + locale;
        }

        // Search the generic value field, and additionally the locale-specific one.
        String query = "(" + toQuery(searchField, text) + ") ";
        if (!localSearchField.equals("")) {
            query += " or (" + toQuery(localSearchField, text) + ")";
        }
        queryArgs.setQuery(query);
    }

    queryArgs.addFilterQuery("field:" + field);
    queryArgs.set(CommonParams.START, start);
    // We add one to our limit so that we know if there are more matches.
    int maxNumberOfSolrResults = limit + 1;
    if (externalResults) {
        maxNumberOfSolrResults = ConfigurationManager.getIntProperty("xmlui.lookup.select.size", 12);
    }
    queryArgs.set(CommonParams.ROWS, maxNumberOfSolrResults);

    // Sort on the locale-specific value field when a usable locale is available.
    String sortField = "value";
    if (StringUtils.isNotBlank(locale)) {
        queryArgs.setSortField(sortField + "_" + locale, SolrQuery.ORDER.asc);
    } else {
        queryArgs.setSortField(sortField, SolrQuery.ORDER.asc);
    }

    Choices result;
    try {
        int max = 0;
        boolean hasMore = false;
        QueryResponse searchResponse = getSearchService().search(queryArgs);
        SolrDocumentList authDocs = searchResponse.getResults();
        ArrayList<Choice> choices = new ArrayList<Choice>();
        if (authDocs != null) {
            max = (int) searchResponse.getResults().getNumFound();
            // Never hand back more than the requested page size, even though we
            // asked Solr for extra rows.
            int maxDocs = Math.min(authDocs.size(), limit);
            List<AuthorityValue> alreadyPresent = new ArrayList<AuthorityValue>();
            for (int i = 0; i < maxDocs; i++) {
                SolrDocument solrDocument = authDocs.get(i);
                if (solrDocument != null) {
                    AuthorityValue val = AuthorityValue.fromSolr(solrDocument);

                    Map<String, String> extras = val.choiceSelectMap();
                    // Mark the choice as already present in the authority index.
                    extras.put("insolr", val.getId());
                    choices.add(new Choice(val.getId(), val.getValue(), val.getValue(), extras));
                    alreadyPresent.add(val);
                }
            }

            if (externalResults && StringUtils.isNotBlank(text)) {
                // Leave room for at least two external suggestions next to the Solr hits.
                int sizeFromSolr = alreadyPresent.size();
                int maxExternalResults = limit <= 10 ? Math.max(limit - sizeFromSolr, 2)
                        : Math.max(limit - 10 - sizeFromSolr, 2) + limit - 10;
                addExternalResults(text, choices, alreadyPresent, maxExternalResults);
            }

            // NOTE(review): the exact check `authDocs.size() == (limit + 1)` was disabled
            // upstream; "more results" is always reported once any documents came back.
            hasMore = true;
        }

        // Confidence is purely a function of how many candidate choices we found.
        int confidence;
        if (choices.size() == 0) {
            confidence = Choices.CF_NOTFOUND;
        } else if (choices.size() == 1) {
            confidence = Choices.CF_UNCERTAIN;
        } else {
            confidence = Choices.CF_AMBIGUOUS;
        }

        result = new Choices(choices.toArray(new Choice[choices.size()]), start,
                hasMore ? max : choices.size() + start, confidence, hasMore);
    } catch (Exception e) {
        log.error("Error while retrieving authority values {field: " + field + ", prefix:" + text + "}", e);
        // Fall back to an error Choices so callers can degrade gracefully.
        result = new Choices(true);
    }

    return result;
}

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

/**
 * Shards the monolithic statistics Solr index into one core per year.
 * Facets the "time" field by year, then for each year: creates a
 * "statistics-&lt;year&gt;" core, exports that year's records as CSV in pages of
 * 10,000 rows, uploads the CSV files into the new core, and finally deletes the
 * migrated records from the source core.
 *
 * @throws IOException         if the temp directory cannot be created or a CSV
 *                             file cannot be written
 * @throws SolrServerException if any of the Solr queries or updates fail
 */
public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
     * Start by faceting by year so we can include each year in a separate core.
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    // We go back to the year 2000; this is a bit overkill but this way we ensure we have everything.
    // The alternative would be to sort, but that isn't recommended since it would be a very costly query.
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    // Add the +0year to ensure that we DO NOT include the current year.
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    // Create a temp directory to store the exported CSV files in; fail fast if we can't.
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    if (!tempDirectory.exists() && !tempDirectory.mkdirs()) {
        throw new IOException("Unable to create temp directory: " + tempDirectory.getAbsolutePath());
    }

    QueryResponse queryResponse = solr.query(yearRangeQuery);
    // We only issued one range facet, so the first entry holds the per-year counts.
    List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();

    // Reuse one HTTP client for every CSV export instead of leaking a new
    // (never shut down) client per 10,000-row page.
    DefaultHttpClient httpClient = new DefaultHttpClient();
    try {
        for (RangeFacet.Count count : yearResults) {
            long totalRecords = count.getCount();

            // Build a [start-of-year TO start-of-next-year) range filter for this year.
            DCDate dcStart = new DCDate(count.getValue());
            Calendar endDate = Calendar.getInstance();
            // Advance one year for the start of the next one.
            endDate.setTime(dcStart.toDate());
            endDate.add(Calendar.YEAR, 1);
            DCDate dcEndDate = new DCDate(endDate.getTime());

            StringBuilder filterQuery = new StringBuilder();
            filterQuery.append("time:([");
            filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
            filterQuery.append(" TO ");
            filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
            filterQuery.append("]");
            // The next part of the filter query excludes the content from midnight of the next year.
            filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
            filterQuery.append(")");

            Map<String, String> yearQueryParams = new HashMap<String, String>();
            yearQueryParams.put(CommonParams.Q, "*:*");
            yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
            yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
            yearQueryParams.put(CommonParams.WT, "csv");

            // Start by creating a new core for this year.
            String coreName = "statistics-" + dcStart.getYear();
            HttpSolrServer statisticsYearServer = createCore(solr, coreName);

            System.out.println("Moving: " + totalRecords + " into core " + coreName);
            log.info("Moving: " + totalRecords + " records into core " + coreName);

            List<File> filesToUpload = new ArrayList<File>();
            for (int i = 0; i < totalRecords; i += 10000) {
                String solrRequestUrl = solr.getBaseURL() + "/select";
                solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

                HttpGet get = new HttpGet(solrRequestUrl);
                HttpResponse response = httpClient.execute(get);
                InputStream csvInputstream = response.getEntity().getContent();
                // Write the CSV output to a file (copyInputStreamToFile closes the stream).
                File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp."
                        + dcStart.getYear() + "." + i + ".csv");
                FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
                filesToUpload.add(csvFile);

                // Advance the offset by the page size for the next request.
                yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
            }

            for (File tempCsv : filesToUpload) {
                // Upload the data in the CSV files to our new Solr core.
                ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                        "/update/csv");
                contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
                contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
                contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

                statisticsYearServer.request(contentStreamUpdateRequest);
            }
            statisticsYearServer.commit(true, true);

            // Delete the migrated records for this year from the source core.
            solr.deleteByQuery(filterQuery.toString());
            solr.commit(true, true);

            log.info("Moved " + totalRecords + " records into core: " + coreName);
        }
    } finally {
        // Release the client's pooled connections.
        httpClient.getConnectionManager().shutdown();
    }

    FileUtils.deleteDirectory(tempDirectory);
}