Example usage for org.apache.solr.client.solrj SolrQuery set

List of usage examples for org.apache.solr.client.solrj SolrQuery set

Introduction

On this page you will find example usages of org.apache.solr.client.solrj SolrQuery#set.

Prototype

public ModifiableSolrParams set(String name, String... val) 

Source Link

Document

Replace any existing parameter with the given name.

Usage

From source file:actors.SolrActor.java

License:Apache License

/**
 * Handles a SolrIndexEvent: checks via a realtime GET whether the document is
 * already indexed; if not, streams its content from a remote URL through the
 * /update/extract handler together with its literal.* metadata fields.
 *
 * Errors are caught and logged to stderr only — callers get no failure signal.
 */
public void indexUpdated(SolrIndexEvent msg) {
    try {
        System.out.println("SolrIndexEvent");
        SolrInputDocument doc = msg.getDocuement();
        // Realtime GET against the /get handler, keyed by id + rev.
        System.out.println("GET");
        SolrQuery parameters = new SolrQuery();
        parameters.setRequestHandler("/get");
        String f1 = doc.getFieldValue("literal.id").toString();
        String f2 = doc.getFieldValue("literal.rev").toString();
        parameters.set("id", f1);
        parameters.set("rev", f2);

        QueryResponse response = server.query(parameters);

        NamedList<Object> result = response.getResponse();
        // The /get handler returns the match under the "doc" key; a missing
        // entry means this id/rev is not yet in the index.
        if (result == null || result.get("doc") == null) {
            System.out.println("/update/extract");
            ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");
            // Stream the raw file content directly from its URL (a Dropbox link,
            // per literal.links).
            URL url = new URL(doc.getFieldValue("literal.links").toString());
            ContentStreamBase content = new ContentStreamBase.URLStream(url);
            System.out.println("ContentStreamBase");
            req.addContentStream(content);
            // Additional metadata, passed as literal.* params so Solr stores the
            // values verbatim alongside the extracted content.
            req.setParam("literal.id", doc.getFieldValue("literal.id").toString());
            req.setParam("literal.title", doc.getFieldValue("literal.title").toString());
            req.setParam("literal.rev", doc.getFieldValue("literal.rev").toString());
            req.setParam("literal.when", doc.getFieldValue("literal.when").toString());
            req.setParam("literal.path", doc.getFieldValue("literal.path").toString());
            req.setParam("literal.icon", doc.getFieldValue("literal.icon").toString());
            req.setParam("literal.size", doc.getFieldValue("literal.size").toString());
            req.setParam("literal.url", doc.getFieldValue("literal.links").toString());

            // Unknown extracted fields are prefixed with attr_; the extracted
            // body is mapped onto attr_content.
            req.setParam("uprefix", "attr_");
            req.setParam("fmap.content", "attr_content");
            // Commit as part of the request (the two flags are waitFlush /
            // waitSearcher) so the document is searchable immediately.
            req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            // Send the extract request to Solr.
            result = server.request(req);

        } else {
            System.out.println("It's already update");

        }

    } catch (Exception e) {
        // NOTE(review): exception is swallowed after printing; consider
        // propagating or logging through a real logger.
        e.printStackTrace();
    }
}

From source file:actors.SolrActor.java

License:Apache License

/**
 * Runs an edismax search for the event's query string, asking Solr for one
 * highlighted snippet per result from the "content" field.
 *
 * Note: the QueryResponse is discarded — this method only issues the query and
 * prints the parameters it was built with. Exceptions are logged to stderr.
 */
public void performedSearch(SolrSearchEvent msg) {
    try {
        System.out.println("SolrSearchEvent");
        System.out.println("");
        SolrQuery searchQuery = new SolrQuery();
        searchQuery.set("q", msg.getQuery());
        searchQuery.setFields("title", "id", "path", "when", "icon", "size", "content_type");
        searchQuery.set("defType", "edismax");
        searchQuery.addFilterQuery("title", "content");
        searchQuery.setStart(0);
        // Highlighting: one snippet per match, taken from the content field.
        searchQuery.setHighlight(true);
        searchQuery.setHighlightSnippets(1);
        searchQuery.setParam("hl.fl", "content");
        searchQuery.setParam("wt", "json");
        server.query(searchQuery);

        System.out.println(searchQuery);

    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:at.pagu.soldockr.core.QueryParser.java

License:Apache License

/**
 * Applies the facet options of the given query (fields, minimum count, page
 * size/offset, and sort) to the SolrQuery. Does nothing when the query carries
 * no facet options or no facet fields.
 */
private void appendFacetingOnFields(SolrQuery solrQuery, FacetQuery query) {
    FacetOptions options = query.getFacetOptions();
    if (options == null || !options.hasFields()) {
        return; // nothing to facet on
    }
    solrQuery.setFacet(true);
    solrQuery.addFacetField(convertFieldListToStringArray(options.getFacetOnFields()));
    solrQuery.setFacetMinCount(options.getFacetMinCount());
    solrQuery.setFacetLimit(options.getPageable().getPageSize());
    // Only pages past the first need an explicit facet offset.
    if (options.getPageable().getPageNumber() > 0) {
        solrQuery.set(FacetParams.FACET_OFFSET, options.getPageable().getOffset());
    }
    // Switch from the default sort to lexical index order when requested.
    if (FacetOptions.FacetSort.INDEX.equals(options.getFacetSort())) {
        solrQuery.setFacetSort(FacetParams.FACET_SORT_INDEX);
    }
}

From source file:at.pagu.soldockr.core.QueryParser.java

License:Apache License

/**
 * Enables result grouping on the given fields. The current SolrJ API can only
 * express a single group.field, so more than one field is rejected up front.
 *
 * @param solrQuery query to add the grouping parameters to
 * @param fields fields to group on; empty or null is a no-op
 * @throws ApiUsageException when more than one grouping field is supplied
 */
private void appendGroupByFields(SolrQuery solrQuery, List<Field> fields) {
    if (CollectionUtils.isEmpty(fields)) {
        return;
    }

    if (fields.size() > 1) {
        // There is a bug in SolrJ which prevents multiple grouping,
        // although it is available via a plain HTTP call.
        throw new ApiUsageException(
                "Cannot group on more than one field with current SolrJ API. Group on single field instead");
    }

    solrQuery.set(GroupParams.GROUP, true);
    // group.main=true returns the grouped hits in the flat result format.
    solrQuery.setParam(GroupParams.GROUP_MAIN, true);

    for (Field field : fields) {
        solrQuery.add(GroupParams.GROUP_FIELD, field.getName());
    }
}

From source file:bamboo.trove.rule.RuleChangeUpdateManager.java

License:Apache License

/**
 * Pages through all documents matching the rule query using Solr cursor marks,
 * handing each page to distributeResponse() for processing.
 *
 * A hard commit is issued first so documents written by earlier processing are
 * visible (and thus excluded/included correctly) in this query. Pagination
 * stops when the cursor mark no longer advances.
 *
 * @param query the rule's Solr query; its cursorMark param is (re)written here
 * @param workLog accumulates search/write statistics for this rule
 */
private void processQuery(SolrQuery query, WorkLog workLog) throws SolrServerException, IOException {
    log.debug("Query for rule : {}", query.toString());
    Timer.Context context = getTimer(getName() + ".processQuery").time();
    // need to commit here so that we can ignore documents just processed
    client.commit();

    boolean more = true;
    String cursor = CursorMarkParams.CURSOR_MARK_START;
    while (more) {
        query.set(CursorMarkParams.CURSOR_MARK_PARAM, cursor);
        Timer.Context contextQuery = getTimer(getName() + ".query").time();

        QueryResponse response = client.query(query);
        workLog.ranSearch();
        SolrDocumentList results = response.getResults();
        log.debug("Found {} (of {} docs) in QT = {} ms", results.size(), results.getNumFound(),
                response.getQTime());
        // An unchanged (or absent) cursor mark means the result set is exhausted;
        // the final page is still distributed below before the loop exits.
        String nextCursor = response.getNextCursorMark();
        if (nextCursor == null || cursor.equals(nextCursor)) {
            more = false;
        }
        distributeResponse(results, workLog);
        cursor = nextCursor;
        contextQuery.stop();
    }

    // We do this at a higher level too, so this would seem redundant. There is a trade-off. Allowing parallelism
    // between rules means rules can sometimes be re-processed redundantly. The higher level waitUntilCaughtUp() will
    // ensure we never process rules at the same time rules are being changed.
    // By doing a wait here as well however, we can collect accurate statistics about how much actual write activity we
    // are really generating by passing the workLog into the work pool.
    // When we have a better awareness of the typical work patterns it might be worth disabling this method call and
    // then stop collecting the metrics to improve throughput.
    waitUntilCaughtUp();
    context.stop();
}

From source file:com.bindez.nlp.extract.ngram.corpus.MyanmarCorpusTask.java

@Override
public TermResult call() throws Exception {
    TermResult result = new TermResult();
    HttpSolrServer tfServer = this.server; //SolrServer.getSolrServer();

    SolrQuery query = new SolrQuery();
    query.set("q", "word:" + term);
    query.add("fl", "fl:totaltermfreq(word," + term + ")");
    QueryResponse response = server.query(query);
    SolrDocumentList results = response.getResults();
    for (SolrDocument result1 : results) {
        String count = result1.getFieldValue("fl").toString();
        result.setTerm(term);/*w w w.  j a  v  a 2  s .  c  om*/
        result.setFrequency(Long.parseLong(count));

    }

    return result;
}

From source file:com.bindez.nlp.extract.ngram.term_frequency.TermFrequency.java

/**
 * For each word, asks Solr for totaltermfreq(content, word) and collects the
 * (word, frequency) pairs as Word objects. Words with no matching document
 * contribute nothing to the result.
 */
public List<Word> query(Set<String> words) throws SolrServerException {
    List<Word> frequencies = new ArrayList<Word>();
    server = new SolrServer().getSolrServer();
    for (String word : words) {
        SolrQuery freqQuery = new SolrQuery();
        freqQuery.set("q", "content:" + word);
        // Function pseudo-field carrying the total term frequency.
        freqQuery.add("fl", "fl:totaltermfreq(content," + word + ")");
        freqQuery.set("rows", 1);
        QueryResponse response = server.query(freqQuery);
        SolrDocumentList docs = response.getResults();
        for (SolrDocument doc : docs) {
            String count = doc.getFieldValue("fl").toString();
            frequencies.add(new Word(word, Integer.parseInt(count)));
        }
    }

    return frequencies;
}

From source file:com.bindez.nlp.extract.ngram.term_frequency.TermFrequency.java

/**
 * Looks up each word through the distributed /terms handler, matching the
 * "content" field by regex, and returns the first matching term for each word
 * together with its frequency. Words with no match are skipped.
 */
public List<Word> getTermsFrequency(Set<String> words) throws SolrServerException {
    List<Word> found = new ArrayList<Word>();
    server = new SolrServer().getSolrServer();

    for (String word : words) {
        SolrQuery termsQuery = new SolrQuery();
        termsQuery.setRequestHandler("/terms");
        termsQuery.set("terms.fl", "content");
        termsQuery.set("terms.regex", word);
        termsQuery.set("terms", "true");
        // Route shard sub-requests through /terms as well, distributed mode on.
        termsQuery.set("shards.qt", "/terms");
        termsQuery.set("distrib", "true");

        QueryResponse response = server.query(termsQuery);
        TermsResponse termsRes = response.getTermsResponse();
        List<TermsResponse.Term> terms = termsRes.getTerms("content");
        if (terms != null && terms.size() > 0) {
            TermsResponse.Term first = terms.get(0);
            Word w = new Word();
            w.setText(first.getTerm());
            w.setCount(first.getFrequency());
            found.add(w);
        }

    }

    return found;
}

From source file:com.bindez.nlp.extract.ngram.term_frequency.TermFrequencyTask.java

@Override
public TermResult call() throws Exception {
    TermResult result = new TermResult();
    HttpSolrServer tfServer = this.server; //SolrServer.getSolrServer();
    /*//from  w  w w.j  a va2s.  com
        SolrQuery query = new SolrQuery();
        query.setRequestHandler("/terms");
        query.set("terms.fl", "content");
        query.set("terms.regex", term) ;
        query.set("terms", "true");
        query.set("shards.qt","/terms");
        query.set("distrib", "true");
        QueryResponse response = server.query(query);
        TermsResponse termsRes = response.getTermsResponse();
        List<TermsResponse.Term> terms= termsRes.getTerms("content");
                
        TermsResponse.Term solrTerm = null ;
        if(terms != null && terms.size() > 0){
         solrTerm = terms.get(0);
         result.setTerm(term);
         result.setFrequency(solrTerm.getFrequency());
        }else{
            result.setTerm(term);
            result.setFrequency(0);
        }
    */

    SolrQuery query = new SolrQuery();
    query.set("q", "content:" + term);
    query.add("fl", "fl:totaltermfreq(content," + term + ")");
    QueryResponse response = server.query(query);
    SolrDocumentList results = response.getResults();
    for (SolrDocument result1 : results) {
        String count = result1.getFieldValue("fl").toString();
        result.setTerm(term);
        result.setFrequency(Long.parseLong(count));

    }

    return result;
}

From source file:com.bindez.nlp.extract.segmentor.WordSegmentorNgramTask.java

@Override
public TermResult call() throws Exception {
    // Queries Solr for the corpus-wide frequency of `term` in the "content"
    // field (via the totaltermfreq function pseudo-field) and returns it in a
    // TermResult. If nothing matches, the TermResult is returned unpopulated.
    TermResult result = new TermResult();

    SolrQuery query = new SolrQuery();
    // NOTE(review): `term` is concatenated into the query unescaped — special
    // Solr query characters in term would change the query's meaning.
    query.set("q", "content:" + term);
    query.add("fl", "fl:totaltermfreq(content," + term + ")");
    QueryResponse response = server.query(query);
    SolrDocumentList results = response.getResults();
    for (SolrDocument doc : results) {
        String count = doc.getFieldValue("fl").toString();
        result.setTerm(term);
        result.setFrequency(Long.parseLong(count));
    }

    return result;
}