Example usage for org.apache.solr.common SolrDocument get

List of usage examples for org.apache.solr.common SolrDocument get

Introduction

On this page you can find example usage for org.apache.solr.common SolrDocument.get.

Prototype

@Override
    public Object get(Object key) 

Source Link

Usage

From source file:alba.solr.searchcomponents.AlbaRequestHandler.java

License:Apache License

/**
 * Invokes the configured function reflectively with (req, rsp) and serializes
 * its return value into the response.
 *
 * <p>Bug fixed: the original code checked {@code StaticResource} in a separate
 * {@code if} statement, so a Map or List result also fell into that statement's
 * {@code else} branch and was added to the response a second time. The branches
 * are now a single exclusive if/else-if chain.</p>
 *
 * @param req the current Solr request
 * @param rsp the response the result is written into
 * @throws Exception if the reflective invocation or file handling fails
 */
public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception {
    // Expose the registered functions to downstream components via the request context.
    req.getContext().put(Loader.FUNCTIONS, functions);

    Object[] params = { req, rsp };

    // NOTE(review): if the invoked method itself calls rsp.add(..), sections may
    // be emitted twice - confirm against the functions registered with Loader.
    Object result = this.function.getMethod().invoke(this.function.getInstance(), params);

    if (result == null) {
        // Nothing to serialize; the invoked method may have written to rsp directly.
        // (The original would have thrown an NPE on result.getClass() here.)
        return;
    }

    if (Map.class.isAssignableFrom(result.getClass())) {
        // A Map can be returned as-is; the ResponseWriter knows how to render it.
        rsp.add(this.sectionName, result);
    } else if (List.class.isAssignableFrom(result.getClass())) {
        // Serialize each list element into a SolrDocument via the object binder.
        for (Object o : (List) result) {
            DocumentObjectBinder dob = new DocumentObjectBinder();
            SolrInputDocument sd = dob.toSolrInputDocument(o);
            SolrDocument dest = ClientUtils.toSolrDocument(sd);

            rsp.add(null, dest);
        }
    } else if (StaticResource.class.isAssignableFrom(result.getClass())) {
        FilteredShowFileRequestHandler file = new FilteredShowFileRequestHandler();

        // Initializes internal variables only - this way it will NOT get the
        // proper configuration from SolrConfig!
        file.init(new NamedList());

        ModifiableSolrParams solrParams = new ModifiableSolrParams(req.getParams());

        StaticResource resource = (StaticResource) result;
        solrParams.set("file", resource.getName());
        // TODO proper content-type mapping here
        solrParams.set("contentType", resource.getContentType());
        req.setParams(solrParams);

        file.handleRequest(req, rsp);
    } else {
        // Unable to do any kind of serialization - just add the result and let
        // the ResponseWriter handle it.
        rsp.add(null, result);
    }
}

From source file:at.kc.tugraz.ss.service.solr.datatypes.SSSolrSearchResult.java

License:Apache License

/**
 * Tries to convert a solr field into a String.
 *
 * <p>Strings are returned as-is; an {@code ArrayList} of strings is joined
 * with a trailing space after every element.</p>
 *
 * @param sdoc the document to read the field from
 * @param field the field to convert
 * @return empty string in case conversion was not successful
 */
private static synchronized String getFieldAsString(final SolrDocument sdoc,
        final SSSolrSearchFieldEnum field) {

    final Object value = sdoc.get(field.toString());

    if (value instanceof String) {
        return (String) value;
    }

    if (value instanceof ArrayList) {

        @SuppressWarnings("unchecked")
        final ArrayList<String> items = (ArrayList<String>) value;
        final StringBuilder joined = new StringBuilder();

        for (final String item : items) {
            joined.append(item).append(" ");
        }
        return joined.toString();
    }

    // Unsupported type: log what we actually got ("xx" stands in for null).
    String typeInfo = "xx";
    if (value != null) {
        typeInfo = value.getClass() + "";
    }
    SSLogU.warn("field " + field + " could not be converted to any supported class. " + typeInfo);
    return "";
}

From source file:at.tugraz.sss.serv.datatype.SSSolrSearchResult.java

License:Apache License

/**
 * tries to convert a solr field into a String
 *
 * @param sdoc/*from  ww w. j a  va  2 s  .  c  o m*/
 * @param field
 * @return empty string in case conversion was not successful
 */
private static synchronized String getFieldAsString(final SolrDocument sdoc, final SSSolrSearchFieldE field) {

    Object tmp = sdoc.get(field.toString());

    if (tmp instanceof String) {
        return (String) tmp;

    } else if (tmp instanceof ArrayList) {

        @SuppressWarnings("unchecked")
        ArrayList<String> al = (ArrayList<String>) tmp;
        StringBuilder sb = new StringBuilder();
        for (String s : al) {
            sb.append(s).append(" ");
        }
        return sb.toString();
    } else {
        String x = "xx";
        if (tmp != null) {
            x = tmp.getClass() + "";
        }
        SSLogU.warn("field " + field + " could not be converted to any supported class. " + x, null);
        return "";
    }
}

From source file:com.comm.sr.common.solr.SolrQueryService.java

@Override
public List<Map<String, Object>> query(SolrCommonQuery query) throws Exception {
    List<Map<String, Object>> results = Lists.newArrayList();

    QueryGenerator<SolrQuery, SolrCommonQuery> queryGenerator = new SolrQueryGenerator();
    SolrQuery solrQuery = queryGenerator.generateFinalQuery(query);
    logger.debug("generted solr query:" + solrQuery.toString() + "");

    QueryResponse solrRespons = cloudSolrServer.query(solrQuery);
    // int totalCount=Integer.parseInt((String)solrRespons.getResponse().get("numFound"));

    SolrDocumentList solrResult = solrRespons.getResults();
    for (SolrDocument solrDocument : solrResult) {
        Map<String, Object> resultMap = Maps.newHashMap();
        List<String> flList = query.getFls();
        for (String fl : flList) {
            Object entry = solrDocument.get(fl);

            resultMap.put(fl, entry);/*  ww  w .j  a v  a  2 s  . c  om*/
        }
        results.add(resultMap);
    }

    return results;
}

From source file:com.comm.sr.service.solr.SolrQueryService.java

@Override
public List<Map<String, Object>> query(CommonQuery commonQuery) throws Exception {
    List<Map<String, Object>> results = Lists.newArrayList();

    int gender = commonQuery.getGender();
    if (commonQuery.getGender() < 0) {
        throw new RuntimeException("??");
    }/*  ww w . ja v a  2  s .  c o  m*/
    CloudSolrServer cloudSolrServer = null;
    // if (gender == 0) {
    // cloudSolrServer = CloudSolr.getWomanInstance();
    // }
    // if (gender == 1) {
    // cloudSolrServer = CloudSolr.getManInstance();
    // }
    QueryGenerator<SolrQuery, CommonQuery> queryGenerator = new SolrQueryGenerator();
    SolrQuery solrQuery = queryGenerator.generateFinalQuery(commonQuery);
    LOGGER.debug("generted solr query:" + solrQuery.toString() + "");

    QueryResponse solrRespons = cloudSolrServer.query(solrQuery);
    // int totalCount=Integer.parseInt((String)solrRespons.getResponse().get("numFound"));

    SolrDocumentList solrResult = solrRespons.getResults();
    for (SolrDocument solrDocument : solrResult) {
        Map<String, Object> resultMap = Maps.newHashMap();
        List<String> flList = commonQuery.getFls();
        for (String fl : flList) {
            Object entry = solrDocument.get(fl);

            if (fl.equals("userID")) {
                String userID = String.valueOf(solrDocument.get(fl));
                if (userID == null) {
                    continue;
                }
                userID = userID.replace(".", "").replaceAll("E[\\d]{0,}", "");
                entry = userID;
            }
            resultMap.put(fl, entry);
        }
        results.add(resultMap);
    }

    return results;

}

From source file:com.digitalpebble.storm.crawler.solr.persistence.SolrSpout.java

License:Apache License

/**
 * Fills the spout buffer with the next page of URLs due for fetching.
 *
 * <p>Bug fixed: {@code LOG.error("Can't query Solr: {}", e)} had a stray
 * {@code {}} placeholder; logging the exception as the throwable argument of a
 * placeholder-free message guarantees the full stack trace is emitted.</p>
 */
private void populateBuffer() {
    // TODO Same as the ElasticSearchSpout?
    // TODO Use the cursor feature?
    // https://cwiki.apache.org/confluence/display/solr/Pagination+of+Results
    SolrQuery query = new SolrQuery();

    // Page through all documents whose nextFetchDate is in the past.
    query.setQuery("*:*").addFilterQuery("nextFetchDate:[* TO NOW]").setStart(lastStartOffset)
            .setRows(this.bufferSize);

    if (StringUtils.isNotBlank(diversityField)) {
        // Collapse on the diversity field and expand to get a per-bucket sample.
        query.addFilterQuery(String.format("{!collapse field=%s}", diversityField));
        query.set("expand", "true").set("expand.rows", diversityBucketSize);
    }

    try {
        QueryResponse response = connection.getClient().query(query);
        SolrDocumentList docs = new SolrDocumentList();

        if (StringUtils.isNotBlank(diversityField)) {
            // Add the main documents collapsed by the CollapsingQParser plugin...
            docs.addAll(response.getResults());

            // ...plus the documents hidden by the collapsing, via the expand component.
            for (SolrDocumentList expanded : response.getExpandedResults().values()) {
                docs.addAll(expanded);
            }
        } else {
            docs = response.getResults();
        }

        int numhits = response.getResults().size();

        // No more results? Start over from the beginning on the next call.
        if (numhits == 0)
            lastStartOffset = 0;
        else
            lastStartOffset += numhits;

        for (SolrDocument doc : docs) {
            String url = (String) doc.get("url");

            // is already being processed - skip it!
            if (beingProcessed.containsKey(url))
                continue;

            Metadata metadata = new Metadata();

            // The metadata field holds serialized "key: value" pairs, one per line, e.g.
            //   url.path: http://www.lemonde.fr/
            //   depth: 1
            String mdAsString = (String) doc.get("metadata");
            if (mdAsString != null) {
                for (String pair : mdAsString.split("\n")) {
                    String[] kv = pair.split(": ");
                    if (kv.length != 2) {
                        LOG.info("Invalid key value pair {}", pair);
                        continue;
                    }
                    metadata.addValue(kv[0], kv[1]);
                }
            }

            buffer.add(new Values(url, metadata));
        }

    } catch (Exception e) {
        // No placeholder: pass e as the throwable so the stack trace is logged.
        LOG.error("Can't query Solr", e);
    }
}

From source file:com.digitalpebble.stormcrawler.solr.persistence.SolrSpout.java

License:Apache License

/**
 * Fills the spout buffer with the next page of URLs due for fetching, copying
 * every "&lt;mdPrefix&gt;.*" field of a hit into its Metadata.
 *
 * <p>Bug fixed: {@code LOG.error("Can't query Solr: {}", e)} had a stray
 * {@code {}} placeholder; logging the exception as the throwable argument of a
 * placeholder-free message guarantees the full stack trace is emitted. Manual
 * {@code Iterator} loops were also replaced with enhanced for loops.</p>
 */
private void populateBuffer() {
    // TODO Same as the ElasticSearchSpout?
    // TODO Use the cursor feature?
    // https://cwiki.apache.org/confluence/display/solr/Pagination+of+Results
    SolrQuery query = new SolrQuery();

    // Page through all documents whose nextFetchDate is in the past.
    query.setQuery("*:*").addFilterQuery("nextFetchDate:[* TO NOW]").setStart(lastStartOffset)
            .setRows(this.bufferSize);

    if (StringUtils.isNotBlank(diversityField)) {
        // Collapse on the diversity field and expand to get a per-bucket sample.
        query.addFilterQuery(String.format("{!collapse field=%s}", diversityField));
        query.set("expand", "true").set("expand.rows", diversityBucketSize);
    }

    try {
        QueryResponse response = connection.getClient().query(query);
        SolrDocumentList docs = new SolrDocumentList();

        if (StringUtils.isNotBlank(diversityField)) {
            // Add the main documents collapsed by the CollapsingQParser plugin...
            docs.addAll(response.getResults());

            // ...plus the documents hidden by the collapsing, via the expand component.
            for (SolrDocumentList expanded : response.getExpandedResults().values()) {
                docs.addAll(expanded);
            }
        } else {
            docs = response.getResults();
        }

        int numhits = response.getResults().size();

        // No more results? Start over from the beginning on the next call.
        if (numhits == 0)
            lastStartOffset = 0;
        else
            lastStartOffset += numhits;

        String prefix = mdPrefix.concat(".");

        for (SolrDocument doc : docs) {
            String url = (String) doc.get("url");

            // is already being processed - skip it!
            if (beingProcessed.containsKey(url))
                continue;

            Metadata metadata = new Metadata();

            // Copy every "<mdPrefix>.<key>" field into the metadata, stripping the prefix.
            for (String fieldName : doc.getFieldNames()) {
                if (!fieldName.startsWith(prefix)) {
                    continue;
                }
                String key = StringUtils.replace(fieldName, prefix, "", 1);
                for (Object value : doc.getFieldValues(fieldName)) {
                    metadata.addValue(key, (String) value);
                }
            }

            buffer.add(new Values(url, metadata));
        }

    } catch (Exception e) {
        // No placeholder: pass e as the throwable so the stack trace is logged.
        LOG.error("Can't query Solr", e);
    }
}

From source file:com.gisgraphy.fulltext.SolrResponseDtoBuilderTest.java

License:Open Source License

/**
 * Verifies that SolrResponseDto equality is based solely on feature_id.
 *
 * <p>Bug fixed: the two assertion messages were inverted - the equals case
 * claimed "Without the same featureId" and vice versa - which would make a
 * failure report actively misleading.</p>
 */
@Test
public void equalsShouldBeBasedOnFeature_id() {

    // The mocked documents are not used by equals(); they only document the
    // feature ids the DTOs are built from.
    SolrDocument document1 = EasyMock.createNiceMock(SolrDocument.class);
    EasyMock.expect(document1.get(FullTextFields.FEATUREID)).andStubReturn(1L);
    EasyMock.replay(document1);

    SolrDocument document2 = EasyMock.createNiceMock(SolrDocument.class);
    EasyMock.expect(document2.get(FullTextFields.FEATUREID)).andStubReturn(2L);
    EasyMock.replay(document2);

    SolrResponseDto solrResponseDto = new SolrResponseDto();
    solrResponseDto.setFeature_id(1L);
    SolrResponseDto solrResponseDtoNotEquals = new SolrResponseDto();
    solrResponseDtoNotEquals.setFeature_id(2L);
    SolrResponseDto solrResponseDtoEquals = new SolrResponseDto();
    solrResponseDtoEquals.setFeature_id(1L);

    Assert.assertTrue("solrResponseDtos with the same featureId should be equal",
            solrResponseDto.equals(solrResponseDtoEquals));
    Assert.assertFalse("solrResponseDtos with different featureIds should not be equal",
            solrResponseDto.equals(solrResponseDtoNotEquals));

}

From source file:com.ifactory.press.db.solr.processor.FieldMergingProcessorTest.java

License:Apache License

/**
 * Indexes one document and checks that its title is merged into the catchall
 * field both as analyzed tokens and as a single whole-title term.
 */
@Test
public void testMergeFields() throws Exception {
    // Index a single document with a uri, a title and a body text.
    SolrInputDocument inputDoc = new SolrInputDocument();
    inputDoc.addField("uri", "/doc/1");
    inputDoc.addField(TITLE_FIELD, TITLE);
    inputDoc.addField(TEXT_FIELD, TEST);
    solr.add(inputDoc);
    solr.commit(false, true, true);

    // basic check that the document was inserted
    QueryResponse resp = solr.query(new SolrQuery("uri:\"/doc/1\""));
    SolrDocumentList hits = resp.getResults();
    assertEquals(1, hits.size());
    assertEquals("/doc/1", hits.get(0).get("uri"));

    // text field is tokenized, analyzed:
    assertQueryCount(1, TEXT_FIELD + ":intentional");

    // title field is tokenized, analyzed:
    assertQueryCount(1, TITLE_FIELD + ":era");
    assertQueryCount(1, TITLE_FIELD + ":dawning");

    // the whole title must not appear as a single term in the title field
    for (TermsResponse.Term term : getTerms(TITLE_FIELD)) {
        assertNotEquals(TITLE, term.getTerm());
    }

    // catchall holds one term per distinct word of the text, plus the whole title
    int numWords = new HashSet<String>(Arrays.asList(TEST.split(" "))).size();
    List<TermsResponse.Term> catchallTerms = getTerms("catchall");
    assertEquals("Wrong number of terms in catchall field", numWords + 1, catchallTerms.size());

    boolean titleFound = false;
    for (TermsResponse.Term term : catchallTerms) {
        if (TITLE.equals(term.getTerm())) {
            titleFound = true;
        }
    }
    assertTrue("title not found in catchall terms list", titleFound);
}

From source file:com.liferay.portal.search.solr.internal.SolrQuerySuggester.java

License:Open Source License

/**
 * Queries the n-gram spell-check index for the given input and returns the
 * candidate words ordered by the TreeSet's natural ordering, keeping the
 * highest weight seen for each distinct suggestion.
 *
 * @param searchContext context used to build the filter queries
 * @param input the user-typed keywords to find suggestions for
 * @return the weighted suggestion words
 * @throws SearchException if the Solr query fails
 */
protected TreeSet<WeightedWord> suggestKeywords(SearchContext searchContext, String input)
        throws SearchException {

    try {
        Map<String, WeightedWord> wordsByText = new HashMap<>();
        TreeSet<WeightedWord> rankedWords = new TreeSet<>();

        SolrQuery solrQuery = _nGramQueryBuilder.getNGramQuery(input);

        solrQuery.addFilterQuery(getFilterQueries(searchContext, SuggestionConstants.TYPE_SPELL_CHECKER));
        solrQuery.setRows(_MAX_QUERY_RESULTS);

        QueryResponse queryResponse = _solrServer.query(solrQuery, SolrRequest.METHOD.POST);

        for (SolrDocument solrDocument : queryResponse.getResults()) {
            List<String> suggestions = (List<String>) solrDocument.get(Field.SPELL_CHECK_WORD);
            String suggestion = suggestions.get(0);

            List<String> weights = (List<String>) solrDocument.get(Field.PRIORITY);
            float weight = GetterUtil.getFloat(weights.get(0));

            if (suggestion.equals(input)) {
                // An exact match always wins.
                weight = _INFINITE_WEIGHT;
            } else {
                // Boost suggestions that are close (case-insensitively) to the input.
                float distance = _stringDistance.getDistance(
                    StringUtil.toLowerCase(input), StringUtil.toLowerCase(suggestion));

                if (distance >= _distanceThreshold) {
                    weight += distance;
                }
            }

            WeightedWord weightedWord = wordsByText.get(suggestion);

            if (weightedWord == null) {
                weightedWord = new WeightedWord(suggestion, weight);

                wordsByText.put(suggestion, weightedWord);
                rankedWords.add(weightedWord);
            } else if (weight > weightedWord.getWeight()) {
                // Keep only the best weight per suggestion.
                weightedWord.setWeight(weight);
            }
        }

        return rankedWords;
    } catch (Exception e) {
        if (_log.isDebugEnabled()) {
            _log.debug("Unable to execute Solr query", e);
        }

        throw new SearchException(e.getMessage(), e);
    }
}