Example usage for org.apache.solr.client.solrj SolrQuery setFields

List of usage examples for org.apache.solr.client.solrj SolrQuery setFields

Introduction

On this page you can find example usage for org.apache.solr.client.solrj SolrQuery setFields.

Prototype

public SolrQuery setFields(String... fields) 

Source Link

Usage

From source file:org.apache.blur.slur.SolrLookingBlurServerTest.java

License:Apache License

@Test
public void fieldsRequestsShouldTurnIntoSelectors() throws Exception, TException {
    // Restricting the returned fields on the query should translate into a
    // Blur record selector: only "fam.value" comes back, "fam.mvf" does not.
    String tableName = "fieldsRequestsShouldTurnIntoSelectors";
    SolrServer solrServer = TestTableCreator.newTable(tableName)
            .withRowCount(1)
            .withRecordsPerRow(2)
            .withRecordColumns("fam.value", "fam.mvf")
            .create();

    SolrQuery fieldLimitedQuery = new SolrQuery("value0-0");
    fieldLimitedQuery.setFields("fam.value");

    QueryResponse rsp = solrServer.query(fieldLimitedQuery);
    assertEquals("We should get our doc back for a valid test.", 1L, rsp.getResults().getNumFound());

    SolrDocument firstDoc = rsp.getResults().get(0);
    assertEquals("value0-0", firstDoc.getFieldValue("fam.value"));
    assertNull("We shouldn't get this one back since it wasnt in our fields.",
            firstDoc.getFieldValues("fam.mvf"));

    removeTable(tableName);
}

From source file:org.apache.blur.slur.SolrLookingBlurServerTest.java

License:Apache License

@Test
public void weShouldBeAbleToPageResults() throws SolrServerException, IOException, BlurException, TException {
    // Query a simple single-doc table with a field list and verify that only
    // the requested field is present on the returned document.
    String tableName = "weShouldBeAbleToPageResults";
    SolrServer solrServer = createServerAndTableWithSimpleTestDoc(tableName);

    SolrQuery fieldLimitedQuery = new SolrQuery("123");
    fieldLimitedQuery.setFields("fam.value");

    QueryResponse rsp = solrServer.query(fieldLimitedQuery);
    assertEquals("We should get our doc back for a valid test.", 1L, rsp.getResults().getNumFound());

    SolrDocument firstDoc = rsp.getResults().get(0);
    assertEquals("123", firstDoc.getFieldValue("fam.value"));
    assertNull("We shouldn't get this one back since it wasnt in our fields.",
            firstDoc.getFieldValues("fam.mvf"));

    removeTable(tableName);
}

From source file:org.apache.droids.solr.AdvancedSolrHandleTest.java

License:Apache License

public void performSelection(String html, String field, String selector, String expectedValue)
        throws IOException, DroidsException, URISyntaxException, SolrServerException {
    // Index the given HTML through the handler with a single field selector,
    // then query Solr and verify the selector extracted the expected value.
    AdvancedSolrHandler handler = new AdvancedSolrHandler();
    handler.setServer(solr);

    HashMap<String, String> selectorsByField = new HashMap<String, String>();
    selectorsByField.put(field, selector);
    handler.setSelectors(selectorsByField);

    MockContentEntity entity = new MockContentEntity();
    entity.setCharset("UTF-8");
    entity.setMimeType("text/html");
    entity.setText(html);

    handler.handle(new URI("http://localhost/"), entity);
    solr.commit();

    // Match everything; the index only contains the doc we just added.
    SolrQuery matchAll = new SolrQuery();
    matchAll.setQuery("*:*");
    matchAll.setFields(field);

    QueryResponse rsp = solr.query(matchAll);
    SolrDocument firstDoc = rsp.getResults().iterator().next();
    String actualValue = (String) firstDoc.getFieldValue(field);

    assertEquals(expectedValue, actualValue);
}

From source file:org.apache.ofbiz.solr.SolrProductSearch.java

License:Apache License

/**
 * Runs a query on the Solr Search Engine and returns the results.
 * <p>/*from  w ww  . j  a  va2s .com*/
 * This function only returns an object of type QueryResponse, so it is probably not a good idea to call it directly from within the
 * groovy files (As a decent example on how to use it, however, use keywordSearch instead).
 */
public static Map<String, Object> runSolrQuery(DispatchContext dctx, Map<String, Object> context) {
    // get Connection
    HttpSolrClient client = null;
    String solrIndexName = (String) context.get("indexName");
    Map<String, Object> result;
    try {
        client = SolrUtil.getInstance().getHttpSolrClient(solrIndexName);
        // create Query Object
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery((String) context.get("query"));
        // solrQuery.setQueryType("dismax");
        boolean faceted = (Boolean) context.get("facet");
        if (faceted) {
            solrQuery.setFacet(faceted);
            solrQuery.addFacetField("manu");
            solrQuery.addFacetField("cat");
            solrQuery.setFacetMinCount(1);
            solrQuery.setFacetLimit(8);

            solrQuery.addFacetQuery("listPrice:[0 TO 50]");
            solrQuery.addFacetQuery("listPrice:[50 TO 100]");
            solrQuery.addFacetQuery("listPrice:[100 TO 250]");
            solrQuery.addFacetQuery("listPrice:[250 TO 500]");
            solrQuery.addFacetQuery("listPrice:[500 TO 1000]");
            solrQuery.addFacetQuery("listPrice:[1000 TO 2500]");
            solrQuery.addFacetQuery("listPrice:[2500 TO 5000]");
            solrQuery.addFacetQuery("listPrice:[5000 TO 10000]");
            solrQuery.addFacetQuery("listPrice:[10000 TO 50000]");
            solrQuery.addFacetQuery("listPrice:[50000 TO *]");
        }

        boolean spellCheck = (Boolean) context.get("spellcheck");
        if (spellCheck) {
            solrQuery.setParam("spellcheck", spellCheck);
        }

        boolean highLight = (Boolean) context.get("highlight");
        if (highLight) {
            solrQuery.setHighlight(highLight);
            solrQuery.setHighlightSimplePre("<span class=\"highlight\">");
            solrQuery.addHighlightField("description");
            solrQuery.setHighlightSimplePost("</span>");
            solrQuery.setHighlightSnippets(2);
        }

        // Set additional Parameter
        // SolrQuery.ORDER order = SolrQuery.ORDER.desc;

        if (context.get("viewIndex") != null && (Integer) context.get("viewIndex") > 0) {
            solrQuery.setStart((Integer) context.get("viewIndex"));
        }
        if (context.get("viewSize") != null && (Integer) context.get("viewSize") > 0) {
            solrQuery.setRows((Integer) context.get("viewSize"));
        }

        // if ((List) context.get("queryFilter") != null && ((ArrayList<SolrDocument>) context.get("queryFilter")).size() > 0) {
        // List filter = (List) context.get("queryFilter");
        // String[] tn = new String[filter.size()];
        // Iterator it = filter.iterator();
        // for (int i = 0; i < filter.size(); i++) {
        // tn[i] = (String) filter.get(i);
        // }
        // solrQuery.setFilterQueries(tn);
        // }
        String queryFilter = (String) context.get("queryFilter");
        if (UtilValidate.isNotEmpty(queryFilter))
            solrQuery.setFilterQueries(queryFilter.split(" "));
        if ((String) context.get("returnFields") != null) {
            solrQuery.setFields((String) context.get("returnFields"));
        }

        // if((Boolean)context.get("sortByReverse"))order.reverse();
        if ((String) context.get("sortBy") != null && ((String) context.get("sortBy")).length() > 0) {
            SolrQuery.ORDER order;
            if (!((Boolean) context.get("sortByReverse")))
                order = SolrQuery.ORDER.asc;
            else
                order = SolrQuery.ORDER.desc;
            solrQuery.setSort(((String) context.get("sortBy")).replaceFirst("-", ""), order);
        }

        if ((String) context.get("facetQuery") != null) {
            solrQuery.addFacetQuery((String) context.get("facetQuery"));
        }

        QueryResponse rsp = client.query(solrQuery);
        result = ServiceUtil.returnSuccess();
        result.put("queryResult", rsp);
    } catch (Exception e) {
        Debug.logError(e, e.getMessage(), module);
        result = ServiceUtil.returnError(e.toString());
    } finally {
        if (client != null) {
            try {
                client.close();
            } catch (IOException e) {
                // do nothing
            }
        }
    }
    return result;
}

From source file:org.apache.ofbiz.solr.SolrUtil.java

License:Apache License

/**
 * Queries Solr for the categories available under a catalog/category/product,
 * optionally returning the matching product documents as well.
 *
 * @param catalogId optional catalog filter
 * @param categoryId optional category to narrow the query (takes precedence over productId)
 * @param productId optional product to narrow the query
 * @param facetPrefix optional prefix restriction for the "cat" facet
 * @param displayproducts when true, return product rows; otherwise only facet counts
 * @param viewIndex start offset when displaying products (ignored if negative)
 * @param viewSize page size when displaying products (ignored if negative)
 * @param solrIndexName name of the Solr index/core to query
 * @return map with "rows" (the QueryResponse) and "numFound"; empty on error
 */
public static Map<String, Object> categoriesAvailable(String catalogId, String categoryId, String productId,
        String facetPrefix, boolean displayproducts, int viewIndex, int viewSize, String solrIndexName) {
    // create the data model
    Map<String, Object> result = new HashMap<String, Object>();
    HttpSolrClient client = null;
    try {
        // do the basic query
        client = getHttpSolrClient(solrIndexName);
        // create Query Object: only in-stock items, optionally narrowed
        // to one category or one product
        String query = "inStock[1 TO *]";
        if (categoryId != null) {
            query += " +cat:" + categoryId;
        } else if (productId != null) {
            query += " +productId:" + productId;
        }
        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery(query);

        if (catalogId != null) {
            solrQuery.setFilterQueries("catalog:" + catalogId);
        }
        if (displayproducts) {
            // return the matching product docs themselves
            if (viewSize > -1) {
                solrQuery.setRows(viewSize);
            } else {
                solrQuery.setRows(50000);
            }
            if (viewIndex > -1) {
                solrQuery.setStart(viewIndex);
            }
        } else {
            // only the category facet counts are needed, not the documents
            solrQuery.setFields("cat");
            solrQuery.setRows(0);
        }

        if (UtilValidate.isNotEmpty(facetPrefix)) {
            solrQuery.setFacetPrefix(facetPrefix);
        }

        solrQuery.setFacetMinCount(0);
        solrQuery.setFacet(true);
        solrQuery.addFacetField("cat");
        solrQuery.setFacetLimit(-1); // no cap on the number of facet values
        Debug.logVerbose("solr: solrQuery: " + solrQuery, module);
        QueryResponse returnMap = client.query(solrQuery, METHOD.POST);
        result.put("rows", returnMap);
        result.put("numFound", returnMap.getResults().getNumFound());
    } catch (Exception e) {
        Debug.logError(e.getMessage(), module);
    } finally {
        // Close the client; the original version leaked the HTTP connection.
        if (client != null) {
            try {
                client.close();
            } catch (Exception e) {
                // best effort: ignore close failures
            }
        }
    }
    return result;
}

From source file:org.bigsolr.hadoop.SolrInputFormat.java

License:Apache License

@Override
public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {
    log.info("SolrInputFormat -> getSplits");

    Configuration conf = context.getConfiguration();
    String collectionName = conf.get(COLLECTION_NAME);
    int numSplits = context.getNumReduceTasks();
    SolrServer solr = SolrOperations.getSolrServer(conf);

    final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY));
    solrQuery.setFields(ID_FIELD);
    solrQuery.setRows(50);/* www. jav a 2s  .  co  m*/
    solrQuery.set("collection", collectionName);
    solrQuery.setStart(0);

    QueryResponse response;
    try {
        response = solr.query(solrQuery);
    } catch (final SolrServerException e) {
        throw new IOException(e);
    }

    int numResults = (int) response.getResults().getNumFound();
    int numDocsPerSplit = (numResults / numSplits);
    int currentDoc = 0;

    List<InputSplit> splits = new ArrayList<InputSplit>();
    for (int i = 0; i < numSplits - 1; i++) {
        splits.add(new SolrInputSplit(currentDoc, numDocsPerSplit));
        currentDoc += numDocsPerSplit;
    }
    splits.add(new SolrInputSplit(currentDoc, numResults - currentDoc));

    return splits;
}

From source file:org.bigsolr.hadoop.SolrInputFormat.java

License:Apache License

@Override
public org.apache.hadoop.mapred.InputSplit[] getSplits(org.apache.hadoop.mapred.JobConf conf, int numSplits)
        throws IOException {
    log.info("SolrInputFormat -> getSplits");
    String collectionName = conf.get(COLLECTION_NAME);
    SolrServer solr = SolrOperations.getSolrServer(conf);

    final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY));
    solrQuery.setFields(ID_FIELD);
    solrQuery.setRows(50);//from   ww w  .  j  a va 2s .c  om
    solrQuery.set("collection", collectionName);
    solrQuery.setStart(0);

    QueryResponse response;
    try {
        response = solr.query(solrQuery);
    } catch (final SolrServerException e) {
        throw new IOException(e);
    }

    int numResults = (int) response.getResults().getNumFound();
    int numDocsPerSplit = (numResults / numSplits);
    int currentDoc = 0;

    List<InputSplit> splits = new ArrayList<InputSplit>();
    for (int i = 0; i < numSplits - 1; i++) {
        splits.add(new SolrInputSplit(currentDoc, numDocsPerSplit));
        currentDoc += numDocsPerSplit;
    }
    splits.add(new SolrInputSplit(currentDoc, numResults - currentDoc));

    return splits.toArray(new SolrInputSplit[splits.size()]);
}

From source file:org.bigsolr.hadoop.SolrInputFormat.java

License:Apache License

@Override
public RecordReader<NullWritable, SolrRecord> createRecordReader(InputSplit split, TaskAttemptContext context)
        throws IOException, InterruptedException {

    log.info("SolrInputFormat -> createRecordReader");

    Configuration conf = context.getConfiguration();
    org.apache.hadoop.mapred.Reporter reporter = null; // Need to implement with heartbeat

    String collectionName = conf.get(COLLECTION_NAME);
    String fields = conf.get(FIELDS);
    SolrServer solr = SolrOperations.getSolrServer(conf);

    SolrInputSplit solrSplit = (SolrInputSplit) split;
    final int numDocs = (int) solrSplit.getLength();

    final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY));

    solrQuery.setFields(fields);
    solrQuery.set("collection", collectionName);
    solrQuery.setStart(solrSplit.getDocBegin());
    solrQuery.setRows(numDocs);/*  w  ww.j a  v a 2s.c  om*/

    QueryResponse response;
    try {
        response = solr.query(solrQuery);
    } catch (final SolrServerException e) {
        throw new IOException(e);
    }

    final SolrDocumentList solrDocs = response.getResults();
    return new SolrRecordReader(solrDocs, numDocs);
}

From source file:org.bigsolr.hadoop.SolrInputFormat.java

License:Apache License

@Override
public org.apache.hadoop.mapred.RecordReader<NullWritable, SolrRecord> getRecordReader(
        org.apache.hadoop.mapred.InputSplit split, org.apache.hadoop.mapred.JobConf conf,
        org.apache.hadoop.mapred.Reporter reporter) throws IOException {

    log.info("SolrInputFormat -> getRecordReader");

    String collectionName = conf.get(COLLECTION_NAME);
    String fields = conf.get(FIELDS);
    SolrServer solr = SolrOperations.getSolrServer(conf);
    int numDocs = 0;

    SolrInputSplit solrSplit = (SolrInputSplit) split;
    try {//from w w  w .  ja  v  a2s  .co  m
        numDocs = (int) solrSplit.getLength();
    } catch (final IOException e) {
        throw new IOException(e);
    }

    final SolrQuery solrQuery = new SolrQuery(conf.get(SOLR_QUERY));
    solrQuery.setFields(fields);
    solrQuery.set("collection", collectionName); // Added
    solrQuery.setStart(solrSplit.getDocBegin());
    solrQuery.setRows(numDocs);

    QueryResponse response = null;
    try {
        response = solr.query(solrQuery);
    } catch (final SolrServerException e) {
        throw new IOException(e);
    }

    final SolrDocumentList solrDocs = response.getResults();
    return new SolrRecordReader(solrDocs, numDocs);
}

From source file:org.broadleafcommerce.core.search.service.solr.SolrSearchServiceImpl.java

License:Apache License

/**
 * Given a qualified solr query string (such as "category:2002"), actually performs a solr search. It will
 * take the search criteria into consideration to build out facets / pagination / sorting.
 *
 * @param qualifiedSolrQuery the fully-qualified Solr query string to execute
 * @param facets the facets to attach to the query and populate from the response
 * @param searchCriteria paging, sorting, and active-facet-filter criteria
 * @param defaultSort sort clause applied when the criteria specify none
 * @param filterQueries optional additional Solr filter queries
 * @return the ProductSearchResult of the search
 * @throws ServiceException if the Solr query fails
 */
protected SearchResult findSearchResults(String qualifiedSolrQuery, List<SearchFacetDTO> facets,
        SearchCriteria searchCriteria, String defaultSort, String... filterQueries) throws ServiceException {
    Map<String, SearchFacetDTO> namedFacetMap = getNamedFacetMap(facets, searchCriteria);

    // Build the basic query
    // Solr queries with a 'start' parameter cannot be a negative number
    int start = (searchCriteria.getPage() <= 0) ? 0 : (searchCriteria.getPage() - 1);
    SolrQuery solrQuery = new SolrQuery().setQuery(qualifiedSolrQuery).setRows(searchCriteria.getPageSize())
            .setStart((start) * searchCriteria.getPageSize());

    //This is for SolrCloud.  We assume that we are always searching against a collection aliased as "PRIMARY"
    solrQuery.setParam("collection", SolrContext.PRIMARY); //This should be ignored if not using SolrCloud

    // Only the id field is fetched; full entities are hydrated from the
    // database afterwards via getSkus()/getProducts().
    if (useSku) {
        solrQuery.setFields(shs.getSkuIdFieldName());
    } else {
        solrQuery.setFields(shs.getProductIdFieldName());
    }
    if (filterQueries != null) {
        solrQuery.setFilterQueries(filterQueries);
    }
    // Restrict results to the current site namespace.
    solrQuery.addFilterQuery(shs.getNamespaceFieldName() + ":(\"" + shs.getCurrentNamespace() + "\")");
    solrQuery.set("defType", "edismax");
    solrQuery.set("qf", buildQueryFieldsString());

    // Attach additional restrictions
    attachSortClause(solrQuery, searchCriteria, defaultSort);
    attachActiveFacetFilters(solrQuery, namedFacetMap, searchCriteria);
    attachFacets(solrQuery, namedFacetMap);

    // Hooks: subclass customization first, then registered extensions.
    modifySolrQuery(solrQuery, qualifiedSolrQuery, facets, searchCriteria, defaultSort);

    extensionManager.getProxy().modifySolrQuery(solrQuery, qualifiedSolrQuery, facets, searchCriteria,
            defaultSort);

    if (LOG.isTraceEnabled()) {
        try {
            LOG.trace(URLDecoder.decode(solrQuery.toString(), "UTF-8"));
        } catch (Exception e) {
            LOG.trace("Couldn't UTF-8 URL Decode: " + solrQuery.toString());
        }
    }

    // Query solr
    QueryResponse response;
    List<SolrDocument> responseDocuments;
    int numResults = 0;
    try {
        response = SolrContext.getServer().query(solrQuery, getSolrQueryMethod());
        responseDocuments = getResponseDocuments(response);
        numResults = (int) response.getResults().getNumFound();

        if (LOG.isTraceEnabled()) {
            LOG.trace(response.toString());

            for (SolrDocument doc : responseDocuments) {
                LOG.trace(doc);
            }
        }
    } catch (SolrServerException e) {
        throw new ServiceException("Could not perform search", e);
    }

    // Get the facets
    setFacetResults(namedFacetMap, response);
    sortFacetResults(namedFacetMap);

    SearchResult result = new SearchResult();
    result.setFacets(facets);
    setPagingAttributes(result, numResults, searchCriteria);

    if (useSku) {
        List<Sku> skus = getSkus(responseDocuments);
        result.setSkus(skus);
    } else {
        // Get the products
        List<Product> products = getProducts(responseDocuments);
        result.setProducts(products);
    }

    return result;
}