List of usage examples for the constructor org.apache.solr.client.solrj.SolrQuery#SolrQuery(String q)
public SolrQuery(String q)
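Every example below passes a query string straight to this constructor, which sets Solr's q parameter. Before the longer examples, here is a minimal, self-contained sketch of that shared pattern; the Solr URL and core name ("techproducts") are placeholders chosen for illustration, not values taken from any of the source files listed on this page.

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;

public class SolrQueryConstructorSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder URL and core name; adjust to your own Solr installation.
        try (HttpSolrClient client = new HttpSolrClient.Builder(
                "http://localhost:8983/solr/techproducts").build()) {
            // The constructor under discussion: SolrQuery(String q) sets the "q" parameter.
            SolrQuery query = new SolrQuery("*:*");
            query.setRows(10);                        // limit the result window
            query.addSort("id", SolrQuery.ORDER.asc); // stable ordering, as in the tests below

            QueryResponse rsp = client.query(query);
            for (SolrDocument doc : rsp.getResults()) {
                System.out.println(doc.getFieldValue("id"));
            }
        }
    }
}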
From source file:com.cloudera.cdk.morphline.solr.SolrMorphlineZkAliasTest.java
License:Apache License
@Override
public void doTest() throws Exception {
    waitForRecoveriesToFinish(false);
    createAlias("aliascollection", "collection1");
    morphline = parse("test-morphlines/loadSolrBasic", "aliascollection");

    Record record = new Record();
    record.put(Fields.ID, "id0-innsbruck");
    record.put("text", "mytext");
    record.put("user_screen_name", "foo");
    record.put("first_name", "Nadja"); // will be sanitized
    startSession();
    assertEquals(1, collector.getNumStartEvents());
    Notifications.notifyBeginTransaction(morphline);
    assertTrue(morphline.process(record));

    record = new Record();
    record.put(Fields.ID, "id1-innsbruck");
    record.put("text", "mytext1");
    record.put("user_screen_name", "foo1");
    record.put("first_name", "Nadja1"); // will be sanitized
    assertTrue(morphline.process(record));

    Record expected = new Record();
    expected.put(Fields.ID, "id0-innsbruck");
    expected.put("text", "mytext");
    expected.put("user_screen_name", "foo");
    Iterator<Record> citer = collector.getRecords().iterator();
    assertEquals(expected, citer.next());

    Record expected2 = new Record();
    expected2.put(Fields.ID, "id1-innsbruck");
    expected2.put("text", "mytext1");
    expected2.put("user_screen_name", "foo1");
    assertEquals(expected2, citer.next());
    assertFalse(citer.hasNext());

    commit();

    QueryResponse rsp = cloudClient
            .query(new SolrQuery("*:*").setRows(100000).addSort(Fields.ID, SolrQuery.ORDER.asc));
    //System.out.println(rsp);
    Iterator<SolrDocument> iter = rsp.getResults().iterator();
    assertEquals(expected.getFields(), next(iter));
    assertEquals(expected2.getFields(), next(iter));
    assertFalse(iter.hasNext());

    Notifications.notifyRollbackTransaction(morphline);
    Notifications.notifyShutdown(morphline);

    createAlias("aliascollection", "collection1,collection2");
    try {
        parse("test-morphlines/loadSolrBasic", "aliascollection");
        fail("Expected IAE because update alias maps to multiple collections");
    } catch (IllegalArgumentException e) {
    }
}
From source file:com.cloudera.cdk.morphline.solr.SolrMorphlineZkAvroTest.java
License:Apache License
@Override
public void doTest() throws Exception {
    File file = new File(RESOURCES_DIR + "/test-documents/sample-statuses-20120906-141433-medium.avro");
    waitForRecoveriesToFinish(false);

    // load avro records via morphline and zk into solr
    morphline = parse("test-morphlines/tutorialReadAvroContainer");
    Record record = new Record();
    byte[] body = Files.toByteArray(file);
    record.put(Fields.ATTACHMENT_BODY, body);
    startSession();
    Notifications.notifyBeginTransaction(morphline);
    assertTrue(morphline.process(record));
    assertEquals(1, collector.getNumStartEvents());

    commit();

    // fetch sorted result set from solr
    QueryResponse rsp = cloudClient
            .query(new SolrQuery("*:*").setRows(100000).addSort("id", SolrQuery.ORDER.asc));
    assertEquals(2104, collector.getRecords().size());
    assertEquals(collector.getRecords().size(), rsp.getResults().size());
    Collections.sort(collector.getRecords(), new Comparator<Record>() {
        @Override
        public int compare(Record r1, Record r2) {
            return r1.get("id").toString().compareTo(r2.get("id").toString());
        }
    });

    // fetch test input data and sort like solr result set
    List<GenericData.Record> records = new ArrayList<GenericData.Record>();
    FileReader<GenericData.Record> reader = new DataFileReader<GenericData.Record>(file,
            new GenericDatumReader<GenericData.Record>());
    while (reader.hasNext()) {
        GenericData.Record expected = reader.next();
        records.add(expected);
    }
    assertEquals(collector.getRecords().size(), records.size());
    Collections.sort(records, new Comparator<GenericData.Record>() {
        @Override
        public int compare(GenericData.Record r1, GenericData.Record r2) {
            return r1.get("id").toString().compareTo(r2.get("id").toString());
        }
    });

    // sanity check: the sorted test input must not contain adjacent records with the same id
    Object lastId = null;
    for (int i = 0; i < records.size(); i++) {
        //System.out.println("myrec" + i + ":" + records.get(i));
        Object id = records.get(i).get("id");
        if (id != null && id.equals(lastId)) {
            throw new IllegalStateException(
                    "Detected duplicate id. Test input data must not contain duplicate ids!");
        }
        lastId = id;
    }
    for (int i = 0; i < records.size(); i++) {
        //System.out.println("myrsp" + i + ":" + rsp.getResults().get(i));
    }

    Iterator<SolrDocument> rspIter = rsp.getResults().iterator();
    for (int i = 0; i < records.size(); i++) {
        // verify morphline spat out expected data
        Record actual = collector.getRecords().get(i);
        GenericData.Record expected = records.get(i);
        Preconditions.checkNotNull(expected);
        assertTweetEquals(expected, actual, i);

        // verify Solr result set contains expected data
        actual = new Record();
        actual.getFields().putAll(next(rspIter));
        assertTweetEquals(expected, actual, i);
    }

    Notifications.notifyRollbackTransaction(morphline);
    Notifications.notifyShutdown(morphline);
    cloudClient.shutdown();
}
From source file:com.cloudera.cdk.morphline.solr.SolrMorphlineZkTest.java
License:Apache License
@Override
public void doTest() throws Exception {
    waitForRecoveriesToFinish(false);
    morphline = parse("test-morphlines/loadSolrBasic");

    Record record = new Record();
    record.put(Fields.ID, "id0-innsbruck");
    record.put("text", "mytext");
    record.put("user_screen_name", "foo");
    record.put("first_name", "Nadja"); // will be sanitized
    startSession();
    assertEquals(1, collector.getNumStartEvents());
    Notifications.notifyBeginTransaction(morphline);
    assertTrue(morphline.process(record));

    record = new Record();
    record.put(Fields.ID, "id1-innsbruck");
    record.put("text", "mytext1");
    record.put("user_screen_name", "foo1");
    record.put("first_name", "Nadja1"); // will be sanitized
    assertTrue(morphline.process(record));

    Record expected = new Record();
    expected.put(Fields.ID, "id0-innsbruck");
    expected.put("text", "mytext");
    expected.put("user_screen_name", "foo");
    Iterator<Record> citer = collector.getRecords().iterator();
    assertEquals(expected, citer.next());

    Record expected2 = new Record();
    expected2.put(Fields.ID, "id1-innsbruck");
    expected2.put("text", "mytext1");
    expected2.put("user_screen_name", "foo1");
    assertEquals(expected2, citer.next());
    assertFalse(citer.hasNext());

    commit();

    QueryResponse rsp = cloudClient
            .query(new SolrQuery("*:*").setRows(100000).addSort(Fields.ID, SolrQuery.ORDER.asc));
    //System.out.println(rsp);
    Iterator<SolrDocument> iter = rsp.getResults().iterator();
    assertEquals(expected.getFields(), next(iter));
    assertEquals(expected2.getFields(), next(iter));
    assertFalse(iter.hasNext());

    Notifications.notifyRollbackTransaction(morphline);
    Notifications.notifyShutdown(morphline);
    cloudClient.shutdown();
}
From source file:com.cmart.DB.DBQuery.java
License:Open Source License
/**
 * Update the price of an item in the Solr database. This occurs when a user bids for an item.
 *
 * @param itemID   The item to update
 * @param bidPrice The new price of the item
 */
public void updateSolr(long itemID, double bidPrice) {
    SolrQuery query = new SolrQuery("id:" + itemID);
    QueryResponse rsp = null;
    try {
        rsp = SOLR_SERVER.query(query);
        Iterator<SolrDocument> iter = rsp.getResults().iterator();
        if (iter.hasNext()) {
            SolrDocument resultDoc = iter.next();
            String id = (String) resultDoc.getFieldValue("id");
            String name = (String) resultDoc.getFieldValue("name");
            String description = (String) resultDoc.getFieldValue("description");
            Date endDate = (Date) resultDoc.getFieldValue("endDate");
            addToSolr(Long.valueOf(id), name, description, bidPrice, endDate);
        }
    } catch (SolrServerException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
From source file:com.doculibre.constellio.services.StatusServicesImpl.java
License:Open Source License
@Override
public List<Record> listLastIndexedRecords(RecordCollection collection, int maxSize) {
    List<Record> lastIndexedRecords;
    RecordServices recordServices = ConstellioSpringUtils.getRecordServices();

    String luceneQuery = SimpleSearch.SEARCH_ALL;
    SolrQuery solrQuery = new SolrQuery(luceneQuery);
    solrQuery.setRequestHandler("standard");
    solrQuery.setRows(maxSize);
    solrQuery.setSort(IndexField.LAST_INDEXED_FIELD, ORDER.desc);

    String collectionName = collection.getName();
    solrQuery.setParam(ConstellioSolrQueryParams.LUCENE_QUERY, luceneQuery);
    solrQuery.setParam(ConstellioSolrQueryParams.COLLECTION_NAME, collectionName);

    SolrServer server = SolrCoreContext.getSolrServer(collectionName);
    if (server != null && collection.getIndexField(IndexField.LAST_INDEXED_FIELD) != null) {
        try {
            QueryResponse queryResponse = server.query(solrQuery);
            SolrDocumentList results = queryResponse.getResults();
            List<Number> recordIds = new ArrayList<Number>();
            for (SolrDocument result : results) {
                Long recordId = new Long(result.getFieldValue(IndexField.RECORD_ID_FIELD).toString());
                recordIds.add(recordId);
            }
            if (!recordIds.isEmpty()) {
                lastIndexedRecords = recordServices.list(recordIds, collection);
            } else {
                lastIndexedRecords = new ArrayList<Record>();
            }
        } catch (SolrServerException e) {
            throw new RuntimeException(e);
        }
    } else if (!collection.isSynchronizationRequired()) {
        String msg = "No SolrServer available for collection id " + collection.getId();
        LOGGER.log(Level.SEVERE, msg);
        lastIndexedRecords = new ArrayList<Record>();
    } else if (collection.getIndexField(IndexField.LAST_INDEXED_FIELD) == null) {
        String msg = "No " + IndexField.LAST_INDEXED_FIELD + " index field for collection id "
                + collection.getId();
        LOGGER.log(Level.SEVERE, msg);
        lastIndexedRecords = new ArrayList<Record>();
    } else {
        lastIndexedRecords = new ArrayList<Record>();
    }
    return lastIndexedRecords;
}
From source file:com.github.fengtan.sophie.tables.DocumentsTable.java
License:Open Source License
/**
 * Get the select query and populate the start/rows/fq parameters.
 *
 * @param start
 *            Offset at which Solr should begin returning documents.
 * @param rows
 *            How many rows Solr should return.
 * @return Solr query.
 */
private SolrQuery getBaseQuery(int start, int rows) {
    SolrQuery query = new SolrQuery("*:*");
    query.setStart(start);
    query.setRows(rows);
    // Add filters.
    for (Entry<String, String> filter : filters.entrySet()) {
        if (StringUtils.equals(filter.getValue(), LABEL_EMPTY)) {
            // Empty value needs a special syntax.
            query.addFilterQuery("-" + filter.getKey() + ":[* TO *]");
        } else {
            // Colons in the value need to be escaped to avoid a syntax error.
            query.addFilterQuery(filter.getKey() + ":" + filter.getValue().replace(":", "\\:"));
        }
    }
    return query;
}
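The start/rows pair populated above is SolrJ's standard offset-based paging. As a rough sketch of how a caller might walk an entire result set with such a base query; the SolrClient, page size, and per-document processing here are assumptions for illustration and are not taken from the Sophie source.

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;

class PagingSketch {
    // Page through all documents using start/rows, mirroring how a base query
    // built like getBaseQuery(start, rows) would be consumed.
    static void fetchAll(SolrClient client, int pageSize) throws Exception {
        int start = 0;
        long numFound;
        do {
            SolrQuery query = new SolrQuery("*:*");
            query.setStart(start);
            query.setRows(pageSize);
            QueryResponse rsp = client.query(query);
            numFound = rsp.getResults().getNumFound();
            for (SolrDocument doc : rsp.getResults()) {
                System.out.println(doc.getFieldValue("id")); // placeholder processing
            }
            start += pageSize;
        } while (start < numFound);
    }
}

For very deep result sets, Solr's cursorMark paging avoids the cost of large start offsets, but offset paging matches what a start/rows base query exposes.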
From source file:com.hurence.logisland.service.solr.api.SolrClientService.java
License:Apache License
@Override
public long countCollection(String name) throws DatastoreClientServiceException {
    try {
        SolrQuery q = new SolrQuery("*:*");
        q.setRows(0); // don't actually request any data
        return getClient().query(name, q).getResults().getNumFound();
    } catch (Exception e) {
        throw new DatastoreClientServiceException(e);
    }
}
From source file:com.ibm.ecod.watson.RetrieveAndRankSolrJExample.java
License:Open Source License
/**
 * Search for the document indexed earlier.
 */
private static void searchAllDocs() throws IOException {
    System.out.println("Searching for document...");
    final SolrQuery query = new SolrQuery(QUERY_MATCHING_ANY_DOCUMENT);
    try {
        final QueryResponse response = solrClient.query(COLLECTION_NAME, query);
        System.out.println("Found " + response.getResults().size() + " documents!");
        System.out.println(response);
    } catch (final SolrServerException e) {
        throw new RuntimeException("Failed to search!", e);
    }
}
From source file:com.ibm.watson.apis.conversation_enhanced.retrieve_and_rank.Query.java
License:Open Source License
/**
 * Use the Watson Developer Cloud SDK to send the user's query to the Retrieve and Rank service.
 *
 * @param userQuery The user's query to be sent to the Retrieve and Rank service
 * @return The unaltered SOLR query responses obtained from the Retrieve and Rank service
 * @throws SolrServerException
 * @throws IOException
 */
public QueryResponse query(String userQuery) throws Exception {
    // Configure the Watson Developer Cloud SDK to make a call to the appropriate Retrieve and Rank
    // service. Specific information is obtained from environment variables and the services
    // associated with the app. See the Query constructor for details.
    RetrieveAndRank service = new RetrieveAndRank();
    HttpSolrClient solrClient = HttpSolrClientUtils.getSolrClient(service.getSolrUrl(CLUSTER_ID), USERNAME,
            PASSWORD);
    logger.info(Messages.getString("Query.PASS_CLUSTER_DETAILS")); //$NON-NLS-1$

    // Set up the query parameters
    final SolrQuery query = new SolrQuery(userQuery)
            // The fields we want in the response object
            .setFields(Constants.SCHEMA_FIELD_ID, Constants.SCHEMA_FIELD_BODY, Constants.SCHEMA_FIELD_TITLE,
                    Constants.SCHEMA_FIELD_CONFIDENCE, Constants.SCHEMA_FIELD_SOURCE_URL)
            // The size of the SOLR snippet that we show as our initial answers
            .setHighlight(true).setHighlightFragsize(150).setHighlightSnippets(1)
            // The field to perform highlighting on
            .setParam("hl.fl", Constants.SCHEMA_FIELD_BODY) //$NON-NLS-1$
            // The number of answers to return
            .setRows(Constants.RESULTS_TO_FETCH)
            // The Retrieve and Rank endpoint to hit
            .setRequestHandler("/fcselect") //$NON-NLS-1$
            // The ranker to rank the potential answers
            .setParam("ranker_id", RANKER_ID); //$NON-NLS-1$

    // Send the query to the Retrieve and Rank service to obtain answers to the user's query
    logger.info(Messages.getString("Query.QUERY_SOLR_RANKER")); //$NON-NLS-1$
    return solrClient.query(COLLECTION_NAME, query);
}
From source file:com.ibm.watson.developer_cloud.professor_languo.ingestion.RankerCreationUtil.java
License:Open Source License
/**
 * Retrieve a {@link CandidateAnswer} with its {@code threadPostId} by querying the /select
 * endpoint with query THREAD_POST_ID:x
 *
 * @param searcher An initialized {@link RetrieveAndRankSearcher}
 * @param threadPostId The THREAD_POST_ID of the answer thread
 * @return
 * @throws IOException
 * @throws IngestionException
 */
public static CandidateAnswer getCandidateAnswerById(RetrieveAndRankSearcher searcher, String threadPostId)
        throws IOException, IngestionException {
    CandidateAnswer answer = null;
    try {
        SolrQuery featureSolrQuery = new SolrQuery(
                RetrieveAndRankSearcherConstants.ID_FIELD + ":" + threadPostId);
        // specify the request handler for the feature query
        featureSolrQuery.setRequestHandler(RetrieveAndRankSearcherConstants.SELECT_REQUEST_HANDLER);

        // We expect only one response since THREAD_POST_ID is a unique key
        if (featureSolrQuery.size() != 1) {
            throw new IngestionException(threadPostId);
        }
        featureSolrQuery.setRows(1);

        final QueryRequest featureRequest = new QueryRequest(featureSolrQuery);
        QueryResponse featureResponse = null;
        featureResponse = searcher.processSolrRequest(featureRequest);
        for (SolrDocument doc : featureResponse.getResults()) {
            byte[] bin = (byte[]) doc.getFieldValue(IndexDocumentFieldName.SERIALIZED_THREAD.toString());
            answer = StackExchangeThreadSerializer.deserializeThreadFromBinArr(bin);
        }
    } catch (IOException | SolrServerException | InterruptedException e) {
        logger.error(e.getMessage());
    }
    return answer;
}