List of usage examples for org.apache.solr.client.solrj SolrClient add
public UpdateResponse add(Iterator<SolrInputDocument> docIterator) throws SolrServerException, IOException
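Note: the examples below exercise other add overloads (single document, collection, and collection with a commit-within deadline) rather than the Iterator overload shown above. A minimal sketch of the Iterator variant, assuming a core reachable at http://localhost:8983/solr/mycore (the URL and field names are illustrative, not from any source file below):

import java.util.Arrays;
import java.util.Iterator;

import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrInputDocument;

public class AddIteratorExample {
    public static void main(String[] args) throws Exception {
        SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycore").build();

        SolrInputDocument doc1 = new SolrInputDocument();
        doc1.addField("id", "doc-1");
        SolrInputDocument doc2 = new SolrInputDocument();
        doc2.addField("id", "doc-2");

        // The Iterator overload streams documents to the server without
        // requiring the whole batch to be materialized as a collection first.
        Iterator<SolrInputDocument> it = Arrays.asList(doc1, doc2).iterator();
        UpdateResponse rsp = client.add(it);

        client.commit(); // add() alone does not make the documents searchable
        client.close();
    }
}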
From source file:com.glluch.ecf2xmlmaven.Competence.java
License:Open Source License
public void toSolr() throws SolrServerException, IOException {
    SolrClient solr = new HttpSolrClient(serverUrl);
    //solr.setDefaultCollection("ecf");
    SolrInputDocument doc1 = new SolrInputDocument();
    doc1.addField("title", this.code);
    doc1.addField("content", this.terms.toString());
    solr.add(doc1);
    //System.out.println(a);
}
From source file:com.headwire.aemsolrsearch.geometrixx.listeners.SolrGeometrixxPageListener.java
License:Apache License
protected void addOrUpdatePage(Resource pageRes, SolrClient solr) {
    if (pageRes == null) {
        LOG.error("Page does not exist to add/update in solr");
        return;
    }

    GeometrixxContentType dataPage = GeometrixxContentTypeFactory.getInstance(pageRes);
    try {
        LOG.info("Adding/updating page " + pageRes.getPath());
        solr.add(dataPage.getSolrDoc());
        solr.commit();
    } catch (Exception e) {
        LOG.error("Failure to add/update page " + pageRes.getPath(), e);
    }
}
From source file:com.headwire.aemsolrsearch.geometrixxmedia.listeners.SolrPageListener.java
License:Apache License
protected void addOrUpdatePage(Resource pageRes, SolrClient solr) {
    if (pageRes == null) {
        LOG.error("Page does not exist to add/update in solr");
        return;
    }

    GeometrixxMediaContentType dataPage = GeometrixxMediaContentTypeFactory.getInstance(pageRes);
    try {
        LOG.info("Adding/updating page " + pageRes.getPath());
        solr.add(dataPage.getSolrDoc());
        solr.commit();
    } catch (Exception e) {
        LOG.error("Failure to add/update page " + pageRes.getPath(), e);
    }
}
From source file:com.hurence.logisland.service.solr.SolrTokenizationTest.java
License:Apache License
@Test
public void testTokenizerInSolr() throws SolrServerException, IOException {
    SolrClient server = rule.getClient();
    ModifiableSolrParams params = new ModifiableSolrParams();

    // ** Let's index a document into our embedded server
    SolrInputDocument newDoc = new SolrInputDocument();
    newDoc.addField("host", "Test Document 1");
    newDoc.addField("name", "doc-1");
    newDoc.addField("type", "Hello world!");
    newDoc.addField("start", new Date().getTime());
    newDoc.addField("end", new Date().getTime() + 1000);
    server.add(newDoc);
    server.commit();

    // ** And now let's query for it
    params.set("q", "name:doc-1");
    QueryResponse qResp = server.query(params);

    SolrDocumentList docList = qResp.getResults();
    assertTrue(docList.getNumFound() == 1);
    SolrDocument doc = docList.get(0);
    assertTrue(doc.getFirstValue("host").equals("Test Document 1"));
}
From source file:de.qaware.chronix.solr.client.add.SolrAddingService.java
License:Apache License
/**
 * Adds the given collection of time series to Apache Solr.
 * Note: The add method does not commit the time series.
 *
 * @param converter  - the converter to convert the time series into a Solr document
 * @param timeSeries - the collection with the time series
 * @param connection - the connection to Apache Solr
 * @return true if successful, otherwise false
 */
public static <T> boolean add(TimeSeriesConverter<T> converter, Collection<T> timeSeries,
        SolrClient connection) {
    if (timeSeries == null || timeSeries.isEmpty()) {
        LOGGER.debug("Collection is empty. Nothing to commit");
        return true;
    }

    List<SolrInputDocument> collection = Collections.synchronizedList(new ArrayList<>());
    timeSeries.parallelStream().forEach(ts -> collection.add(convert(ts, converter)));

    // adding a collection is faster than adding single documents
    try {
        return evaluate(connection.add(collection));
    } catch (SolrServerException | IOException e) {
        LOGGER.error("Could not add document to solr.", e);
        return false;
    }
}
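Because this helper deliberately leaves committing to the caller (per the Javadoc note above), a call site would typically commit explicitly after a successful add. A hedged sketch; converter, timeSeriesBatch, and connection are placeholder names, not taken from the Chronix source:

// Hypothetical call site: SolrAddingService.add() never commits on its own.
boolean ok = SolrAddingService.add(converter, timeSeriesBatch, connection);
if (ok) {
    connection.commit(); // make the newly added batch visible to searchers
}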
From source file:de.qaware.spark.importer.spark.DenormSparkSolrMetricsImporter.java
License:Apache License
/**
 * Helper to import a single file.
 *
 * @param fileUrl    the filename.
 * @param fileStream the stream.
 */
private void importIntoSolr(String fileUrl, PortableDataStream fileStream) throws ParseException {
    SolrClient solrCloudClient = null;
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(fileStream.open()), 1000000)) {
        solrCloudClient = PooledSolrClient.getInstance(zkHost, COLLECTION_NAME).getClient();
        String line;

        // assuming first line is a csv header
        // Caution: This only works with files < 128 MB / One Hadoop Block
        String firstLine = reader.readLine();
        String[] fieldNames = firstLine.split(DELIMITER);
        if (!fieldNames[0].equals("Date")) {
            Logger.getLogger(SimpleSparkSolrMetricsImporter.class.getName()).warning("Unknown file format!");
            return;
        }

        // Build a list of value JSON strings. Each string contains the values of a single CSV row.
        List<StringBuilder> valuesList = new ArrayList<>();
        for (int i = 1; i < fieldNames.length; i++) {
            valuesList.add(new StringBuilder("values: ["));
        }

        // split host/process/type information out of the filename
        FileMetadata parts = new FileMetadata(fileUrl);

        Date minDate = null;
        Date maxDate = null;

        // loop over csv file, produce and add documents
        while ((line = reader.readLine()) != null) {
            String[] values = line.split(DELIMITER);
            if (minDate == null) {
                minDate = DATE_FORMAT.parse(values[0]);
            } else {
                maxDate = DATE_FORMAT.parse(values[0]);
            }

            // Produce a long String containing a JSON rep of all date:value Pairs
            for (int i = 1; i < fieldNames.length; i++) {
                valuesList.get(i - 1).append("{ d:\"").append(values[0]).append("\",v:\"").append(values[i])
                        .append("\"},");
            }
        }

        List<SolrInputDocument> documents = new ArrayList<>();
        int metricIdx = 1;
        for (StringBuilder values : valuesList) {
            values.append("]"); // close json array
            String metric = fieldNames[metricIdx++];
            byte[] compressedJson = StringCompressor.compress(values.toString());
            String compressedJsonBase64 = Base64.getEncoder().encodeToString(compressedJson);
            documents.add(createDocument(parts, metric, compressedJsonBase64, minDate, maxDate));
        }

        solrCloudClient.add(documents);
        solrCloudClient.commit();
    } catch (IOException | SolrServerException e) {
        Logger.getLogger(SimpleSparkSolrMetricsImporter.class.getName()).warning(e.getMessage());
    } finally {
        if (solrCloudClient != null) {
            PooledSolrClient.getInstance(zkHost, COLLECTION_NAME).takeBack(solrCloudClient);
        }
    }
}
From source file:net.hasor.search.server.rsf.service.SorlDumpService.java
License:Apache License
private UpdateSearchResult addSolrDoc(final List<SolrInputDocument> solrDocs, final Integer commitWithinMs) {
    if (solrDocs == null || solrDocs.isEmpty()) {
        UpdateSearchResult result = new UpdateSearchResult();
        result.setSuccess(false);
        result.setMessage("docs is empty or null.");
        LoggerHelper.logWarn("addList failure, %s", result);
        return result;
    }

    return this.doExecute(new ExecuteService() {
        @Override
        public UpdateResponse doExecute(SolrClient solrClient) throws Throwable {
            if (commitWithinMs == null) {
                return solrClient.add(solrDocs);
            } else {
                return solrClient.add(solrDocs, commitWithinMs.intValue());
            }
        }
    });
}
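The branch above picks between two SolrClient overloads: add(docs) leaves commit timing to the caller or to server-side autocommit configuration, while add(docs, commitWithinMs) asks Solr itself to commit within the given number of milliseconds. A minimal sketch of the difference, assuming solrClient and docs already exist:

// Commit timing left to the caller or to server-side autocommit:
solrClient.add(docs);

// Solr commits these documents itself within 5 seconds,
// with no explicit commit() call from the client:
solrClient.add(docs, 5000);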
From source file:org.apache.nifi.processors.solr.TestPutSolrContentStream.java
License:Apache License
@Test
public void testDeleteWithXml() throws IOException, SolrServerException {
    final SolrClient solrClient = createEmbeddedSolrClient(DEFAULT_SOLR_CORE);
    final TestableProcessor proc = new TestableProcessor(solrClient);

    final TestRunner runner = createDefaultTestRunner(proc);
    runner.setProperty(PutSolrContentStream.CONTENT_STREAM_PATH, "/update");
    runner.setProperty(PutSolrContentStream.CONTENT_TYPE, "application/xml");
    runner.setProperty("commit", "true");

    // add a document so there is something to delete
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "1");
    doc.addField("first", "bob");
    doc.addField("last", "smith");
    doc.addField("created", new Date());

    solrClient.add(doc);
    solrClient.commit();

    // prove the document got added
    SolrQuery query = new SolrQuery("*:*");
    QueryResponse qResponse = solrClient.query(query);
    Assert.assertEquals(1, qResponse.getResults().getNumFound());

    // run the processor with a delete-by-query command
    runner.enqueue("<delete><query>first:bob</query></delete>".getBytes("UTF-8"));
    runner.run(1, false);

    // prove the document got deleted
    qResponse = solrClient.query(query);
    Assert.assertEquals(0, qResponse.getResults().getNumFound());
}
From source file:org.apache.nutch.indexwriter.solr.TestSolrJ.java
License:Apache License
/**
 * query the example
 */
@Test
public void testQuery() throws Exception {
    SolrClient client = new HttpSolrClient.Builder(serverUrl).build();

    // Empty the database...
    client.deleteByQuery("*:*"); // delete everything!

    // Now add something...
    SolrInputDocument doc = new SolrInputDocument();
    String docID = "1112211111";
    doc.addField("id", docID, 1.0f);
    doc.addField("name", "my name!", 1.0f);

    assertEquals(null, doc.getField("foo"));
    assertTrue(doc.getField("name").getValue() != null);

    UpdateResponse upres = client.add(doc);
    // System.out.println( "ADD:"+upres.getResponse() );
    assertEquals(0, upres.getStatus());

    upres = client.commit(true, true);
    // System.out.println( "COMMIT:"+upres.getResponse() );
    assertEquals(0, upres.getStatus());

    upres = client.optimize(true, true);
    // System.out.println( "OPTIMIZE:"+upres.getResponse() );
    assertEquals(0, upres.getStatus());

    SolrQuery query = new SolrQuery();
    query.setQuery("id:" + docID);
    QueryResponse response = client.query(query);

    assertEquals(docID, response.getResults().get(0).getFieldValue("id"));

    // Now add a few docs for facet testing...
    List<SolrInputDocument> docs = new ArrayList<>();
    SolrInputDocument doc2 = new SolrInputDocument();
    doc2.addField("id", "2", 1.0f);
    doc2.addField("inStock", true, 1.0f);
    doc2.addField("price", 2, 1.0f);
    doc2.addField("timestamp_dt", new java.util.Date(), 1.0f);
    docs.add(doc2);
    SolrInputDocument doc3 = new SolrInputDocument();
    doc3.addField("id", "3", 1.0f);
    doc3.addField("inStock", false, 1.0f);
    doc3.addField("price", 3, 1.0f);
    doc3.addField("timestamp_dt", new java.util.Date(), 1.0f);
    docs.add(doc3);
    SolrInputDocument doc4 = new SolrInputDocument();
    doc4.addField("id", "4", 1.0f);
    doc4.addField("inStock", true, 1.0f);
    doc4.addField("price", 4, 1.0f);
    doc4.addField("timestamp_dt", new java.util.Date(), 1.0f);
    docs.add(doc4);
    SolrInputDocument doc5 = new SolrInputDocument();
    doc5.addField("id", "5", 1.0f);
    doc5.addField("inStock", false, 1.0f);
    doc5.addField("price", 5, 1.0f);
    doc5.addField("timestamp_dt", new java.util.Date(), 1.0f);
    docs.add(doc5);

    upres = client.add(docs);
    // System.out.println( "ADD:"+upres.getResponse() );
    assertEquals(0, upres.getStatus());

    upres = client.commit(true, true);
    // System.out.println( "COMMIT:"+upres.getResponse() );
    assertEquals(0, upres.getStatus());

    upres = client.optimize(true, true);
    // System.out.println( "OPTIMIZE:"+upres.getResponse() );
    assertEquals(0, upres.getStatus());

    query = new SolrQuery("*:*");
    query.addFacetQuery("price:[* TO 2]");
    query.addFacetQuery("price:[2 TO 4]");
    query.addFacetQuery("price:[5 TO *]");
    query.addFacetField("inStock");
    query.addFacetField("price");
    query.addFacetField("timestamp_dt");
    query.removeFilterQuery("inStock:true");

    response = client.query(query);
    assertEquals(0, response.getStatus());
    assertEquals(5, response.getResults().getNumFound());
    assertEquals(3, response.getFacetQuery().size());
    assertEquals(2, response.getFacetField("inStock").getValueCount());
    assertEquals(4, response.getFacetField("price").getValueCount());

    // test a second query, test making a copy of the main query
    SolrQuery query2 = query.getCopy();
    query2.addFilterQuery("inStock:true");
    response = client.query(query2);
    assertEquals(1, query2.getFilterQueries().length);
    assertEquals(0, response.getStatus());
    assertEquals(2, response.getResults().getNumFound());
    assertFalse(query.getFilterQueries() == query2.getFilterQueries());

    // sanity check round tripping of params...
    query = new SolrQuery("foo");
    query.addFilterQuery("{!field f=inStock}true");
    query.addFilterQuery("{!term f=name}hoss");
    query.addFacetQuery("price:[* TO 2]");
    query.addFacetQuery("price:[2 TO 4]");
    response = client.query(query);

    assertTrue("echoed params are not a NamedList: " + response.getResponseHeader().get("params").getClass(),
            response.getResponseHeader().get("params") instanceof NamedList);
    NamedList echo = (NamedList) response.getResponseHeader().get("params");
    List values = null;
    assertEquals("foo", echo.get("q"));
    assertTrue("echoed fq is not a List: " + echo.get("fq").getClass(), echo.get("fq") instanceof List);
    values = (List) echo.get("fq");
    assertEquals(2, values.size());
    assertEquals("{!field f=inStock}true", values.get(0));
    assertEquals("{!term f=name}hoss", values.get(1));
    assertTrue("echoed facet.query is not a List: " + echo.get("facet.query").getClass(),
            echo.get("facet.query") instanceof List);
    values = (List) echo.get("facet.query");
    assertEquals(2, values.size());
    assertEquals("price:[* TO 2]", values.get(0));
    assertEquals("price:[2 TO 4]", values.get(1));
}
From source file:org.apache.ranger.audit.utils.SolrAppUtil.java
License:Apache License
public static UpdateResponse addDocsToSolr(final SolrClient solrClient,
        final Collection<SolrInputDocument> docs) throws Exception {
    return MiscUtil.executePrivilegedAction(new PrivilegedExceptionAction<UpdateResponse>() {
        @Override
        public UpdateResponse run() throws Exception {
            return solrClient.add(docs);
        }
    });
}
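Design note: wrapping the add in a PrivilegedExceptionAction lets MiscUtil.executePrivilegedAction run the update under a previously established JAAS login subject, which matters on Kerberos-secured Solr clusters; on unsecured setups the wrapper is effectively a pass-through.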