List of usage examples for org.apache.solr.client.solrj SolrClient commit
public UpdateResponse commit() throws SolrServerException, IOException
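A minimal end-to-end sketch of the call pattern, using a hypothetical local Solr URL and collection name: add a document through the client, then call commit() so pending updates become visible to searchers.

SolrClient client = new HttpSolrClient.Builder("http://localhost:8983/solr/mycollection").build();
try {
    // Index one document; it is not searchable until a commit happens.
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "example-1");
    client.add(doc);
    client.commit(); // flush pending updates and open a new searcher
} finally {
    client.close();
}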
From source file:com.frank.search.solr.core.SolrTemplate.java
License:Apache License
@Override
public void commit() {
    execute(new SolrCallback<UpdateResponse>() {
        @Override
        public UpdateResponse doInSolr(SolrClient solrClient) throws SolrServerException, IOException {
            return solrClient.commit();
        }
    });
}
From source file:com.headwire.aemsolrsearch.geometrixx.listeners.SolrGeometrixxPageListener.java
License:Apache License
protected void addOrUpdatePage(Resource pageRes, SolrClient solr) {
    if (pageRes == null) {
        LOG.error("Page does not exist to add/update in solr");
        return;
    }
    GeometrixxContentType dataPage = GeometrixxContentTypeFactory.getInstance(pageRes);
    try {
        LOG.info("Adding/updating page " + pageRes.getPath());
        solr.add(dataPage.getSolrDoc());
        solr.commit();
    } catch (Exception e) {
        LOG.error("Failure to add/update page " + pageRes.getPath(), e);
    }
}
From source file:com.headwire.aemsolrsearch.geometrixx.listeners.SolrGeometrixxPageListener.java
License:Apache License
protected void removePage(String id, SolrClient solr) {
    try {
        LOG.info("Removing page " + id);
        solr.deleteById(id);
        solr.commit();
    } catch (Exception e) {
        LOG.error("Failure to remove page " + id, e);
    }
}
From source file:com.headwire.aemsolrsearch.geometrixxmedia.listeners.SolrPageListener.java
License:Apache License
protected void addOrUpdatePage(Resource pageRes, SolrClient solr) {
    if (pageRes == null) {
        LOG.error("Page does not exist to add/update in solr");
        return;
    }
    GeometrixxMediaContentType dataPage = GeometrixxMediaContentTypeFactory.getInstance(pageRes);
    try {
        LOG.info("Adding/updating page " + pageRes.getPath());
        solr.add(dataPage.getSolrDoc());
        solr.commit();
    } catch (Exception e) {
        LOG.error("Failure to add/update page " + pageRes.getPath(), e);
    }
}
From source file:com.hurence.logisland.service.solr.SolrTokenizationTest.java
License:Apache License
@Test
public void testTokenizerInSolr() throws SolrServerException, IOException {
    SolrClient server = rule.getClient();
    ModifiableSolrParams params = new ModifiableSolrParams();

    // Index a document into our embedded server
    SolrInputDocument newDoc = new SolrInputDocument();
    newDoc.addField("host", "Test Document 1");
    newDoc.addField("name", "doc-1");
    newDoc.addField("type", "Hello world!");
    newDoc.addField("start", new Date().getTime());
    newDoc.addField("end", new Date().getTime() + 1000);
    server.add(newDoc);
    server.commit();

    // And now query for it
    params.set("q", "name:doc-1");
    QueryResponse qResp = server.query(params);

    SolrDocumentList docList = qResp.getResults();
    assertTrue(docList.getNumFound() == 1);
    SolrDocument doc = docList.get(0);
    assertTrue(doc.getFirstValue("host").equals("Test Document 1"));
}
From source file:de.qaware.spark.importer.spark.DenormSparkSolrMetricsImporter.java
License:Apache License
/**
 * Helper to import a single file.
 *
 * @param fileUrl    the filename.
 * @param fileStream the stream.
 */
private void importIntoSolr(String fileUrl, PortableDataStream fileStream) throws ParseException {
    SolrClient solrCloudClient = null;
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(fileStream.open()), 1000000)) {
        solrCloudClient = PooledSolrClient.getInstance(zkHost, COLLECTION_NAME).getClient();
        String line;

        // Assuming the first line is a CSV header.
        // Caution: this only works with files < 128 MB / one Hadoop block.
        String firstLine = reader.readLine();
        String[] fieldNames = firstLine.split(DELIMITER);
        if (!fieldNames[0].equals("Date")) {
            Logger.getLogger(SimpleSparkSolrMetricsImporter.class.getName()).warning("Unknown file format!");
            return;
        }

        // Build a list of value JSON strings. Each string collects the values of a
        // single CSV column (one metric) across all rows.
        List<StringBuilder> valuesList = new ArrayList<>();
        for (int i = 1; i < fieldNames.length; i++) {
            valuesList.add(new StringBuilder("values: ["));
        }

        // Split host/process/type information out of the filename.
        FileMetadata parts = new FileMetadata(fileUrl);
        Date minDate = null;
        Date maxDate = null;

        // Loop over the CSV file, produce and add documents.
        while ((line = reader.readLine()) != null) {
            String[] values = line.split(DELIMITER);
            if (minDate == null) {
                minDate = DATE_FORMAT.parse(values[0]);
            } else {
                maxDate = DATE_FORMAT.parse(values[0]);
            }
            // Produce a long String containing a JSON rep of all date:value pairs.
            for (int i = 1; i < fieldNames.length; i++) {
                valuesList.get(i - 1).append("{ d:\"").append(values[0]).append("\",v:\"").append(values[i])
                        .append("\"},");
            }
        }

        List<SolrInputDocument> documents = new ArrayList<>();
        int metricIdx = 1;
        for (StringBuilder values : valuesList) {
            values.append("]"); // close the JSON array
            String metric = fieldNames[metricIdx++];
            byte[] compressedJson = StringCompressor.compress(values.toString());
            String compressedJsonBase64 = Base64.getEncoder().encodeToString(compressedJson);
            documents.add(createDocument(parts, metric, compressedJsonBase64, minDate, maxDate));
        }
        solrCloudClient.add(documents);
        solrCloudClient.commit();
    } catch (IOException | SolrServerException e) {
        Logger.getLogger(SimpleSparkSolrMetricsImporter.class.getName()).warning(e.getMessage());
    } finally {
        if (solrCloudClient != null) {
            PooledSolrClient.getInstance(zkHost, COLLECTION_NAME).takeBack(solrCloudClient);
        }
    }
}
From source file:net.semanticmetadata.lire.solr.indexing.ParallelSolrIndexer.java
License:Open Source License
private void postIndexToServer() {
    try {
        SolrClient client = new HttpSolrClient.Builder(
                "http://54.93.254.52:8983/solr/" + gender + "_" + category).build();
        //SolrClient client = new HttpSolrClient.Builder("http://54.93.254.52:8983/solr/prueba").build();

        DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance();
        DocumentBuilder docBuilder = dbfac.newDocumentBuilder();
        Document doc = docBuilder.parse(outfile);

        TransformerFactory tf = TransformerFactory.newInstance();
        Transformer transformer = tf.newTransformer();
        transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
        StringWriter writer = new StringWriter();
        transformer.transform(new DOMSource(doc), new StreamResult(writer));
        String xmlOutput = writer.getBuffer().toString().replaceAll("[\n\r]", "");

        DirectXmlRequest xmlreq = new DirectXmlRequest("/update", xmlOutput);
        client.request(xmlreq);
        client.commit();
    } catch (SolrServerException | IOException | TransformerException | SAXException
            | ParserConfigurationException e) {
        e.printStackTrace();
    }
}
From source file:net.yacy.cora.federate.solr.instance.ServerShard.java
License:Open Source License
/**
 * Performs an explicit commit, causing pending documents to be committed for indexing.
 * <p>
 * waitFlush=true and waitSearcher=true to be in line with the defaults for plain HTTP access.
 * @throws IOException If there is a low-level I/O error.
 */
@Override
public UpdateResponse commit() throws SolrServerException, IOException {
    if (!this.writeEnabled) return _dummyOKResponse;
    UpdateResponse ur = null;
    for (SolrClient s : this.shards) ur = s.commit();
    return ur;
}
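The javadoc above notes that a plain commit() implies waitFlush=true and waitSearcher=true. SolrJ also exposes an overload that sets these flags explicitly; a short sketch, with client construction assumed as in the other examples on this page:

// Issue the commit but do not block until a new searcher is registered.
UpdateResponse rsp = client.commit(true, false);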
From source file:org.apache.nifi.processors.solr.TestPutSolrContentStream.java
License:Apache License
@Test
public void testDeleteWithXml() throws IOException, SolrServerException {
    final SolrClient solrClient = createEmbeddedSolrClient(DEFAULT_SOLR_CORE);
    final TestableProcessor proc = new TestableProcessor(solrClient);

    final TestRunner runner = createDefaultTestRunner(proc);
    runner.setProperty(PutSolrContentStream.CONTENT_STREAM_PATH, "/update");
    runner.setProperty(PutSolrContentStream.CONTENT_TYPE, "application/xml");
    runner.setProperty("commit", "true");

    // add a document so there is something to delete
    SolrInputDocument doc = new SolrInputDocument();
    doc.addField("id", "1");
    doc.addField("first", "bob");
    doc.addField("last", "smith");
    doc.addField("created", new Date());
    solrClient.add(doc);
    solrClient.commit();

    // prove the document got added
    SolrQuery query = new SolrQuery("*:*");
    QueryResponse qResponse = solrClient.query(query);
    Assert.assertEquals(1, qResponse.getResults().getNumFound());

    // run the processor with a delete-by-query command
    runner.enqueue("<delete><query>first:bob</query></delete>".getBytes("UTF-8"));
    runner.run(1, false);

    // prove the document got deleted
    qResponse = solrClient.query(query);
    Assert.assertEquals(0, qResponse.getResults().getNumFound());
}
From source file:org.apache.nifi.processors.solr.TestPutSolrContentStream.java
License:Apache License
/**
 * Verify that the given SolrClient contains the expected SolrDocuments.
 */
private static void verifySolrDocuments(SolrClient solrServer, Collection<SolrDocument> expectedDocuments)
        throws IOException, SolrServerException {
    solrServer.commit();

    SolrQuery query = new SolrQuery("*:*");
    QueryResponse qResponse = solrServer.query(query);
    Assert.assertEquals(expectedDocuments.size(), qResponse.getResults().getNumFound());

    // verify documents have expected fields and values
    for (SolrDocument expectedDoc : expectedDocuments) {
        boolean found = false;
        for (SolrDocument solrDocument : qResponse.getResults()) {
            boolean foundAllFields = true;
            for (String expectedField : expectedDoc.getFieldNames()) {
                Object expectedVal = expectedDoc.getFirstValue(expectedField);
                Object actualVal = solrDocument.getFirstValue(expectedField);
                // accumulate across fields; the original overwrote the flag each
                // iteration, so only the last field was actually compared
                foundAllFields = foundAllFields && expectedVal.equals(actualVal);
            }
            if (foundAllFields) {
                found = true;
                break;
            }
        }
        Assert.assertTrue("Could not find " + expectedDoc, found);
    }
}