Usage examples for the `org.apache.solr.client.solrj.request.ContentStreamUpdateRequest` constructor:
public ContentStreamUpdateRequest(String url)
From source file:actors.SolrActor.java
License:Apache License
public void indexUpdated(SolrIndexEvent msg) { try {// w ww . j av a 2s.c o m System.out.println("SolrIndexEvent"); SolrInputDocument doc = msg.getDocuement(); //Making realtime GET System.out.println("GET"); SolrQuery parameters = new SolrQuery(); parameters.setRequestHandler("/get"); String f1 = doc.getFieldValue("literal.id").toString(); String f2 = doc.getFieldValue("literal.rev").toString(); parameters.set("id", f1); parameters.set("rev", f2); //System.out.println(parameters); QueryResponse response = server.query(parameters); NamedList<Object> result = response.getResponse(); //System.out.println(response.getResponse()); //System.out.println(result.size() ); //System.out.println(); //System.out.println(result); //validate the doc exists if (result == null || result.get("doc") == null) { System.out.println("/update/extract"); ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract"); // url dropbox URL url = new URL(doc.getFieldValue("literal.links").toString()); ContentStreamBase content = new ContentStreamBase.URLStream(url); System.out.println("ContentStreamBase"); req.addContentStream(content); // Adittionall metadata req.setParam("literal.id", doc.getFieldValue("literal.id").toString()); req.setParam("literal.title", doc.getFieldValue("literal.title").toString()); req.setParam("literal.rev", doc.getFieldValue("literal.rev").toString()); req.setParam("literal.when", doc.getFieldValue("literal.when").toString()); req.setParam("literal.path", doc.getFieldValue("literal.path").toString()); req.setParam("literal.icon", doc.getFieldValue("literal.icon").toString()); req.setParam("literal.size", doc.getFieldValue("literal.size").toString()); req.setParam("literal.url", doc.getFieldValue("literal.links").toString()); req.setParam("uprefix", "attr_"); req.setParam("fmap.content", "attr_content"); req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); //Requesting Solr result = server.request(req); //System.out.println("Result: " + 
result.toString()); } else { System.out.println("It's already update"); } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } }
From source file:at.kc.tugraz.ss.service.solr.impl.SSSolrImpl.java
License:Apache License
@Override public void solrAddDoc(final SSServPar parA) throws Exception { // according to Solr specification by adding a document with an ID already // existing in the index will replace the document (eg. refer to // http://stackoverflow.com/questions/8494923/solr-block-updating-of-existing-document or // http://lucene.apache.org/solr/api-4_0_0-ALPHA/doc-files/tutorial.html ) try {//w w w . j a v a2 s .c om final SSSolrAddDocPar par = new SSSolrAddDocPar(parA); final ContentStreamUpdateRequest csur = new ContentStreamUpdateRequest("/update/extract"); final NamedList<Object> response; csur.addContentStream(new ContentStreamBase.FileStream(new File(localWorkPath + par.id))); csur.setParam("literal.id", par.id); csur.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); response = solrUpdater.request(csur); SSLogU.info("document w/ id " + par.id + " added successfully. "); } catch (Exception error) { SSServErrReg.regErrThrow(error); } }
From source file:at.tugraz.sss.servs.db.impl.SSDBNoSQLSolrImpl.java
License:Apache License
@Override public void addDoc(final SSDBNoSQLAddDocPar par) throws SSErr { // according to Solr specification by adding a document with an ID already // existing in the index will replace the document (eg. refer to // http://stackoverflow.com/questions/8494923/solr-block-updating-of-existing-document or // http://lucene.apache.org/solr/api-4_0_0-ALPHA/doc-files/tutorial.html ) try {/*from w ww. j a va 2 s .c o m*/ final ContentStreamUpdateRequest csur = new ContentStreamUpdateRequest("/update/extract"); final NamedList<Object> response; csur.addContentStream(new ContentStreamBase.FileStream(new File(SSConf.getLocalWorkPath() + par.id))); csur.setParam("literal.id", par.id); // csur.setParam ("stream.type", "application/octet-stream"); csur.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); response = solrServer.request(csur); SSLogU.info("document w/ id " + par.id + " added successfully. "); } catch (Exception error) { SSServErrReg.regErrThrow(error); } }
From source file:com.hortonworks.streamline.streams.runtime.storm.bolt.solr.StreamlineSolrJsonMapper.java
License:Apache License
private SolrRequest<UpdateResponse> createSolrRequest(String json) { final ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(jsonUpdateUrl); final ContentStream cs = new ContentStreamBase.StringStream(json, CONTENT_TYPE); request.addContentStream(cs);//www . j a v a2s .c o m LOG.debug("Request generated with JSON: {}", json); return request; }
From source file:kesako.watcher.runnable.IndexFileProcess.java
License:Apache License
/** * Index the file fToIndex.<br>// ww w . j a v a 2s. com * To index a file, the method create a ContentStreamUpdateRequest object with the url "/update/extract".<br> * Then the object is initialized with initContentStream method, and sent to SOLR server. * @throws SQLException * @throws IOException * @throws SolrServerException */ private void indexFile(Connection cn, int idFichier) { String sqlQuery = ""; ResultSet rs; File fToIndex = null; ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/extract"); try { sqlQuery = "start transaction"; DBUtilities.executeQuery(cn, sqlQuery); sqlQuery = "select chemin as fileURI from t_fichiers where id_Fichier=" + idFichier; rs = DBUtilities.executeQuery(cn, sqlQuery); rs.next(); fToIndex = new File(rs.getString("fileURI")); logger.debug("indexation fichier " + fToIndex.getAbsolutePath()); initContentStream(cn, up, idFichier, fToIndex); //indexing the file SOLRUtilities.getSOLRServer().request(up); sqlQuery = "update t_fichiers set " + "FLAG=" + Constant.INDEXED + " where id_fichier=" + idFichier; DBUtilities.executeQuery(cn, sqlQuery); cn.commit(); } catch (SQLException e) { logger.debug("ERROR index :" + sqlQuery, e); try { cn.rollback(); } catch (SQLException e1) { logger.fatal("ERROR Rollback : " + sqlQuery, e1); } } catch (Exception e) { if (e.getMessage().indexOf("tika") > 0) { logger.fatal("TIKA ERROR : " + fToIndex.getPath()); } else { logger.fatal("Indexing ERROR : " + fToIndex.getPath(), e); } try { sqlQuery = "update t_fichiers set " + "FLAG=" + Constant.INDEXED_ERROR + " where id_fichier=" + idFichier; DBUtilities.executeQuery(cn, sqlQuery); cn.commit(); } catch (SQLException e1) { logger.fatal(sqlQuery, e1); try { cn.rollback(); } catch (SQLException e2) { logger.fatal("ERROR Rollback : " + sqlQuery, e2); } } } up = null; }
From source file:net.yacy.cora.federate.solr.connector.SolrServerConnector.java
License:Open Source License
public void add(final File file, final String solrId) throws IOException { final ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update/extract"); up.addFile(file, "application/octet-stream"); up.setParam("literal.id", solrId); up.setParam("uprefix", "attr_"); up.setParam("fmap.content", "attr_content"); up.setCommitWithin(-1);/*ww w . j a va 2 s . c o m*/ //up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true); try { this.server.request(up); } catch (final Throwable e) { clearCaches(); // prevent further OOM if this was caused by OOM throw new IOException(e); } }
From source file:org.apache.camel.component.solr.SolrProducer.java
License:Apache License
private void insert(Exchange exchange, boolean isStreaming) throws Exception { Object body = exchange.getIn().getBody(); if (body instanceof WrappedFile) { body = ((WrappedFile<?>) body).getFile(); }//from w w w . j a v a 2 s . c om if (body instanceof File) { MimetypesFileTypeMap mimeTypesMap = new MimetypesFileTypeMap(); String mimeType = mimeTypesMap.getContentType((File) body); ContentStreamUpdateRequest updateRequest = new ContentStreamUpdateRequest(getRequestHandler()); updateRequest.addFile((File) body, mimeType); for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) { if (entry.getKey().startsWith(SolrConstants.PARAM)) { String paramName = entry.getKey().substring(SolrConstants.PARAM.length()); updateRequest.setParam(paramName, entry.getValue().toString()); } } if (isStreaming) { updateRequest.process(streamingSolrServer); } else { updateRequest.process(solrServer); } } else if (body instanceof SolrInputDocument) { UpdateRequest updateRequest = new UpdateRequest(getRequestHandler()); updateRequest.add((SolrInputDocument) body); if (isStreaming) { updateRequest.process(streamingSolrServer); } else { updateRequest.process(solrServer); } } else { boolean hasSolrHeaders = false; for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) { if (entry.getKey().startsWith(SolrConstants.FIELD)) { hasSolrHeaders = true; break; } } if (hasSolrHeaders) { UpdateRequest updateRequest = new UpdateRequest(getRequestHandler()); SolrInputDocument doc = new SolrInputDocument(); for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) { if (entry.getKey().startsWith(SolrConstants.FIELD)) { String fieldName = entry.getKey().substring(SolrConstants.FIELD.length()); doc.setField(fieldName, entry.getValue()); } } updateRequest.add(doc); if (isStreaming) { updateRequest.process(streamingSolrServer); } else { updateRequest.process(solrServer); } } else if (body instanceof String) { String bodyAsString = 
(String) body; if (!bodyAsString.startsWith("<add")) { bodyAsString = "<add>" + bodyAsString + "</add>"; } DirectXmlRequest xmlRequest = new DirectXmlRequest(getRequestHandler(), bodyAsString); if (isStreaming) { streamingSolrServer.request(xmlRequest); } else { solrServer.request(xmlRequest); } } else { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "unable to find data in Exchange to update Solr"); } } }
From source file:org.apache.nifi.processors.solr.PutSolrContentStream.java
License:Apache License
/**
 * Sends the incoming FlowFile's content to Solr as a content stream.
 *
 * Flow: read processor properties (with attribute-expression evaluation against
 * the FlowFile), stream the FlowFile content into a ContentStreamUpdateRequest,
 * process it against the configured Solr client, then route the FlowFile to
 * success, failure, or connection_failure depending on which error slot (if any)
 * was populated by the read callback.
 */
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    // Error slots filled inside the read callback; inspected after session.read
    // returns. connectionError routes to REL_CONNECTION_FAILURE (penalized),
    // any other error to REL_FAILURE.
    final AtomicReference<Exception> error = new AtomicReference<>(null);
    final AtomicReference<Exception> connectionError = new AtomicReference<>(null);

    final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
    final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFile).getValue();
    final Long commitWithin = context.getProperty(COMMIT_WITHIN).evaluateAttributeExpressions(flowFile)
            .asLong();
    final String contentStreamPath = context.getProperty(CONTENT_STREAM_PATH)
            .evaluateAttributeExpressions(flowFile).getValue();
    final MultiMapSolrParams requestParams = new MultiMapSolrParams(getRequestParams(context, flowFile));

    StopWatch timer = new StopWatch(true);
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(contentStreamPath);
            request.setParams(new ModifiableSolrParams());

            // add the extra params, don't use 'set' in case of repeating params
            Iterator<String> paramNames = requestParams.getParameterNamesIterator();
            while (paramNames.hasNext()) {
                String paramName = paramNames.next();
                for (String paramValue : requestParams.getParams(paramName)) {
                    request.getParams().add(paramName, paramValue);
                }
            }

            // specify the collection for SolrCloud
            if (isSolrCloud) {
                request.setParam(COLLECTION_PARAM_NAME, collection);
            }

            if (commitWithin != null && commitWithin > 0) {
                request.setParam(COMMIT_WITHIN_PARAM_NAME, commitWithin.toString());
            }

            // if a username and password were provided then pass them for basic auth
            if (isBasicAuthEnabled()) {
                request.setBasicAuthCredentials(getUsername(), getPassword());
            }

            try (final BufferedInputStream bufferedIn = new BufferedInputStream(in)) {
                // add the FlowFile's content on the UpdateRequest
                request.addContentStream(new ContentStreamBase() {
                    @Override
                    public InputStream getStream() throws IOException {
                        return bufferedIn;
                    }

                    @Override
                    public String getContentType() {
                        return context.getProperty(CONTENT_TYPE).evaluateAttributeExpressions().getValue();
                    }
                });

                UpdateResponse response = request.process(getSolrClient());
                getLogger().debug("Got {} response from Solr", new Object[] { response.getStatus() });
            } catch (SolrException e) {
                error.set(e);
            } catch (SolrServerException e) {
                // Only an IOException-caused server error counts as a
                // connection problem; anything else is a plain failure.
                if (causedByIOException(e)) {
                    connectionError.set(e);
                } else {
                    error.set(e);
                }
            } catch (IOException e) {
                connectionError.set(e);
            }
        }
    });
    timer.stop();

    if (error.get() != null) {
        getLogger().error("Failed to send {} to Solr due to {}; routing to failure",
                new Object[] { flowFile, error.get() });
        session.transfer(flowFile, REL_FAILURE);
    } else if (connectionError.get() != null) {
        getLogger().error("Failed to send {} to Solr due to {}; routing to connection_failure",
                new Object[] { flowFile, connectionError.get() });
        // Penalize so the FlowFile is not immediately retried against a
        // presumably-unreachable Solr.
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_CONNECTION_FAILURE);
    } else {
        // Build a provenance transit URI: solr://<location>[:<collection>].
        StringBuilder transitUri = new StringBuilder("solr://");
        transitUri.append(getSolrLocation());
        if (isSolrCloud) {
            transitUri.append(":").append(collection);
        }
        final long duration = timer.getDuration(TimeUnit.MILLISECONDS);
        session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true);
        getLogger().info("Successfully sent {} to Solr in {} millis", new Object[] { flowFile, duration });
        session.transfer(flowFile, REL_SUCCESS);
    }
}
From source file:org.apache.storm.solr.mapper.SolrJsonMapper.java
License:Apache License
private SolrRequest createtSolrRequest(String json) { final ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(jsonUpdateUrl); final ContentStream cs = new ContentStreamBase.StringStream(json, CONTENT_TYPE); request.addContentStream(cs);//from w w w. j ava2s. c o m if (logger.isDebugEnabled()) { logger.debug("Request generated with JSON: " + json); } return request; }
From source file:org.craftercms.search.service.impl.SolrSearchService.java
License:Open Source License
/**
 * Indexes a file's extracted content plus metadata under "{site}:{id}".
 *
 * First attempts a content-stream extract request (file content + literal
 * params). If that fails with a SolrServerException, falls back to indexing
 * only the metadata document. I/O failures and fallback failures are rethrown
 * as SearchException.
 *
 * @param indexId          target index; defaults to defaultIndexId when empty
 * @param site             site name, used as the id prefix
 * @param id               document id within the site
 * @param file             file whose content should be extracted and indexed
 * @param additionalFields extra metadata fields to index alongside the content
 * @throws SearchException on I/O errors or when the metadata fallback also fails
 */
@Override
public void updateContent(String indexId, String site, String id, File file,
        Map<String, List<String>> additionalFields) throws SearchException {
    if (StringUtils.isEmpty(indexId)) {
        indexId = defaultIndexId;
    }

    String finalId = site + ":" + id;
    String fileName = FilenameUtils.getName(id);
    // MIME type is guessed from the file name portion of the id.
    String contentType = mimeTypesMap.getContentType(fileName);

    ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(SOLR_CONTENT_STREAM_UPDATE_URL);
    NamedList<Object> response;

    try {
        // Build the extraction literals (metadata passed alongside the stream)
        // and attach the file itself; commit immediately with waits enabled.
        ModifiableSolrParams params = solrDocumentBuilder.buildParams(site, id,
                ExtractingParams.LITERALS_PREFIX, null, additionalFields);
        params.set(ExtractingParams.LITERALS_PREFIX + fileNameFieldName, fileName);

        request.setParams(params);
        request.addFile(file, contentType);
        request.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);

        response = solrClient.request(request, indexId);
    } catch (SolrServerException e) {
        // Content extraction failed (e.g. Tika could not parse the file):
        // degrade gracefully to a metadata-only update instead of failing.
        logger.warn("{}Unable to update file through content stream request: {}. Attempting to perform just "
                + "the metadata update", getIndexPrefix(indexId), e.getMessage());
        try {
            SolrInputDocument inputDocument = solrDocumentBuilder.build(site, id, additionalFields);
            inputDocument.setField(fileNameFieldName, fileName);

            response = solrClient.add(indexId, inputDocument).getResponse();
        } catch (IOException e1) {
            throw new SearchException(indexId, "I/O error while executing update file for " + finalId, e1);
        } catch (SolrServerException e1) {
            throw new SearchException(indexId, e1.getMessage(), e1);
        }
    } catch (IOException e) {
        throw new SearchException(indexId, "I/O error while executing update file for " + finalId, e);
    }

    if (logger.isDebugEnabled()) {
        logger.debug(getSuccessfulMessage(indexId, finalId, "Update file", response));
    }
}