List of usage examples for org.apache.solr.update.processor UpdateRequestProcessor processAdd
public void processAdd(AddUpdateCommand cmd) throws IOException
From source file:com.sindicetech.siren.solr.handler.JsonLoader.java
License:Open Source License
@Override public void load(final SolrQueryRequest req, final SolrQueryResponse rsp, final ContentStream stream, final UpdateRequestProcessor processor) throws Exception { Reader reader = null;// w w w . j av a 2 s .c o m try { reader = stream.getReader(); // keep a copy of the body for the source entry // TODO: instead of reading the stream to make a copy, try to create a copy of the json // while parsing it in the JsonReader String body = IOUtils.toString(reader); FieldMappersHandler mappersHandler = new FieldMappersHandler(fieldMappers, req.getCore()); DocumentBuilder docBuilder = new DocumentBuilder(); // Add the source field entry FieldEntry source = new FieldEntry(SOURCE_FIELDNAME, body); docBuilder.add(mappersHandler.map(source)); // Add the id field initialised with a UUID. It will be overwritten if an id field exist in the JSON document. FieldEntry id = new FieldEntry(IdFieldMapper.INPUT_FIELD, UUID.randomUUID().toString().toLowerCase(Locale.ROOT)); docBuilder.add(mappersHandler.map(id)); JsonParser parser = mapper.getJsonFactory().createJsonParser(new StringReader(body)); JsonReader jreader = new JsonReader(parser); FieldEntry entry; while ((entry = jreader.next()) != null) { docBuilder.add(mappersHandler.map(entry)); } // the index schema might have changed req.updateSchemaToLatest(); // check that we have seen all the required field mappers Set<String> missingRequiredMappers = mappersHandler.getMissingRequiredMappers(); if (!missingRequiredMappers.isEmpty()) { throw new SolrException(BAD_REQUEST, "Document is missing the following required fields: " + missingRequiredMappers); } // Create and process the Add command AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = docBuilder.getSolrInputDocument(); processor.processAdd(cmd); } finally { IOUtils.closeQuietly(reader); } }
From source file:com.sindicetech.siren.solr.UpdateProcessorTestBase.java
License:Open Source License
/** * Runs a document through the specified chain, and returns the final * document used when the chain is completed (NOTE: some chains may * modify the document in place/*from ww w .java 2 s. co m*/ */ protected SolrInputDocument processAdd(final String chain, final SolrParams requestParams, final SolrInputDocument docIn) throws IOException { SolrCore core = h.getCore(); UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain); assertNotNull("No Chain named: " + chain, pc); SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryRequest req = new LocalSolrQueryRequest(core, requestParams); try { AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = docIn; UpdateRequestProcessor processor = pc.createProcessor(req, rsp); processor.processAdd(cmd); return cmd.solrDoc; } finally { req.close(); } }
From source file:de.qaware.chronix.solr.ingestion.AbstractIngestionHandler.java
License:Apache License
private void storeDocument(SolrInputDocument document, UpdateRequestProcessor processor, SolrQueryRequest req) throws IOException { LOGGER.debug("Adding Solr document..."); AddUpdateCommand cmd = new AddUpdateCommand(req); cmd.solrDoc = document;//w w w . j a va 2 s . c o m processor.processAdd(cmd); LOGGER.debug("Added Solr document"); }
From source file:lux.solr.SolrDocWriter.java
License:Mozilla Public License
/**
 * Writes a document to all shards of the SolrCloud collection via the
 * "lux-update-chain" update processor chain.
 *
 * @param solrDoc the document to distribute
 * @param uri     the document uri, used for the update command and logging
 * @throws LuxException wrapping any {@link IOException} raised while processing
 */
private void writeToCloud(SolrInputDocument solrDoc, String uri) {
    ArrayList<String> urls = xqueryComponent.getShardURLs(true);
    LoggerFactory.getLogger(getClass()).debug("writing " + uri + " to cloud at " + urls);
    SolrQueryResponse rsp = new SolrQueryResponse();
    SolrQueryRequest req = UpdateDocCommand.makeSolrRequest(core);
    ((ModifiableSolrParams) req.getParams()).add(ShardParams.SHARDS, urls.toArray(new String[0]));
    UpdateRequest updateReq = new UpdateRequest();
    updateReq.add(solrDoc);
    UpdateDocCommand cmd = new UpdateDocCommand(req, solrDoc, null, uri);
    UpdateRequestProcessorChain updateChain = xqueryComponent.getCore()
            .getUpdateProcessingChain("lux-update-chain");
    try {
        UpdateRequestProcessor processor = updateChain.createProcessor(req, rsp);
        try {
            processor.processAdd(cmd);
        } finally {
            // Fix: previously finish() was skipped when processAdd threw, leaking the
            // processor chain. An IOException from finish() still surfaces as LuxException.
            processor.finish();
        }
    } catch (IOException e) {
        throw new LuxException(e);
    }
}
From source file:org.alfresco.solr.component.QueryLoggingComponent.java
License:Open Source License
/**
 * Logs the current query into a sibling "<coreName>_qlog" core, if one exists.
 * Only non-shard (top-level) requests are logged. The log document records an id,
 * timestamp, the requesting user (first USER authority found in the Alfresco JSON,
 * or "<UNKNOWN>"), the raw user query, the parsed Lucene query, the hit count and
 * the request time.
 *
 * @param rb the response builder for the request being logged
 * @throws IOException if the add command fails
 */
private void log(ResponseBuilder rb) throws IOException {
    // Shard sub-requests would double-log the same user query; skip them.
    boolean isShard = rb.req.getParams().getBool(ShardParams.IS_SHARD, false);
    if (!isShard) {
        CoreContainer container = rb.req.getCore().getCoreContainer();
        // NOTE(review): container.getCore() increments the core's reference count and
        // logCore is never close()d here — looks like a reference leak; verify.
        SolrCore logCore = container.getCore(rb.req.getCore().getName() + "_qlog");
        if (logCore != null) {
            JSONObject json = (JSONObject) rb.req.getContext().get(AbstractQParser.ALFRESCO_JSON);
            SolrQueryRequest request = null;
            UpdateRequestProcessor processor = null;
            try {
                request = new LocalSolrQueryRequest(logCore, new NamedList<>());
                processor = logCore.getUpdateProcessingChain(null).createProcessor(request,
                        new SolrQueryResponse());
                AddUpdateCommand cmd = new AddUpdateCommand(request);
                cmd.overwrite = true;
                SolrInputDocument input = new SolrInputDocument();
                input.addField("id", GUID.generate());
                input.addField("_version_", "1");
                input.addField("timestamp", DateTimeFormatter.ISO_INSTANT.format(Instant.now()));
                if (json != null) {
                    try {
                        // Collect the authorities from the request JSON and log the first
                        // plain USER authority as the requesting user.
                        ArrayList<String> authorityList = new ArrayList<String>(1);
                        JSONArray authorities = json.getJSONArray("authorities");
                        for (int i = 0; i < authorities.length(); i++) {
                            String authorityString = authorities.getString(i);
                            authorityList.add(authorityString);
                        }
                        for (String authority : authorityList) {
                            if (AuthorityType.getAuthorityType(authority) == AuthorityType.USER) {
                                input.addField("user", authority);
                                break;
                            }
                        }
                    } catch (JSONException e) {
                        // Malformed/missing authorities — fall back to an unknown user.
                        input.addField("user", "<UNKNOWN>");
                    }
                } else {
                    input.addField("user", "<UNKNOWN>");
                }
                // User query resolution order: spellcheck.q, then the JSON "query"
                // field, then the plain q parameter.
                String userQuery = rb.req.getParams().get(SpellingParams.SPELLCHECK_Q);
                if (userQuery == null) {
                    if (json != null) {
                        try {
                            userQuery = json.getString("query");
                        } catch (JSONException e) {
                            // No "query" key in the JSON — try the q parameter below.
                        }
                    }
                }
                if (userQuery == null) {
                    userQuery = rb.req.getParams().get(CommonParams.Q);
                }
                if (userQuery != null) {
                    input.addField("user_query", userQuery);
                }
                // Also record the parsed Lucene query for diagnostics.
                Query query = rb.getQuery();
                input.addField("query", query.toString());
                if (rb.getResults().docList != null) {
                    input.addField("found", rb.getResults().docList.matches());
                }
                input.addField("time", rb.req.getRequestTimer().getTime());
                cmd.solrDoc = input;
                processor.processAdd(cmd);
            } finally {
                if (processor != null) {
                    processor.finish();
                }
                if (request != null) {
                    request.close();
                }
            }
        }
    }
}
From source file:org.alfresco.solr.SolrInformationServer.java
License:Open Source License
@Override public long indexAcl(List<AclReaders> aclReaderList, boolean overwrite) throws IOException { long start = System.nanoTime(); UpdateRequestProcessor processor = null; try (SolrQueryRequest request = newSolrQueryRequest()) { processor = this.core.getUpdateProcessingChain(null).createProcessor(request, newSolrQueryResponse()); for (AclReaders aclReaders : aclReaderList) { AddUpdateCommand cmd = new AddUpdateCommand(request); cmd.overwrite = overwrite;//from ww w . ja va 2s. c o m SolrInputDocument input = new SolrInputDocument(); String id = AlfrescoSolrDataModel.getAclDocumentId(aclReaders.getTenantDomain(), aclReaders.getId()); input.addField(FIELD_SOLR4_ID, id); input.addField(FIELD_VERSION, "0"); input.addField(FIELD_ACLID, aclReaders.getId()); input.addField(FIELD_INACLTXID, aclReaders.getAclChangeSetId()); String tenant = aclReaders.getTenantDomain(); for (String reader : aclReaders.getReaders()) { reader = addTenantToAuthority(reader, tenant); input.addField(FIELD_READER, reader); } for (String denied : aclReaders.getDenied()) { denied = addTenantToAuthority(denied, tenant); input.addField(FIELD_DENIED, denied); } input.addField(FIELD_DOC_TYPE, DOC_TYPE_ACL); cmd.solrDoc = input; processor.processAdd(cmd); } } finally { if (processor != null) { processor.finish(); } } long end = System.nanoTime(); return (end - start); }
From source file:org.alfresco.solr.SolrInformationServer.java
License:Open Source License
@Override public void indexAclTransaction(AclChangeSet changeSet, boolean overwrite) throws IOException { canUpdate();//from w w w .jav a 2 s . c o m UpdateRequestProcessor processor = null; try (SolrQueryRequest request = newSolrQueryRequest()) { processor = this.core.getUpdateProcessingChain(null).createProcessor(request, newSolrQueryResponse()); AddUpdateCommand cmd = new AddUpdateCommand(request); cmd.overwrite = overwrite; SolrInputDocument input = new SolrInputDocument(); input.addField(FIELD_SOLR4_ID, AlfrescoSolrDataModel.getAclChangeSetDocumentId(changeSet.getId())); input.addField(FIELD_VERSION, "0"); input.addField(FIELD_ACLTXID, changeSet.getId()); input.addField(FIELD_INACLTXID, changeSet.getId()); input.addField(FIELD_ACLTXCOMMITTIME, changeSet.getCommitTimeMs()); input.addField(FIELD_DOC_TYPE, DOC_TYPE_ACL_TX); cmd.solrDoc = input; processor.processAdd(cmd); putAclTransactionState(processor, request, changeSet); } finally { if (processor != null) { processor.finish(); } } }
From source file:org.alfresco.solr.SolrInformationServer.java
License:Open Source License
@Override public void capIndex(long dbid) throws IOException { UpdateRequestProcessor processor = null; try (SolrQueryRequest request = newSolrQueryRequest()) { processor = this.core.getUpdateProcessingChain(null).createProcessor(request, newSolrQueryResponse()); SolrInputDocument input = new SolrInputDocument(); input.addField(FIELD_SOLR4_ID, INDEX_CAP_ID); input.addField(FIELD_VERSION, 0); input.addField(FIELD_DBID, -dbid); //Making this negative to ensure it is never confused with node DBID input.addField(FIELD_DOC_TYPE, DOC_TYPE_STATE); AddUpdateCommand cmd = new AddUpdateCommand(request); cmd.overwrite = true;/*w ww .j a v a 2 s . c om*/ cmd.solrDoc = input; processor.processAdd(cmd); } finally { if (processor != null) { processor.finish(); } } }
From source file:org.alfresco.solr.SolrInformationServer.java
License:Open Source License
/**
 * Indexes (or de-indexes) a single node according to its status.
 * <p>
 * Deleted/unknown nodes are removed from the index (and, for DELETED, from the
 * content store); updated/unknown nodes are re-fetched from the repository and
 * re-indexed, honouring the cm:isIndexed property. Any exception is converted
 * into an "error node" document so the failure is visible in the index instead
 * of aborting tracking.
 *
 * @param node      the node to index
 * @param overwrite whether add commands overwrite existing documents
 * @throws IOException   if an add command fails
 * @throws JSONException if repository metadata cannot be parsed
 */
@Override
public void indexNode(Node node, boolean overwrite) throws IOException, JSONException {
    SolrQueryRequest request = null;
    UpdateRequestProcessor processor = null;
    try {
        request = newSolrQueryRequest();
        processor = this.core.getUpdateProcessingChain(null).createProcessor(request, newSolrQueryResponse());
        long start = System.nanoTime();

        // --- Deletion path: DELETED, NON_SHARD_* and UNKNOWN all start with a delete. ---
        if ((node.getStatus() == SolrApiNodeStatus.DELETED)
                || (node.getStatus() == SolrApiNodeStatus.NON_SHARD_DELETED)
                || (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)
                || (node.getStatus() == SolrApiNodeStatus.UNKNOWN)) {
            // fix up any secondary paths
            NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
            nmdp.setFromNodeId(node.getId());
            nmdp.setToNodeId(node.getId());
            List<NodeMetaData> nodeMetaDatas;
            if ((node.getStatus() == SolrApiNodeStatus.DELETED)
                    || (node.getStatus() == SolrApiNodeStatus.NON_SHARD_DELETED)
                    || (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)) {
                // Fake the empty node metadata for this parent deleted node
                NodeMetaData nodeMetaData = createDeletedNodeMetaData(node);
                nodeMetaDatas = Collections.singletonList(nodeMetaData);
            } else {
                nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);
            }
            NodeMetaData nodeMetaData;
            if (!nodeMetaDatas.isEmpty()) {
                nodeMetaData = nodeMetaDatas.get(0);
                // Only act if the metadata is not from a later transaction than this event.
                if (!(nodeMetaData.getTxnId() > node.getTxnId())) {
                    if (node.getStatus() == SolrApiNodeStatus.DELETED) {
                        try {
                            // Lock the node to ensure that no other trackers work with
                            // this node until this code completes.
                            lock(nodeMetaData.getId());
                            solrContentStore.removeDocFromContentStore(nodeMetaData);
                        } finally {
                            unlock(nodeMetaData.getId());
                        }
                    }
                }
                // else, the node has moved on to a later transaction, and it will be indexed later
            }
            deleteNode(processor, request, node);
        }

        // --- Update path: UPDATED, UNKNOWN and NON_SHARD_UPDATED get (re-)indexed. ---
        if ((node.getStatus() == SolrApiNodeStatus.UPDATED) || (node.getStatus() == SolrApiNodeStatus.UNKNOWN)
                || (node.getStatus() == SolrApiNodeStatus.NON_SHARD_UPDATED)) {
            long nodeId = node.getId();
            LOGGER.info("Updating node {}", nodeId);
            try {
                lock(nodeId);

                NodeMetaDataParameters nmdp = new NodeMetaDataParameters();
                nmdp.setFromNodeId(node.getId());
                nmdp.setToNodeId(node.getId());
                List<NodeMetaData> nodeMetaDatas = repositoryClient.getNodesMetaData(nmdp, Integer.MAX_VALUE);

                AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
                addDocCmd.overwrite = overwrite;

                if (!nodeMetaDatas.isEmpty()) {
                    NodeMetaData nodeMetaData = nodeMetaDatas.get(0);
                    if (node.getTxnId() == Long.MAX_VALUE) {
                        // This is a re-index: clear the transaction from the
                        // clean-content cache so its content is re-processed.
                        this.cleanContentCache.remove(nodeMetaData.getTxnId());
                    }
                    if ((node.getStatus() == SolrApiNodeStatus.UPDATED)
                            || (node.getStatus() == SolrApiNodeStatus.UNKNOWN)) {
                        // check index control: cm:isIndexed=false means the node is
                        // stored only as an "unindexed" marker document.
                        Map<QName, PropertyValue> properties = nodeMetaData.getProperties();
                        StringPropertyValue pValue = (StringPropertyValue) properties
                                .get(ContentModel.PROP_IS_INDEXED);
                        if (pValue != null) {
                            boolean isIndexed = Boolean.valueOf(pValue.getValue());
                            if (!isIndexed) {
                                LOGGER.debug("Clearing unindexed");
                                deleteNode(processor, request, node);
                                SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_UNINDEXED_NODE);
                                solrContentStore.storeDocOnSolrContentStore(nodeMetaData, doc);
                                addDocCmd.solrDoc = doc;
                                processor.processAdd(addDocCmd);
                                long end = System.nanoTime();
                                this.trackerStats.addNodeTime(end - start);
                                // Early exit: stats recorded; finally block still
                                // finishes the processor and closes the request.
                                return;
                            }
                        }
                        // Normal path: delete then re-add the full node document.
                        deleteNode(processor, request, node);
                        SolrInputDocument doc = createNewDoc(nodeMetaData, DOC_TYPE_NODE);
                        addToNewDocAndCache(nodeMetaData, doc);
                        addDocCmd.solrDoc = doc;
                        processor.processAdd(addDocCmd);
                    }
                } // Ends checking for a nodeMetaData
            } finally {
                unlock(nodeId);
            }
        } // Ends checking for updated or unknown node status
        long end = System.nanoTime();
        this.trackerStats.addNodeTime(end - start);
    } catch (Exception e) {
        // Never abort tracking on a single bad node: log it, remove whatever was
        // indexed for it, and index an error document carrying the stack trace.
        LOGGER.warning("Node index failed and skipped for {} in Tx {}", node.getId(), node.getTxnId(), e);
        if (processor == null) {
            if (request == null) {
                request = newSolrQueryRequest();
            }
            processor = this.core.getUpdateProcessingChain(null).createProcessor(request, newSolrQueryResponse());
        }
        deleteNode(processor, request, node);

        AddUpdateCommand addDocCmd = new AddUpdateCommand(request);
        addDocCmd.overwrite = overwrite;

        SolrInputDocument doc = new SolrInputDocument();
        doc.addField(FIELD_SOLR4_ID, PREFIX_ERROR + node.getId());
        doc.addField(FIELD_VERSION, "0");
        doc.addField(FIELD_DBID, node.getId());
        doc.addField(FIELD_INTXID, node.getTxnId());
        doc.addField(FIELD_EXCEPTION_MESSAGE, e.getMessage());
        doc.addField(FIELD_DOC_TYPE, DOC_TYPE_ERROR_NODE);

        StringWriter stringWriter = new StringWriter(4096);
        try (PrintWriter printWriter = new PrintWriter(stringWriter, true)) {
            e.printStackTrace(printWriter);
            String stack = stringWriter.toString();
            // Lucene rejects terms longer than 32766 bytes; truncate the stack trace.
            doc.addField(FIELD_EXCEPTION_STACK, stack.length() < 32766 ? stack : stack.substring(0, 32765));
        }

        addDocCmd.solrDoc = doc;
        processor.processAdd(addDocCmd);
    } finally {
        if (processor != null) {
            processor.finish();
        }
        if (request != null) {
            request.close();
        }
    }
}
From source file:org.alfresco.solr.SolrInformationServer.java
License:Open Source License
@Override public void updateContentToIndexAndCache(long dbId, String tenant) throws Exception { SolrQueryRequest request = null;/*from w w w . j a v a 2 s . co m*/ UpdateRequestProcessor processor = null; try { lock(dbId); request = newSolrQueryRequest(); processor = this.core.getUpdateProcessingChain(null).createProcessor(request, newSolrQueryResponse()); SolrInputDocument doc = solrContentStore.retrieveDocFromSolrContentStore(tenant, dbId); if (doc == null) { LOGGER.warning("There is no cached doc in the Solr content store with tenant [" + tenant + "] and dbId [" + dbId + "].\n" + "This should only happen if the content has been removed from the Solr content store.\n" + "Recreating cached doc ... "); doc = recreateSolrDoc(dbId, tenant); // if we did not build it again it has been deleted // We do the delete here to avoid doing this again if it for some reason persists in teh index // This is a work around for ACE-3228/ACE-3258 and the way stores are expunged when deleting a tenant if (doc == null) { deleteNode(processor, request, dbId); } } if (doc != null) { addContentToDoc(doc, dbId); // Marks as clean since the doc's content is now up to date markFTSStatus(doc, FTSStatus.Clean); solrContentStore.storeDocOnSolrContentStore(tenant, dbId, doc); // Add to index AddUpdateCommand addDocCmd = new AddUpdateCommand(request); addDocCmd.overwrite = true; addDocCmd.solrDoc = doc; processor.processAdd(addDocCmd); } } finally { unlock(dbId); if (processor != null) { processor.finish(); } if (request != null) { request.close(); } } }