Example usage for org.apache.solr.common.params MultiMapSolrParams MultiMapSolrParams

List of usage examples for org.apache.solr.common.params MultiMapSolrParams MultiMapSolrParams

Introduction

On this page you can find example usage of the constructor org.apache.solr.common.params.MultiMapSolrParams#MultiMapSolrParams(Map<String, String[]>).

Prototype

public MultiMapSolrParams(Map<String, String[]> map) 
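
Before the project examples, a minimal, self-contained sketch (not taken from any of the sources below; the parameter names are arbitrary) of how the constructor might be used. It also illustrates that the constructor wraps the supplied map rather than copying it, which the last example on this page relies on by populating the map after construction:

import java.util.HashMap;
import java.util.Map;

import org.apache.solr.common.params.MultiMapSolrParams;

public class MultiMapSolrParamsSketch {
    public static void main(String[] args) {
        Map<String, String[]> map = new HashMap<>();
        map.put("q", new String[] { "*:*" });
        map.put("fq", new String[] { "type:article", "lang:en" });

        // the constructor keeps a reference to the map, it does not copy it
        MultiMapSolrParams params = new MultiMapSolrParams(map);

        String query = params.get("q");            // first (and only) value: "*:*"
        String[] filters = params.getParams("fq"); // both fq values

        // the static helper appends a value for a key directly in the backing map,
        // so it becomes visible through params as well
        MultiMapSolrParams.addParam("fq", "year:2020", map);
        System.out.println(query + " / " + filters.length + " -> " + params.getParams("fq").length);
    }
}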

Usage

From source file:net.hasor.search.server.rsf.service.SorlSearchService.java

License:Apache License

@Override
public QuerySearchResult query(SearchQuery searchQuery) throws Throwable {
    SolrQuery solrQuery = new SolrQuery();
    solrQuery.add(new MultiMapSolrParams(searchQuery.toMap()));
    QueryResponse response = getSolrClient().query(solrQuery);
    SolrDocumentList docList = response.getResults();
    List<SearchDocument> documentList = new ArrayList<SearchDocument>();
    if (docList != null) {
        for (SolrDocument solrDocument : docList) {
            SearchDocument document = convetTo(solrDocument);
            documentList.add(document);
        }
    }
    //
    QuerySearchResult searchResult = new QuerySearchResult(documentList);
    searchResult.setElapsedTime(response.getElapsedTime());
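    // note: unlike the loop above, these docList calls are not guarded against a null result list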
    searchResult.setMaxScore(docList.getMaxScore());
    searchResult.setNumFound(docList.getNumFound());
    searchResult.setStart(docList.getStart());
    searchResult.setStatus(response.getStatus());
    searchResult.setQueryTime(response.getQTime());
    return searchResult;
}

From source file:net.yacy.server.serverObjects.java

License:Open Source License

public serverObjects() {
    super();
    this.map = new MultiMapSolrParams(new HashMap<String, String[]>());
}

From source file:net.yacy.server.serverObjects.java

License:Open Source License

protected serverObjects(final Map<String, String[]> input) {
    super();
    this.map = new MultiMapSolrParams(input);
}

From source file:org.apache.nifi.processors.solr.PutSolrContentStream.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final AtomicReference<Exception> error = new AtomicReference<>(null);
    final AtomicReference<Exception> connectionError = new AtomicReference<>(null);

    final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
    final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFile).getValue();
    final Long commitWithin = context.getProperty(COMMIT_WITHIN).evaluateAttributeExpressions(flowFile)
            .asLong();
    final String contentStreamPath = context.getProperty(CONTENT_STREAM_PATH)
            .evaluateAttributeExpressions(flowFile).getValue();
    final MultiMapSolrParams requestParams = new MultiMapSolrParams(getRequestParams(context, flowFile));

    StopWatch timer = new StopWatch(true);
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(contentStreamPath);
            request.setParams(new ModifiableSolrParams());

            // add the extra params, don't use 'set' in case of repeating params
            Iterator<String> paramNames = requestParams.getParameterNamesIterator();
            while (paramNames.hasNext()) {
                String paramName = paramNames.next();
                for (String paramValue : requestParams.getParams(paramName)) {
                    request.getParams().add(paramName, paramValue);
                }
            }

            // specify the collection for SolrCloud
            if (isSolrCloud) {
                request.setParam(COLLECTION_PARAM_NAME, collection);
            }

            if (commitWithin != null && commitWithin > 0) {
                request.setParam(COMMIT_WITHIN_PARAM_NAME, commitWithin.toString());
            }

            // if a username and password were provided then pass them for basic auth
            if (isBasicAuthEnabled()) {
                request.setBasicAuthCredentials(getUsername(), getPassword());
            }

            try (final BufferedInputStream bufferedIn = new BufferedInputStream(in)) {
                // add the FlowFile's content on the UpdateRequest
                request.addContentStream(new ContentStreamBase() {
                    @Override
                    public InputStream getStream() throws IOException {
                        return bufferedIn;
                    }

                    @Override
                    public String getContentType() {
                        return context.getProperty(CONTENT_TYPE).evaluateAttributeExpressions().getValue();
                    }
                });

                UpdateResponse response = request.process(getSolrClient());
                getLogger().debug("Got {} response from Solr", new Object[] { response.getStatus() });
            } catch (SolrException e) {
                error.set(e);
            } catch (SolrServerException e) {
                if (causedByIOException(e)) {
                    connectionError.set(e);
                } else {
                    error.set(e);
                }
            } catch (IOException e) {
                connectionError.set(e);
            }
        }
    });
    timer.stop();

    if (error.get() != null) {
        getLogger().error("Failed to send {} to Solr due to {}; routing to failure",
                new Object[] { flowFile, error.get() });
        session.transfer(flowFile, REL_FAILURE);
    } else if (connectionError.get() != null) {
        getLogger().error("Failed to send {} to Solr due to {}; routing to connection_failure",
                new Object[] { flowFile, connectionError.get() });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_CONNECTION_FAILURE);
    } else {
        StringBuilder transitUri = new StringBuilder("solr://");
        transitUri.append(getSolrLocation());
        if (isSolrCloud) {
            transitUri.append(":").append(collection);
        }

        final long duration = timer.getDuration(TimeUnit.MILLISECONDS);
        session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true);
        getLogger().info("Successfully sent {} to Solr in {} millis", new Object[] { flowFile, duration });
        session.transfer(flowFile, REL_SUCCESS);
    }
}

From source file:org.apache.nifi.processors.solr.PutSolrRecord.java

License:Apache License

@Override
public void doOnTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final AtomicReference<Exception> error = new AtomicReference<>(null);
    final AtomicReference<Exception> connectionError = new AtomicReference<>(null);

    final boolean isSolrCloud = SOLR_TYPE_CLOUD.getValue().equals(context.getProperty(SOLR_TYPE).getValue());
    final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFile).getValue();
    final Long commitWithin = context.getProperty(COMMIT_WITHIN).evaluateAttributeExpressions(flowFile)
            .asLong();
    final String contentStreamPath = context.getProperty(UPDATE_PATH).evaluateAttributeExpressions(flowFile)
            .getValue();
    final MultiMapSolrParams requestParams = new MultiMapSolrParams(
            SolrUtils.getRequestParams(context, flowFile));
    final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER)
            .asControllerService(RecordReaderFactory.class);
    final String fieldsToIndex = context.getProperty(FIELDS_TO_INDEX).evaluateAttributeExpressions(flowFile)
            .getValue();
    final Long batchSize = context.getProperty(BATCH_SIZE).evaluateAttributeExpressions(flowFile).asLong();

    final List<String> fieldList = new ArrayList<>();
    if (!StringUtils.isBlank(fieldsToIndex)) {
        Arrays.stream(fieldsToIndex.split(",")).forEach(f -> fieldList.add(f.trim()));
    }
    StopWatch timer = new StopWatch(true);
    try (final InputStream in = session.read(flowFile);
            final RecordReader reader = readerFactory.createRecordReader(flowFile, in, getLogger())) {

        Record record;
        List<SolrInputDocument> inputDocumentList = new LinkedList<>();
        try {
            while ((record = reader.nextRecord()) != null) {
                SolrInputDocument inputDoc = new SolrInputDocument();
                writeRecord(record, inputDoc, fieldList, EMPTY_STRING);
                inputDocumentList.add(inputDoc);
                if (inputDocumentList.size() == batchSize) {
                    index(isSolrCloud, collection, commitWithin, contentStreamPath, requestParams,
                            inputDocumentList);
                    inputDocumentList = new ArrayList<>();
                }
                index(isSolrCloud, collection, commitWithin, contentStreamPath, requestParams,
                        inputDocumentList);
            }
        } catch (SolrException e) {
            error.set(e);
        } catch (SolrServerException e) {
            if (causedByIOException(e)) {
                //Exit in case of a solr connection error
                connectionError.set(e);
            } else {
                error.set(e);
            }
        } catch (IOException e) {
            //Exit in case of a solr connection error
            connectionError.set(e);
        }
    } catch (final IOException | SchemaNotFoundException | MalformedRecordException e) {
        getLogger().error("Could not parse incoming data", e);
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    timer.stop();

    if (error.get() != null) {
        getLogger().error("Failed to send all the records of the {} to Solr due to {}; routing to failure",
                new Object[] { flowFile, error.get() });
        session.transfer(flowFile, REL_FAILURE);
    } else if (connectionError.get() != null) {
        getLogger().error("Failed to send {} to Solr due to {}; routing to connection_failure",
                new Object[] { flowFile, connectionError.get() });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_CONNECTION_FAILURE);
    } else {
        StringBuilder transitUri = new StringBuilder("solr://");
        transitUri.append(getSolrLocation());
        if (isSolrCloud) {
            transitUri.append(":").append(collection);
        }

        final long duration = timer.getDuration(TimeUnit.MILLISECONDS);
        session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true);
        getLogger().info("Successfully sent {} to Solr in {} millis", new Object[] { flowFile, duration });
        session.transfer(flowFile, REL_SUCCESS);
    }
}

From source file:org.apache.nifi.processors.solr.QuerySolr.java

License:Apache License

@Override
public void doOnTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLogger();

    FlowFile flowFileOriginal = session.get();
    FlowFile flowFileResponse;

    if (flowFileOriginal == null) {
        if (context.hasNonLoopConnection()) {
            return;
        }
        flowFileResponse = session.create();
    } else {
        flowFileResponse = session.create(flowFileOriginal);
    }

    final SolrQuery solrQuery = new SolrQuery();
    final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
    final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFileResponse)
            .getValue();

    final StringBuilder transitUri = new StringBuilder("solr://");
    transitUri.append(getSolrLocation());
    if (isSolrCloud) {
        transitUri.append(":").append(collection);
    }
    final StopWatch timer = new StopWatch(false);

    try {
        solrQuery.setQuery(context.getProperty(SOLR_PARAM_QUERY).evaluateAttributeExpressions(flowFileResponse)
                .getValue());
        solrQuery.setRequestHandler(context.getProperty(SOLR_PARAM_REQUEST_HANDLER)
                .evaluateAttributeExpressions(flowFileResponse).getValue());

        if (context.getProperty(SOLR_PARAM_FIELD_LIST).isSet()) {
            for (final String field : context.getProperty(SOLR_PARAM_FIELD_LIST)
                    .evaluateAttributeExpressions(flowFileResponse).getValue().split(",")) {
                solrQuery.addField(field.trim());
            }
        }

        // Avoid ArrayIndexOutOfBoundsException due to incorrectly configured sorting
        try {
            if (context.getProperty(SOLR_PARAM_SORT).isSet()) {
                final List<SolrQuery.SortClause> sortings = new ArrayList<>();
                for (final String sorting : context.getProperty(SOLR_PARAM_SORT)
                        .evaluateAttributeExpressions(flowFileResponse).getValue().split(",")) {
                    final String[] sortEntry = sorting.trim().split(" ");
                    sortings.add(new SolrQuery.SortClause(sortEntry[0], sortEntry[1]));
                }
                solrQuery.setSorts(sortings);
            }
        } catch (Exception e) {
            throw new ProcessException("Error while parsing the sort clauses for the Solr query");
        }

        final Integer startParam = context.getProperty(SOLR_PARAM_START).isSet()
                ? Integer.parseInt(context.getProperty(SOLR_PARAM_START)
                        .evaluateAttributeExpressions(flowFileResponse).getValue())
                : CommonParams.START_DEFAULT;

        solrQuery.setStart(startParam);

        final Integer rowParam = context.getProperty(SOLR_PARAM_ROWS).isSet()
                ? Integer.parseInt(context.getProperty(SOLR_PARAM_ROWS)
                        .evaluateAttributeExpressions(flowFileResponse).getValue())
                : CommonParams.ROWS_DEFAULT;

        solrQuery.setRows(rowParam);

        final Map<String, String[]> additionalSolrParams = SolrUtils.getRequestParams(context,
                flowFileResponse);

        final Set<String> searchComponents = extractSearchComponents(additionalSolrParams);
        solrQuery.add(new MultiMapSolrParams(additionalSolrParams));

        final Map<String, String> attributes = new HashMap<>();
        attributes.put(ATTRIBUTE_SOLR_CONNECT, getSolrLocation());
        if (isSolrCloud) {
            attributes.put(ATTRIBUTE_SOLR_COLLECTION, collection);
        }
        attributes.put(ATTRIBUTE_SOLR_QUERY, solrQuery.toString());
        if (flowFileOriginal != null) {
            flowFileOriginal = session.putAllAttributes(flowFileOriginal, attributes);
        }

        flowFileResponse = session.putAllAttributes(flowFileResponse, attributes);

        final boolean getEntireResults = RETURN_ALL_RESULTS
                .equals(context.getProperty(AMOUNT_DOCUMENTS_TO_RETURN).getValue());
        boolean processFacetsAndStats = true;
        boolean continuePaging = true;

        while (continuePaging) {

            timer.start();

            Map<String, String> responseAttributes = new HashMap<>();
            responseAttributes.put(ATTRIBUTE_SOLR_START, solrQuery.getStart().toString());
            responseAttributes.put(ATTRIBUTE_SOLR_ROWS, solrQuery.getRows().toString());

            if (solrQuery.getStart() > UPPER_LIMIT_START_PARAM) {
                logger.warn(
                        "The start parameter of Solr query {} exceeded the upper limit of {}. The query will not be processed "
                                + "to avoid performance or memory issues on the part of Solr.",
                        new Object[] { solrQuery.toString(), UPPER_LIMIT_START_PARAM });
                flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);
                timer.stop();
                break;
            }

            final QueryRequest req = new QueryRequest(solrQuery);
            if (isBasicAuthEnabled()) {
                req.setBasicAuthCredentials(getUsername(), getPassword());
            }

            final QueryResponse response = req.process(getSolrClient());
            timer.stop();

            final Long totalNumberOfResults = response.getResults().getNumFound();

            responseAttributes.put(ATTRIBUTE_SOLR_NUMBER_RESULTS, totalNumberOfResults.toString());
            responseAttributes.put(ATTRIBUTE_CURSOR_MARK, response.getNextCursorMark());
            responseAttributes.put(ATTRIBUTE_SOLR_STATUS, String.valueOf(response.getStatus()));
            responseAttributes.put(ATTRIBUTE_QUERY_TIME, String.valueOf(response.getQTime()));
            flowFileResponse = session.putAllAttributes(flowFileResponse, responseAttributes);

            if (response.getResults().size() > 0) {

                if (context.getProperty(RETURN_TYPE).getValue().equals(MODE_XML.getValue())) {
                    flowFileResponse = session.write(flowFileResponse,
                            SolrUtils.getOutputStreamCallbackToTransformSolrResponseToXml(response));
                    flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(),
                            MIME_TYPE_XML);
                } else {
                    final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER)
                            .evaluateAttributeExpressions(flowFileResponse)
                            .asControllerService(RecordSetWriterFactory.class);
                    final RecordSchema schema = writerFactory.getSchema(flowFileResponse.getAttributes(), null);
                    final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(),
                            schema);
                    final StringBuffer mimeType = new StringBuffer();
                    final FlowFile flowFileResponseRef = flowFileResponse;
                    flowFileResponse = session.write(flowFileResponse, out -> {
                        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out,
                                flowFileResponseRef)) {
                            writer.write(recordSet);
                            writer.flush();
                            mimeType.append(writer.getMimeType());
                        } catch (SchemaNotFoundException e) {
                            throw new ProcessException("Could not parse Solr response", e);
                        }
                    });
                    flowFileResponse = session.putAttribute(flowFileResponse, CoreAttributes.MIME_TYPE.key(),
                            mimeType.toString());
                }

                if (processFacetsAndStats) {
                    if (searchComponents.contains(FacetParams.FACET)) {
                        FlowFile flowFileFacets = session.create(flowFileResponse);
                        flowFileFacets = session.write(flowFileFacets, out -> {
                            try (final OutputStreamWriter osw = new OutputStreamWriter(out);
                                    final JsonWriter writer = new JsonWriter(osw)) {
                                addFacetsFromSolrResponseToJsonWriter(response, writer);
                            }
                        });
                        flowFileFacets = session.putAttribute(flowFileFacets, CoreAttributes.MIME_TYPE.key(),
                                MIME_TYPE_JSON);
                        session.getProvenanceReporter().receive(flowFileFacets, transitUri.toString(),
                                timer.getDuration(TimeUnit.MILLISECONDS));
                        session.transfer(flowFileFacets, FACETS);
                    }

                    if (searchComponents.contains(StatsParams.STATS)) {
                        FlowFile flowFileStats = session.create(flowFileResponse);
                        flowFileStats = session.write(flowFileStats, out -> {
                            try (final OutputStreamWriter osw = new OutputStreamWriter(out);
                                    final JsonWriter writer = new JsonWriter(osw)) {
                                addStatsFromSolrResponseToJsonWriter(response, writer);
                            }
                        });
                        flowFileStats = session.putAttribute(flowFileStats, CoreAttributes.MIME_TYPE.key(),
                                MIME_TYPE_JSON);
                        session.getProvenanceReporter().receive(flowFileStats, transitUri.toString(),
                                timer.getDuration(TimeUnit.MILLISECONDS));
                        session.transfer(flowFileStats, STATS);
                    }
                    processFacetsAndStats = false;
                }
            }

            if (getEntireResults) {
                final Integer totalDocumentsReturned = solrQuery.getStart() + solrQuery.getRows();
                if (totalDocumentsReturned < totalNumberOfResults) {
                    solrQuery.setStart(totalDocumentsReturned);
                    session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(),
                            timer.getDuration(TimeUnit.MILLISECONDS));
                    session.transfer(flowFileResponse, RESULTS);
                    flowFileResponse = session.create(flowFileResponse);
                } else {
                    continuePaging = false;
                }
            } else {
                continuePaging = false;
            }
        }

    } catch (Exception e) {
        flowFileResponse = session.penalize(flowFileResponse);
        flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION, e.getClass().getName());
        flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION_MESSAGE, e.getMessage());
        session.transfer(flowFileResponse, FAILURE);
        logger.error("Failed to execute query {} due to {}. FlowFile will be routed to relationship failure",
                new Object[] { solrQuery.toString(), e }, e);
        if (flowFileOriginal != null) {
            flowFileOriginal = session.penalize(flowFileOriginal);
        }
    }

    if (!flowFileResponse.isPenalized()) {
        session.getProvenanceReporter().receive(flowFileResponse, transitUri.toString(),
                timer.getDuration(TimeUnit.MILLISECONDS));
        session.transfer(flowFileResponse, RESULTS);
    }

    if (flowFileOriginal != null) {
        if (!flowFileOriginal.isPenalized()) {
            session.transfer(flowFileOriginal, ORIGINAL);
        } else {
            session.remove(flowFileOriginal);
        }
    }
}

From source file:org.apache.nifi.processors.solr.RequestParamsUtil.java

License:Apache License

/**
 * Parses a String of request params into a MultiMap.
 *
 * @param requestParams
 *          the value of the request params property
 * @return a MultiMapSolrParams wrapping the parsed parameters
 */
public static MultiMapSolrParams parse(final String requestParams) {
    final Map<String, String[]> paramsMap = new HashMap<>();
    if (requestParams == null || requestParams.trim().isEmpty()) {
        return new MultiMapSolrParams(paramsMap);
    }

    final String[] params = requestParams.split("[&]");
    if (params == null || params.length == 0) {
        throw new IllegalStateException("Parameters must be in form k1=v1&k2=v2, was" + requestParams);
    }

    for (final String param : params) {
        final String[] keyVal = param.split("=");
        if (keyVal.length != 2) {
            throw new IllegalStateException("Parameter must be in form key=value, was " + param);
        }

        final String key = keyVal[0].trim();
        final String val = keyVal[1].trim();
        MultiMapSolrParams.addParam(key, val, paramsMap);
    }

    return new MultiMapSolrParams(paramsMap);
}
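
A short, hypothetical call to the parse helper above (the parameter string is invented for illustration):

// hypothetical usage of the parse helper shown above
MultiMapSolrParams extraParams = RequestParamsUtil.parse("commit=true&df=text");
String[] df = extraParams.getParams("df"); // ["text"]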

From source file:uk.co.flax.biosolr.solr.update.processor.OntologyUpdateProcessorFactoryTest.java

License:Apache License

static void addDoc(String doc, String chain) throws Exception {
    Map<String, String[]> params = new HashMap<>();
    MultiMapSolrParams mmparams = new MultiMapSolrParams(params);
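    // entries added to the backing map after construction are visible through mmparams,
    // since MultiMapSolrParams wraps the map rather than copying it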
    params.put(UpdateParams.UPDATE_CHAIN, new String[] { chain });
    SolrQueryRequestBase req = new SolrQueryRequestBase(h.getCore(), mmparams) {
    };

    UpdateRequestHandler handler = new UpdateRequestHandler();
    handler.init(null);
    ArrayList<ContentStream> streams = new ArrayList<>(2);
    streams.add(new ContentStreamBase.StringStream(doc));
    req.setContentStreams(streams);
    handler.handleRequestBody(req, new SolrQueryResponse());
    req.close();
}