Example usage for org.apache.solr.client.solrj.request ContentStreamUpdateRequest setParam

List of usage examples for org.apache.solr.client.solrj.request ContentStreamUpdateRequest setParam

Introduction

On this page you can find example usages of org.apache.solr.client.solrj.request ContentStreamUpdateRequest setParam.

Prototype

public void setParam(String param, String value) 

Source Link

Usage

From source file:actors.SolrActor.java

License:Apache License

/**
 * Handles a SolrIndexEvent: performs a realtime GET to check whether the
 * document is already indexed and, if not, streams its remote content to
 * Solr's /update/extract handler together with the document's metadata
 * attached as literal.* parameters.
 *
 * @param msg event carrying the SolrInputDocument to index
 */
public void indexUpdated(SolrIndexEvent msg) {
    try {
        System.out.println("SolrIndexEvent");
        SolrInputDocument doc = msg.getDocuement();

        // Realtime GET: ask the /get handler whether this id/rev pair is already indexed.
        System.out.println("GET");
        SolrQuery parameters = new SolrQuery();
        parameters.setRequestHandler("/get");
        parameters.set("id", doc.getFieldValue("literal.id").toString());
        parameters.set("rev", doc.getFieldValue("literal.rev").toString());

        QueryResponse response = server.query(parameters);
        NamedList<Object> result = response.getResponse();

        // Only index when the realtime GET found no existing document.
        if (result == null || result.get("doc") == null) {
            System.out.println("/update/extract");
            ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");

            // Stream the remote file (a Dropbox URL stored in literal.links)
            // straight to the extracting handler.
            URL url = new URL(doc.getFieldValue("literal.links").toString());
            ContentStreamBase content = new ContentStreamBase.URLStream(url);
            System.out.println("ContentStreamBase");
            req.addContentStream(content);

            // Additional metadata: copy each literal.* field from the document
            // onto the request (replaces the previous copy-pasted setParam calls).
            for (String field : new String[] { "literal.id", "literal.title", "literal.rev", "literal.when",
                    "literal.path", "literal.icon", "literal.size" }) {
                req.setParam(field, doc.getFieldValue(field).toString());
            }
            // literal.url is sourced from the document's literal.links field.
            req.setParam("literal.url", doc.getFieldValue("literal.links").toString());

            // Unknown extracted fields get the attr_ prefix; the extracted body
            // text is mapped to attr_content.
            req.setParam("uprefix", "attr_");
            req.setParam("fmap.content", "attr_content");
            req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);

            // Requesting Solr; request() throws on failure.
            result = server.request(req);

        } else {
            System.out.println("It's already update");

        }

    } catch (Exception e) {
        // Best-effort actor: log the failure and keep processing further events.
        e.printStackTrace();
    }
}

From source file:at.kc.tugraz.ss.service.solr.impl.SSSolrImpl.java

License:Apache License

@Override
public void solrAddDoc(final SSServPar parA) throws Exception {

    //    according to Solr specification by adding a document with an ID already
    //     existing in the index will replace the document (eg. refer to 
    //     http://stackoverflow.com/questions/8494923/solr-block-updating-of-existing-document or
    //     http://lucene.apache.org/solr/api-4_0_0-ALPHA/doc-files/tutorial.html ) 

    try {/*  w  ww .java2s  . c om*/
        final SSSolrAddDocPar par = new SSSolrAddDocPar(parA);
        final ContentStreamUpdateRequest csur = new ContentStreamUpdateRequest("/update/extract");
        final NamedList<Object> response;

        csur.addContentStream(new ContentStreamBase.FileStream(new File(localWorkPath + par.id)));

        csur.setParam("literal.id", par.id);
        csur.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);

        response = solrUpdater.request(csur);

        SSLogU.info("document w/ id " + par.id + " added successfully. ");
    } catch (Exception error) {
        SSServErrReg.regErrThrow(error);
    }
}

From source file:at.tugraz.sss.servs.db.impl.SSDBNoSQLSolrImpl.java

License:Apache License

@Override
public void addDoc(final SSDBNoSQLAddDocPar par) throws SSErr {

    //    according to Solr specification by adding a document with an ID already
    //     existing in the index will replace the document (eg. refer to 
    //     http://stackoverflow.com/questions/8494923/solr-block-updating-of-existing-document or
    //     http://lucene.apache.org/solr/api-4_0_0-ALPHA/doc-files/tutorial.html ) 

    try {//  www .  ja  v  a2  s  .  c o m
        final ContentStreamUpdateRequest csur = new ContentStreamUpdateRequest("/update/extract");
        final NamedList<Object> response;

        csur.addContentStream(new ContentStreamBase.FileStream(new File(SSConf.getLocalWorkPath() + par.id)));

        csur.setParam("literal.id", par.id);
        //      csur.setParam  ("stream.type", "application/octet-stream");

        csur.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);

        response = solrServer.request(csur);

        SSLogU.info("document w/ id " + par.id + " added successfully. ");
    } catch (Exception error) {
        SSServErrReg.regErrThrow(error);
    }
}

From source file:kesako.watcher.runnable.IndexFileProcess.java

License:Apache License

/**
 * Initialize a ContentStreamUpdateRequest object.
 * @param up object to initialize/*from   w ww. j av a2s  . com*/
 * @throws IOException
 * @throws SQLException
 */
/**
 * Initializes a ContentStreamUpdateRequest: attaches the file to index and
 * sets its metadata (source, title, author, dates, custom t_metas entries)
 * as literal.* parameters, read from the database.
 *
 * @param cn        open database connection
 * @param up        object to initialize
 * @param idFichier database id of the file (t_fichiers.id_Fichier)
 * @param fToIndex  file whose content will be streamed to Solr
 * @throws IOException  if the file cannot be attached to the request
 * @throws SQLException on any database error
 */
private void initContentStream(Connection cn, ContentStreamUpdateRequest up, int idFichier, File fToIndex)
        throws IOException, SQLException {
    Date d;
    Calendar c = Calendar.getInstance();
    String titreDoc;
    String sqlQuery;
    ResultSet rs;

    up.addFile(fToIndex);

    // NOTE(review): query built by concatenation; idFichier is an int so it
    // cannot carry SQL injection, but a PreparedStatement would still be safer.
    sqlQuery = "select t2.nom as sourceName,t2.chemin as sourceURI,t1.Titre_F,t1.author_F,t1.DATEEXTRACTED,t1.titre_doc from t_fichiers t1 "
            + "join t_sources t2 on t1.id_source=t2.id_source " + " where id_Fichier=" + idFichier;
    rs = DBUtilities.executeQuery(cn, sqlQuery);
    if (rs.next()) {
        up.setParam("literal.folder_name", rs.getString("sourceName"));
        up.setParam("literal.folder_uri", rs.getString("sourceURI"));
        up.setParam("literal.file_uri", fToIndex.getAbsolutePath());

        // Last-modified date formatted as yyyy-M-d (Calendar months are 0-based).
        d = new Date(fToIndex.lastModified());
        c.setTime(d);
        up.setParam("literal.modif_date",
                c.get(Calendar.YEAR) + "-" + (c.get(Calendar.MONTH) + 1) + "-" + c.get(Calendar.DAY_OF_MONTH));

        if (rs.getString("Titre_F").equals("")) {
            up.setParam("literal.titre_f", "<no title>");
        } else {
            up.setParam("literal.titre_f", rs.getString("Titre_F"));
        }
        up.setParam("literal.author_f", rs.getString("author_F"));
        up.setParam("literal.extract_date", rs.getString("DATEEXTRACTED"));
        titreDoc = rs.getString("titre_doc");
        if (!titreDoc.trim().equals("")) {
            // Reuse the value already fetched instead of re-reading the column.
            up.setParam("literal.titre_doc", titreDoc);
        }
    } else {
        logger.fatal("The file " + fToIndex.getAbsolutePath() + " doesn't exist in the data-base. Query: "
                + sqlQuery);
    }

    // Attach every custom metadata row as an additional literal.* parameter.
    sqlQuery = "select * from t_metas where id_fichier=" + idFichier;
    rs = DBUtilities.executeQuery(cn, sqlQuery);
    while (rs.next()) {
        up.setParam("literal." + rs.getString("nom"), rs.getString("value"));
    }
}

From source file:net.yacy.cora.federate.solr.connector.SolrServerConnector.java

License:Open Source License

/**
 * Sends a file to Solr's extracting request handler (/update/extract),
 * indexing its content under the given document id.
 *
 * @param file   the file to extract and index
 * @param solrId the Solr document id to index it under
 * @throws IOException wrapping any failure reported by the Solr request
 */
public void add(final File file, final String solrId) throws IOException {
    final ContentStreamUpdateRequest request = new ContentStreamUpdateRequest("/update/extract");
    request.addFile(file, "application/octet-stream");
    request.setParam("literal.id", solrId);
    // Unknown extracted fields get the attr_ prefix; extracted body text is
    // mapped to attr_content.
    request.setParam("uprefix", "attr_");
    request.setParam("fmap.content", "attr_content");
    // -1 disables commitWithin; committing is left to the caller.
    request.setCommitWithin(-1);
    //request.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
    try {
        this.server.request(request);
    } catch (final Throwable e) {
        // Prevent further OOM if this failure was itself caused by OOM.
        clearCaches();
        throw new IOException(e);
    }
}

From source file:org.apache.camel.component.solr.SolrProducer.java

License:Apache License

/**
 * Routes the exchange body to the appropriate Solr update mechanism:
 * a File goes through a ContentStreamUpdateRequest (with SolrConstants.PARAM
 * headers copied as request parameters), a SolrInputDocument through an
 * UpdateRequest, SolrConstants.FIELD headers are assembled into a new
 * document, and a String body is sent as a direct XML update.
 *
 * @param exchange    the Camel exchange whose IN body is inserted
 * @param isStreaming whether to use the streaming Solr server
 * @throws Exception on any Solr or request failure
 */
private void insert(Exchange exchange, boolean isStreaming) throws Exception {
    Object body = exchange.getIn().getBody();
    if (body instanceof WrappedFile) {
        body = ((WrappedFile<?>) body).getFile();
    }

    if (body instanceof File) {
        MimetypesFileTypeMap mimeTypesMap = new MimetypesFileTypeMap();
        String mimeType = mimeTypesMap.getContentType((File) body);
        ContentStreamUpdateRequest updateRequest = new ContentStreamUpdateRequest(getRequestHandler());
        updateRequest.addFile((File) body, mimeType);

        // Copy SolrConstants.PARAM-prefixed headers onto the request as Solr params.
        for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) {
            if (entry.getKey().startsWith(SolrConstants.PARAM)) {
                String paramName = entry.getKey().substring(SolrConstants.PARAM.length());
                updateRequest.setParam(paramName, entry.getValue().toString());
            }
        }

        processRequest(updateRequest, isStreaming);

    } else if (body instanceof SolrInputDocument) {

        UpdateRequest updateRequest = new UpdateRequest(getRequestHandler());
        updateRequest.add((SolrInputDocument) body);

        processRequest(updateRequest, isStreaming);

    } else {

        // No recognized body type: look for SolrConstants.FIELD headers first.
        boolean hasSolrHeaders = false;
        for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) {
            if (entry.getKey().startsWith(SolrConstants.FIELD)) {
                hasSolrHeaders = true;
                break;
            }
        }

        if (hasSolrHeaders) {

            // Build a document from the FIELD-prefixed headers.
            UpdateRequest updateRequest = new UpdateRequest(getRequestHandler());

            SolrInputDocument doc = new SolrInputDocument();
            for (Map.Entry<String, Object> entry : exchange.getIn().getHeaders().entrySet()) {
                if (entry.getKey().startsWith(SolrConstants.FIELD)) {
                    String fieldName = entry.getKey().substring(SolrConstants.FIELD.length());
                    doc.setField(fieldName, entry.getValue());
                }
            }
            updateRequest.add(doc);

            processRequest(updateRequest, isStreaming);

        } else if (body instanceof String) {

            String bodyAsString = (String) body;

            // Wrap bare documents so the update handler receives a valid <add> root.
            if (!bodyAsString.startsWith("<add")) {
                bodyAsString = "<add>" + bodyAsString + "</add>";
            }

            DirectXmlRequest xmlRequest = new DirectXmlRequest(getRequestHandler(), bodyAsString);

            if (isStreaming) {
                streamingSolrServer.request(xmlRequest);
            } else {
                solrServer.request(xmlRequest);
            }
        } else {
            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                    "unable to find data in Exchange to update Solr");
        }
    }
}

/**
 * Executes an update request against the streaming or the plain Solr server,
 * depending on the isStreaming flag (extracted from three identical inline copies).
 */
private void processRequest(AbstractUpdateRequest updateRequest, boolean isStreaming) throws Exception {
    if (isStreaming) {
        updateRequest.process(streamingSolrServer);
    } else {
        updateRequest.process(solrServer);
    }
}

From source file:org.apache.nifi.processors.solr.PutSolrContentStream.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();// w ww .  j a  v a  2s  .c  o m
    if (flowFile == null) {
        return;
    }

    final AtomicReference<Exception> error = new AtomicReference<>(null);
    final AtomicReference<Exception> connectionError = new AtomicReference<>(null);

    final boolean isSolrCloud = SOLR_TYPE_CLOUD.equals(context.getProperty(SOLR_TYPE).getValue());
    final String collection = context.getProperty(COLLECTION).evaluateAttributeExpressions(flowFile).getValue();
    final Long commitWithin = context.getProperty(COMMIT_WITHIN).evaluateAttributeExpressions(flowFile)
            .asLong();
    final String contentStreamPath = context.getProperty(CONTENT_STREAM_PATH)
            .evaluateAttributeExpressions(flowFile).getValue();
    final MultiMapSolrParams requestParams = new MultiMapSolrParams(getRequestParams(context, flowFile));

    StopWatch timer = new StopWatch(true);
    session.read(flowFile, new InputStreamCallback() {
        @Override
        public void process(final InputStream in) throws IOException {
            ContentStreamUpdateRequest request = new ContentStreamUpdateRequest(contentStreamPath);
            request.setParams(new ModifiableSolrParams());

            // add the extra params, don't use 'set' in case of repeating params
            Iterator<String> paramNames = requestParams.getParameterNamesIterator();
            while (paramNames.hasNext()) {
                String paramName = paramNames.next();
                for (String paramValue : requestParams.getParams(paramName)) {
                    request.getParams().add(paramName, paramValue);
                }
            }

            // specify the collection for SolrCloud
            if (isSolrCloud) {
                request.setParam(COLLECTION_PARAM_NAME, collection);
            }

            if (commitWithin != null && commitWithin > 0) {
                request.setParam(COMMIT_WITHIN_PARAM_NAME, commitWithin.toString());
            }

            // if a username and password were provided then pass them for basic auth
            if (isBasicAuthEnabled()) {
                request.setBasicAuthCredentials(getUsername(), getPassword());
            }

            try (final BufferedInputStream bufferedIn = new BufferedInputStream(in)) {
                // add the FlowFile's content on the UpdateRequest
                request.addContentStream(new ContentStreamBase() {
                    @Override
                    public InputStream getStream() throws IOException {
                        return bufferedIn;
                    }

                    @Override
                    public String getContentType() {
                        return context.getProperty(CONTENT_TYPE).evaluateAttributeExpressions().getValue();
                    }
                });

                UpdateResponse response = request.process(getSolrClient());
                getLogger().debug("Got {} response from Solr", new Object[] { response.getStatus() });
            } catch (SolrException e) {
                error.set(e);
            } catch (SolrServerException e) {
                if (causedByIOException(e)) {
                    connectionError.set(e);
                } else {
                    error.set(e);
                }
            } catch (IOException e) {
                connectionError.set(e);
            }
        }
    });
    timer.stop();

    if (error.get() != null) {
        getLogger().error("Failed to send {} to Solr due to {}; routing to failure",
                new Object[] { flowFile, error.get() });
        session.transfer(flowFile, REL_FAILURE);
    } else if (connectionError.get() != null) {
        getLogger().error("Failed to send {} to Solr due to {}; routing to connection_failure",
                new Object[] { flowFile, connectionError.get() });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_CONNECTION_FAILURE);
    } else {
        StringBuilder transitUri = new StringBuilder("solr://");
        transitUri.append(getSolrLocation());
        if (isSolrCloud) {
            transitUri.append(":").append(collection);
        }

        final long duration = timer.getDuration(TimeUnit.MILLISECONDS);
        session.getProvenanceReporter().send(flowFile, transitUri.toString(), duration, true);
        getLogger().info("Successfully sent {} to Solr in {} millis", new Object[] { flowFile, duration });
        session.transfer(flowFile, REL_SUCCESS);
    }
}

From source file:org.craftercms.search.service.impl.SolrDocumentBuilder.java

License:Open Source License

/**
 * Build the Solr document for partial update of the search engine's index data of a structured document.
 *
 * @param request                Content Stream update request for document update
 * @param additionalFields       Fields to add to solr document
 * @return the Solr document/*from   www. ja va  2s.com*/
 *
 */
/**
 * Build the Solr document for partial update of the search engine's index data
 * of a structured document.
 *
 * @param request          content stream update request for the document update
 * @param additionalFields fields to add to the Solr document
 * @return the request, with one literal parameter added per additional field
 */
public ContentStreamUpdateRequest buildPartialUpdateDocument(ContentStreamUpdateRequest request,
        Map<String, String> additionalFields) {

    for (Map.Entry<String, String> additionalField : additionalFields.entrySet()) {
        final String fieldName = additionalField.getKey();
        String fieldValue = additionalField.getValue();

        final boolean isHtmlField = fieldName.endsWith(htmlFieldSuffix);
        final boolean isDateTimeField = fieldName.endsWith(dateTimeFieldSuffix);

        // HTML-suffixed fields have all markup stripped from their value.
        if (isHtmlField) {
            if (logger.isDebugEnabled()) {
                logger.debug("Stripping HTML from field '" + fieldName + "'");
            }
            fieldValue = stripHtml(fieldName, fieldValue);
        }
        // Datetime-suffixed fields are normalized to ISO datetime strings.
        if (isDateTimeField) {
            if (logger.isDebugEnabled()) {
                logger.debug("Converting '" + fieldValue + "' to ISO datetime");
            }
            fieldValue = convertToISODateTimeString(fieldValue);
        }

        if (logger.isDebugEnabled()) {
            logger.debug("Adding field '" + fieldName + "' to the Solr doc");
        }

        if (isHtmlField || isDateTimeField) {
            // HTML/datetime fields are treated as single-valued.
            request.setParam(ExtractingParams.LITERALS_PREFIX + fieldName, fieldValue);
        } else {
            // Other fields may be multivalued: split on the configured separator.
            final String[] fieldValues = fieldValue.split(multivalueSeparator);
            if (fieldValues.length > 1) {
                request.getParams().add(ExtractingParams.LITERALS_PREFIX + fieldName, fieldValues);
            } else {
                request.setParam(ExtractingParams.LITERALS_PREFIX + fieldName, fieldValue);
            }
        }
    }
    return request;
}

From source file:org.dspace.discovery.SolrServiceImpl.java

License:BSD License

/**
 * Write the document to the index under the appropriate handle.
 *
 * @param doc the solr document to be written to the server
 * @param streams/*from  w w  w.  j  ava 2s  . c  o m*/
 * @throws IOException IO exception
 */
/**
 * Write the document to the index under the appropriate handle.
 * When bitstream content streams are present, the document is sent through
 * Solr's extracting handler so extracted text lands in the "fulltext" field;
 * otherwise the document is added directly.
 *
 * @param doc     the solr document to be written to the server
 * @param streams bitstream content streams to extract text from (may be empty)
 * @throws IOException IO exception
 */
protected void writeDocument(SolrInputDocument doc, List<BitstreamContentStream> streams) throws IOException {

    try {
        if (getSolr() == null) {
            return;
        }

        if (CollectionUtils.isEmpty(streams)) {
            // No attached content streams: a plain add is sufficient.
            getSolr().add(doc);
            return;
        }

        ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");
        for (BitstreamContentStream bce : streams) {
            req.addContentStream(bce);
        }

        // Pass every document field value to the extracting handler as a literal.
        ModifiableSolrParams params = new ModifiableSolrParams();
        for (String name : doc.getFieldNames()) {
            for (Object val : doc.getFieldValues(name)) {
                params.add(ExtractingParams.LITERALS_PREFIX + name, val.toString());
            }
        }
        req.setParams(params);

        // Unknown extracted fields get the attr_ prefix; extracted content is
        // mapped to "fulltext" and returned as plain text.
        req.setParam(ExtractingParams.UNKNOWN_FIELD_PREFIX, "attr_");
        req.setParam(ExtractingParams.MAP_PREFIX + "content", "fulltext");
        req.setParam(ExtractingParams.EXTRACT_FORMAT, "text");
        req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
        req.process(getSolr());
    } catch (SolrServerException e) {
        // Indexing errors are logged, not propagated (historical behavior).
        log.error(e.getMessage(), e);
    }
}

From source file:org.dspace.statistics.SolrLogger.java

License:BSD License

/**
 * Shards the statistics index by year: facets on the "time" field to find
 * which years contain records, then for each year exports the matching
 * records as CSV (in 10000-row pages), uploads the CSV files into a newly
 * created "statistics-YYYY" core, and finally deletes those records from
 * the main core.
 *
 * @throws IOException         on temp-file or HTTP errors
 * @throws SolrServerException on Solr query/update errors
 */
public static void shardSolrIndex() throws IOException, SolrServerException {
    /*
    Start by faceting by year so we can include each year in a separate core !
     */
    SolrQuery yearRangeQuery = new SolrQuery();
    yearRangeQuery.setQuery("*:*");
    yearRangeQuery.setRows(0);
    yearRangeQuery.setFacet(true);
    yearRangeQuery.add(FacetParams.FACET_RANGE, "time");
    //We go back to the year 2000, this is a bit overkill but this way we ensure we have everything
    //The alternative would be to sort but that isn't recommended since it would be a very costly query !
    yearRangeQuery.add(FacetParams.FACET_RANGE_START,
            "NOW/YEAR-" + (Calendar.getInstance().get(Calendar.YEAR) - 2000) + "YEARS");
    //Add the +0year to ensure that we DO NOT include the current year
    yearRangeQuery.add(FacetParams.FACET_RANGE_END, "NOW/YEAR+0YEARS");
    yearRangeQuery.add(FacetParams.FACET_RANGE_GAP, "+1YEAR");
    yearRangeQuery.add(FacetParams.FACET_MINCOUNT, String.valueOf(1));

    //Create a temp directory to store our files in !
    File tempDirectory = new File(
            ConfigurationManager.getProperty("dspace.dir") + File.separator + "temp" + File.separator);
    tempDirectory.mkdirs();

    // One HTTP client reused for all CSV exports; the original created (and
    // leaked) a new DefaultHttpClient per page request.
    DefaultHttpClient httpClient = new DefaultHttpClient();
    try {
        QueryResponse queryResponse = solr.query(yearRangeQuery);
        //We only have one range query !
        List<RangeFacet.Count> yearResults = queryResponse.getFacetRanges().get(0).getCounts();
        for (RangeFacet.Count count : yearResults) {
            long totalRecords = count.getCount();

            //Create a range query from this !
            //We start with our current year
            DCDate dcStart = new DCDate(count.getValue());
            Calendar endDate = Calendar.getInstance();
            //Advance one year for the start of the next one !
            endDate.setTime(dcStart.toDate());
            endDate.add(Calendar.YEAR, 1);
            DCDate dcEndDate = new DCDate(endDate.getTime());

            StringBuilder filterQuery = new StringBuilder();
            filterQuery.append("time:([");
            filterQuery.append(ClientUtils.escapeQueryChars(dcStart.toString()));
            filterQuery.append(" TO ");
            filterQuery.append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
            filterQuery.append("]");
            //The next part of the filter query excludes the content from midnight of the next year !
            filterQuery.append(" NOT ").append(ClientUtils.escapeQueryChars(dcEndDate.toString()));
            filterQuery.append(")");

            Map<String, String> yearQueryParams = new HashMap<String, String>();
            yearQueryParams.put(CommonParams.Q, "*:*");
            yearQueryParams.put(CommonParams.ROWS, String.valueOf(10000));
            yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
            yearQueryParams.put(CommonParams.WT, "csv");

            //Start by creating a new core
            String coreName = "statistics-" + dcStart.getYear();
            HttpSolrServer statisticsYearServer = createCore(solr, coreName);

            System.out.println("Moving: " + totalRecords + " into core " + coreName);
            log.info("Moving: " + totalRecords + " records into core " + coreName);

            List<File> filesToUpload = new ArrayList<File>();
            for (int i = 0; i < totalRecords; i += 10000) {
                String solrRequestUrl = solr.getBaseURL() + "/select";
                solrRequestUrl = generateURL(solrRequestUrl, yearQueryParams);

                HttpGet get = new HttpGet(solrRequestUrl);
                HttpResponse response = httpClient.execute(get);
                InputStream csvInputstream = response.getEntity().getContent();
                //Write the csv output to a file ! (copyInputStreamToFile closes the stream)
                File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear()
                        + "." + i + ".csv");
                FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
                filesToUpload.add(csvFile);

                //Add 10000 & start over again
                yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
            }

            for (File tempCsv : filesToUpload) {
                //Upload the data in the csv files to our new solr core
                ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest(
                        "/update/csv");
                contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
                contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
                contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");

                statisticsYearServer.request(contentStreamUpdateRequest);
            }
            statisticsYearServer.commit(true, true);

            //Delete contents of this year from our year query !
            solr.deleteByQuery(filterQuery.toString());
            solr.commit(true, true);

            log.info("Moved " + totalRecords + " records into core: " + coreName);
        }
    } finally {
        // Release all pooled connections held by the client.
        httpClient.getConnectionManager().shutdown();
    }

    FileUtils.deleteDirectory(tempDirectory);
}