Example usage for org.apache.solr.client.solrj.response UpdateResponse getElapsedTime

List of usage examples for org.apache.solr.client.solrj.response UpdateResponse getElapsedTime

Introduction

In this page you can find the example usage for org.apache.solr.client.solrj.response UpdateResponse getElapsedTime.

Prototype

@Override
    public long getElapsedTime() 

Source Link

Usage

From source file:com.datasalt.pangool.solr.BatchWriter.java

License:Apache License

/**
 * Sends one batch of documents to Solr and records per-task counters
 * (batches written, documents written, cumulative write time).
 *
 * @param batchToWrite documents to index in a single add request
 * @return the Solr response, or {@code null} if the add failed (the
 *         failure is recorded via {@link #setBatchWriteException})
 */
protected UpdateResponse runUpdate(List<SolrInputDocument> batchToWrite) {
    try {
        UpdateResponse response = solr.add(batchToWrite);
        SolrRecordWriter.incrementCounter(taskId, "SolrRecordWriter", "BatchesWritten", 1);
        SolrRecordWriter.incrementCounter(taskId, "SolrRecordWriter", "DocumentsWritten", batchToWrite.size());
        SolrRecordWriter.incrementCounter(taskId, "SolrRecordWriter", "BatchesWriteTime", response.getElapsedTime());
        return response;
    } catch (Throwable t) {
        // Count the failure under the throwable's class name, then stash it
        // for the owning writer to surface later.
        SolrRecordWriter.incrementCounter(taskId, "SolrRecordWriter", t.getClass().getName(), 1);
        Exception wrapped = (t instanceof Exception) ? (Exception) t : new Exception(t);
        setBatchWriteException(wrapped);
        return null;
    }
}

From source file:cz.zcu.kiv.eegdatabase.logic.indexing.Indexer.java

License:Apache License

/**
 * Emits a debug log line with the elapsed time and status code of a Solr
 * update response.
 *
 * @param response the commit response received from the Solr server
 * @throws IOException declared for interface compatibility; not thrown here
 * @throws SolrServerException declared for interface compatibility; not thrown here
 */
protected void logCommitResponse(UpdateResponse response) throws IOException, SolrServerException {
    log.debug("Time elapsed: " + response.getElapsedTime() + ", status code: " + response.getStatus());
}

From source file:ddf.catalog.cache.solr.impl.UninitializedSolrClientAdaptorTest.java

License:Open Source License

@Test
public void deleteByQuery() throws Exception {
    // An uninitialized adaptor must answer with an inert, empty response
    // rather than touching a real Solr instance.
    UpdateResponse actual = uninitializedSolrClientAdaptor.deleteByQuery("test-delete-by-query-string");

    assertThat(actual.getElapsedTime(), is(equalTo(0L)));
    assertThat(actual.getRequestUrl(), is(equalTo("")));
    assertThat(actual.getResponse().size(), is(equalTo(0)));
}

From source file:de.qaware.chronix.solr.client.add.SolrAddingService.java

License:Apache License

/**
 * Logs the outcome of an update response and reports whether it succeeded.
 *
 * @param response the Solr update response to inspect
 * @return {@code true} iff the response status code is 0 (success)
 */
private static boolean evaluate(UpdateResponse response) {
    int statusCode = response.getStatus();
    LOGGER.debug("Response returned: Status code {}, Elapsed time {}, QTime {}", statusCode,
            response.getElapsedTime(), response.getQTime());
    // Solr signals success with status 0; any other code means an error.
    return statusCode == 0;
}

From source file:eumetsat.pn.solr.SolrFeeder.java

/**
 * Indexes every {@code *.json} file in the given directory into the configured
 * Solr collection, committing periodically and once more at the end.
 *
 * Fix: the Solr client is now shut down in a {@code finally} block. Previously
 * any unexpected runtime exception after client creation (e.g. while listing
 * or parsing files) skipped {@code solr.shutdown()}, leaking the
 * ConcurrentUpdateSolrServer's internal threads and connections.
 */
@Override
protected void indexDirContent(Path aSrcDir) {
    log.info("Indexing dir content {}", aSrcDir);

    JSONParser parser = new JSONParser();

    YamlNode endpointConfig = this.config.get("endpoint");
    String collection = endpointConfig.get("collection").asTextValue();

    // Prefer an injected (embedded) server when present; otherwise build an
    // HTTP client against the configured endpoint and collection.
    SolrServer solr;
    if (this.server != null) {
        solr = server;
        log.info("Using embedded SolrServer: {}", solr);
    } else {
        log.info("Endpoint configuration: {}", endpointConfig);

        String solrEndpoint = endpointConfig.get("url").asTextValue();
        solr = new ConcurrentUpdateSolrServer(solrEndpoint + "/" + collection, 10, 1);
        log.info("Using HTTP SolrServer: {}", solr);
    }

    int cpt = 0;
    Collection<File> inputFiles;
    try {
        // Best-effort sanity check: a failed ping is logged but does not
        // abort indexing (preserves the original behavior).
        try {
            SolrPingResponse ping = solr.ping();
            log.debug("Pinged Solr server: {}", ping);
        } catch (SolrServerException | IOException | RemoteSolrException e) {
            log.error("Could not ping Solr server", e);
        }

        inputFiles = FileUtils.listFiles(aSrcDir.toFile(), new String[] { "json" }, false);
        log.info("Indexing {} files...", inputFiles.size());

        for (File file : inputFiles) {
            try {
                String jsonStr = FileUtils.readFileToString(file);
                JSONObject jsObj = (JSONObject) parser.parse(jsonStr);

                try {
                    SolrInputDocument input = createInputDoc(jsObj);

                    log.debug("Adding {} to collection {}", file.getName(), collection);
                    log.trace("Full json of {}: {}", file.getName(), input);
                    solr.add(input);

                    cpt++;
                } catch (RuntimeException e) {
                    // A bad document must not stop the whole run.
                    log.error("Error processing input document {}: {}, {}", file, e, e.getMessage());
                }

                if (cpt % 42 == 0) { // periodically flush
                    log.info("Commiting to server, document count is {}", cpt);
                    UpdateResponse response = solr.commit();
                    log.info("Response status: {} (time: {}): {}", response.getStatus(), response.getElapsedTime(),
                            response.toString());
                }

            } catch (IOException | ParseException | SolrServerException e) {
                log.error("Error comitting document based on file {}", file, e);
            }
        }

        // Final commit for any documents added since the last periodic flush.
        try {
            solr.commit();
        } catch (IOException | SolrServerException e) {
            log.error("Error comitting document", e);
        }

        log.info("Indexed {} of {} files.", cpt, inputFiles.size());
    } finally {
        // Always release the client's resources, even on unexpected failure.
        solr.shutdown();
    }
}

From source file:io.confluent.connect.solr.sink.SolrInputDocumentHandler.java

License:Apache License

/**
 * Processes the buffered update request against the Solr client and clears
 * the buffer afterwards.
 *
 * @throws IOException if the request fails at the transport level
 * @throws SolrServerException if Solr reports an error for the request
 */
public void flush() throws IOException, SolrServerException {
    final int documentCount = this.updateRequest.getDocuments().size();

    if (log.isDebugEnabled()) {
        log.debug("Writing {} document(s) for topic '{}' to solr.", documentCount, this.topic);
    }

    beforeFlush(this.updateRequest);
    UpdateResponse response = this.updateRequest.process(this.solrClient);

    if (log.isInfoEnabled()) {
        log.info("Wrote {} documents(s) for topic '{}' to solr in {} ms", documentCount, this.topic,
                response.getElapsedTime());
    }

    this.updateRequest.clear();
}

From source file:net.hasor.search.server.rsf.service.SorlDumpService.java

License:Apache License

/**
 * Runs the given update action against the Solr client, optionally issuing a
 * commit (honoring the configured flush/searcher/soft-commit flags), and wraps
 * the outcome in an {@link UpdateSearchResult}.
 *
 * Fix: the success path previously set {@code setSuccess(false)} — the same
 * value as the error path, an apparent copy-paste bug — so callers could never
 * observe a successful update. It now reports {@code true} when no exception
 * was thrown.
 *
 * @param exec the update action to execute
 * @return a result carrying elapsed time, status and query time on success,
 *         or the captured failure on error
 */
private UpdateSearchResult doExecute(ExecuteService exec) {
    try {
        SolrClient solrClient = this.getSolrClient();
        RsfOptionSet optionSet = this.getRsfOptionSet();
        String commit = optionSet.getOption(OptionConstant.COMMIT_KEY);
        //
        UpdateResponse res = exec.doExecute(solrClient);
        if (StringUtils.equalsBlankIgnoreCase(commit, OptionConstant.COMMIT_VALUE)) {
            boolean waitFlush = StrUtils.parseBool(optionSet.getOption(OptionConstant.WAIT_FLUSH_KEY), true);
            boolean waitSearcher = StrUtils.parseBool(optionSet.getOption(OptionConstant.WAIT_SEARCHER_KEY),
                    true);
            boolean softCommit = StrUtils.parseBool(optionSet.getOption(OptionConstant.SOFT_COMMIT_KEY), false);
            res = solrClient.commit(waitFlush, waitSearcher, softCommit);
        }
        //
        UpdateSearchResult result = new UpdateSearchResult();
        result.setSuccess(true); // fixed: was false, mirroring the error path
        result.setElapsedTime(res.getElapsedTime());
        result.setStatus(res.getStatus());
        result.setQueryTime(res.getQTime());
        return result;
    } catch (Throwable e) {
        UpdateSearchResult result = new UpdateSearchResult();
        result.setSuccess(false);
        // NOTE(review): wrapping only e.getMessage() drops the cause chain;
        // consider a SearchException constructor that accepts the cause.
        result.setThrowable(new SearchException(e.getMessage()));
        LoggerHelper.logSevere(e.getMessage(), e);
        return result;
    }
}

From source file:org.mycore.solr.index.handlers.content.MCRSolrMCRContentMapIndexHandler.java

License:Open Source License

@Override
public void index() throws IOException, SolrServerException {
    int totalCount = contentMap.size();
    LOGGER.info("Handling " + totalCount + " documents");
    //multithread processing will result in too many http request
    UpdateResponse updateResponse;
    try {/*w w  w. ja  v  a2s . c  o m*/
        Iterator<SolrInputDocument> documents = MCRSolrInputDocumentFactory.getInstance()
                .getDocuments(contentMap);
        SolrClient solrClient = getSolrClient();
        if (solrClient instanceof ConcurrentUpdateSolrClient) {
            //split up to speed up processing
            splitup(documents);
            return;
        }
        if (LOGGER.isDebugEnabled()) {
            ArrayList<SolrInputDocument> debugList = new ArrayList<>();
            while (documents.hasNext()) {
                debugList.add(documents.next());
            }
            LOGGER.debug("Sending these documents: " + debugList);
            //recreate documents interator;
            documents = debugList.iterator();
        }
        if (solrClient instanceof HttpSolrClient) {
            updateResponse = ((HttpSolrClient) solrClient).add(documents);
        } else {
            ArrayList<SolrInputDocument> docs = new ArrayList<>(totalCount);
            while (documents.hasNext()) {
                docs.add(documents.next());
            }
            updateResponse = solrClient.add(docs);
        }
    } catch (Throwable e) {
        LOGGER.warn("Error while indexing document collection. Split and retry.", e);
        splitup();
        return;
    }
    if (updateResponse.getStatus() != 0) {
        LOGGER.error(
                "Error while indexing document collection. Split and retry: " + updateResponse.getResponse());
        splitup();
    } else {
        LOGGER.info("Sending " + totalCount + " documents was successful in " + updateResponse.getElapsedTime()
                + " ms.");
    }

}

From source file:org.mycore.solr.index.handlers.document.MCRSolrInputDocumentsHandler.java

License:Open Source License

/**
 * Indexes the buffered input documents with one bulk update request; on any
 * failure the batch is split into smaller handlers and retried.
 *
 * Fix: the catch block logged only the warning message and silently discarded
 * the caught {@code Throwable}; it now passes the throwable to the logger so
 * the root cause of a failed bulk update is visible (matching the sibling
 * handler's error handling).
 */
@Override
public void index() throws IOException, SolrServerException {
    if (documents == null || documents.isEmpty()) {
        LOGGER.warn("No input documents to index.");
        return;
    }
    int totalCount = documents.size();
    LOGGER.info("Handling " + totalCount + " documents");
    SolrClient solrClient = getSolrClient();
    if (solrClient instanceof ConcurrentUpdateSolrClient) {
        LOGGER.info("Detected ConcurrentUpdateSolrClient. Split up batch update.");
        splitDocuments();
        //for statistics:
        documents.clear();
        return;
    }
    UpdateResponse updateResponse;
    try {
        UpdateRequest updateRequest = getUpdateRequest(MCRSolrConstants.UPDATE_PATH);
        updateRequest.add(documents);
        updateResponse = updateRequest.process(getSolrClient());
    } catch (Throwable e) {
        // Fixed: previously the exception was dropped; log it with the warning.
        LOGGER.warn("Error while indexing document collection. Split and retry.", e);
        splitDocuments();
        return;
    }
    if (updateResponse.getStatus() != 0) {
        LOGGER.error(
                "Error while indexing document collection. Split and retry: " + updateResponse.getResponse());
        splitDocuments();
    } else {
        LOGGER.info("Sending " + totalCount + " documents was successful in " + updateResponse.getElapsedTime()
                + " ms.");
    }
}

From source file:org.mycore.solr.index.handlers.MCRSolrOptimizeIndexHandler.java

License:Open Source License

/**
 * Sends an optimize request to Solr and logs its outcome and duration.
 */
@Override
public void index() throws IOException, SolrServerException {
    LOGGER.info("Sending optimize request to solr");
    UpdateResponse response = getSolrClient().optimize();
    String outcome = response.getStatus() == 0 ? "successful." : "UNSUCCESSFUL!";
    LOGGER.info(MessageFormat.format("Optimize was {0}({1}ms)", outcome, response.getElapsedTime()));
}