Example usage for com.amazonaws.services.cloudsearchdomain.model UploadDocumentsResult getAdds

List of usage examples for com.amazonaws.services.cloudsearchdomain.model UploadDocumentsResult getAdds

Introduction

On this page you can find example usage for com.amazonaws.services.cloudsearchdomain.model UploadDocumentsResult getAdds.

Prototype


public Long getAdds() 

Document

The number of documents that were added to the search domain.
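Before the full examples below, here is a minimal, self-contained sketch of calling uploadDocuments and reading getAdds(). It is not taken from the projects listed under Usage; the endpoint, region, and document JSON are placeholders you would replace with your own domain's values.

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.cloudsearchdomain.AmazonCloudSearchDomain;
import com.amazonaws.services.cloudsearchdomain.AmazonCloudSearchDomainClientBuilder;
import com.amazonaws.services.cloudsearchdomain.model.ContentType;
import com.amazonaws.services.cloudsearchdomain.model.UploadDocumentsRequest;
import com.amazonaws.services.cloudsearchdomain.model.UploadDocumentsResult;

public class GetAddsExample {
    public static void main(String[] args) {
        // Placeholder document service endpoint and region for the search domain
        AmazonCloudSearchDomain client = AmazonCloudSearchDomainClientBuilder.standard()
                .withEndpointConfiguration(new EndpointConfiguration(
                        "https://doc-mydomain-xxxxxxxxxxxxxxxxxxxxxxxxxx.us-east-1.cloudsearch.amazonaws.com",
                        "us-east-1"))
                .build();

        // A one-document batch in the CloudSearch document (SDF) "add" format
        byte[] batch = "[{\"type\":\"add\",\"id\":\"doc1\",\"fields\":{\"title\":\"hello\"}}]"
                .getBytes(StandardCharsets.UTF_8);

        UploadDocumentsRequest request = new UploadDocumentsRequest()
                .withContentType(ContentType.Applicationjson)
                .withContentLength((long) batch.length)
                .withDocuments(new ByteArrayInputStream(batch));

        UploadDocumentsResult result = client.uploadDocuments(request);

        // getAdds() reports how many documents the domain added from this batch
        System.out.println("Adds: " + result.getAdds() + ", deletes: " + result.getDeletes());
    }
}

The real-world examples below follow the same pattern, but batch many documents into one request and use getAdds() to feed metrics counters or log messages.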

Usage

From source file:com.digitalpebble.stormcrawler.aws.bolt.CloudSearchIndexerBolt.java

License:Apache License

public void sendBatch() {

    timeLastBatchSent = System.currentTimeMillis();

    // nothing to do
    if (numDocsInBatch == 0) {
        return;
    }

    // close the array
    buffer.append(']');

    LOG.info("Sending {} docs to CloudSearch", numDocsInBatch);

    byte[] bb = buffer.toString().getBytes(StandardCharsets.UTF_8);

    if (dumpBatchFilesToTemp) {
        try {
            File temp = File.createTempFile("CloudSearch_", ".json");
            FileUtils.writeByteArrayToFile(temp, bb);
            LOG.info("Wrote batch file {}", temp.getName());
            // ack the tuples
            for (Tuple t : unacked) {
                _collector.ack(t);
            }
            unacked.clear();
        } catch (IOException e1) {
            LOG.error("Exception while generating batch file", e1);
            // fail the tuples
            for (Tuple t : unacked) {
                _collector.fail(t);
            }
            unacked.clear();
        } finally {
            // reset buffer and doc counter
            buffer = new StringBuffer(MAX_SIZE_BATCH_BYTES).append('[');
            numDocsInBatch = 0;
        }
        return;
    }
    // not in debug mode
    try (InputStream inputStream = new ByteArrayInputStream(bb)) {
        UploadDocumentsRequest batch = new UploadDocumentsRequest();
        batch.setContentLength((long) bb.length);
        batch.setContentType(ContentType.Applicationjson);
        batch.setDocuments(inputStream);
        UploadDocumentsResult result = client.uploadDocuments(batch);
        LOG.info(result.getStatus());
        for (DocumentServiceWarning warning : result.getWarnings()) {
            LOG.info(warning.getMessage());
        }
        if (!result.getWarnings().isEmpty()) {
            eventCounter.scope("Warnings").incrBy(result.getWarnings().size());
        }
        eventCounter.scope("Added").incrBy(result.getAdds());
        // ack the tuples
        for (Tuple t : unacked) {
            _collector.ack(t);
        }
        unacked.clear();
    } catch (Exception e) {
        LOG.error("Exception while sending batch", e);
        LOG.error(buffer.toString());
        // fail the tuples
        for (Tuple t : unacked) {
            _collector.fail(t);
        }
        unacked.clear();
    } finally {
        // reset buffer and doc counter
        buffer = new StringBuffer(MAX_SIZE_BATCH_BYTES).append('[');
        numDocsInBatch = 0;
    }
}

From source file:com.norconex.committer.cloudsearch.CloudSearchCommitter.java

License:Apache License

private void uploadBatchToCloudSearch(List<JSONObject> documentBatch) {
    // Convert the JSON list to String and read it as a stream from memory
    // (for increased performance), for it to be usable by the AWS 
    // CloudSearch UploadRequest. If memory becomes a concern, consider 
    // streaming to file.
    // ArrayList.toString() joins the elements in a JSON-compliant way.
    byte[] bytes;
    try {
        bytes = documentBatch.toString().getBytes(CharEncoding.UTF_8);
    } catch (UnsupportedEncodingException e) {
        throw new CommitterException("UTF-8 not supported by OS.", e);
    }
    try (ByteArrayInputStream is = new ByteArrayInputStream(bytes)) {
        UploadDocumentsRequest uploadRequest = new UploadDocumentsRequest();
        uploadRequest.setContentType("application/json");
        uploadRequest.setDocuments(is);
        uploadRequest.setContentLength((long) bytes.length);
        ensureAWSClient();
        UploadDocumentsResult result = awsClient.uploadDocuments(uploadRequest);
        LOG.info(result.getAdds() + " Add requests and " + result.getDeletes() + " Delete requests "
                + "sent to the AWS CloudSearch domain.");
    } catch (IOException e) {
        throw new CommitterException("Could not upload request to CloudSearch.", e);
    }
}