Example usage for com.amazonaws.services.sqs.model SendMessageBatchRequestEntry SendMessageBatchRequestEntry

Introduction

On this page you can find example usage of the default constructor of com.amazonaws.services.sqs.model.SendMessageBatchRequestEntry.

Prototype

public SendMessageBatchRequestEntry() 

Document

Default constructor for SendMessageBatchRequestEntry object.
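
As a minimal sketch of how this constructor is typically used (the queue URL here is a placeholder, not taken from any of the examples below), each entry is created with the default constructor, populated via the fluent with* setters, and sent as part of a single SendMessageBatchRequest:

import java.util.Arrays;
import java.util.List;

import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.AmazonSQSClientBuilder;
import com.amazonaws.services.sqs.model.SendMessageBatchRequest;
import com.amazonaws.services.sqs.model.SendMessageBatchRequestEntry;

public class SendMessageBatchExample {
    public static void main(String[] args) {
        AmazonSQS sqs = AmazonSQSClientBuilder.defaultClient();

        // Each entry starts from the default constructor; the id must be unique within the batch.
        List<SendMessageBatchRequestEntry> entries = Arrays.asList(
                new SendMessageBatchRequestEntry().withId("0").withMessageBody("first message"),
                new SendMessageBatchRequestEntry().withId("1").withMessageBody("second message"));

        // A single batch request can carry at most 10 entries. The queue URL is a placeholder.
        sqs.sendMessageBatch(new SendMessageBatchRequest()
                .withQueueUrl("https://sqs.us-east-1.amazonaws.com/123456789012/example-queue")
                .withEntries(entries));
    }
}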

Usage

From source file:com.erudika.para.queue.AWSQueueUtils.java

License:Apache License

/**
 * Pushes a number of messages in batch to an SQS queue.
 * @param queueURL the URL of the SQS queue
 * @param messages the message bodies
 */
public static void pushMessages(String queueURL, List<String> messages) {
    if (!StringUtils.isBlank(queueURL) && messages != null) {
        // only allow strings - ie JSON
        try {
            int j = 0;
            List<SendMessageBatchRequestEntry> msgs = new ArrayList<SendMessageBatchRequestEntry>(MAX_MESSAGES);
            for (int i = 0; i < messages.size(); i++) {
                String message = messages.get(i);
                if (!StringUtils.isBlank(message)) {
                    msgs.add(new SendMessageBatchRequestEntry().withMessageBody(message).withId(i + ""));
                }
                if (++j >= MAX_MESSAGES || i == messages.size() - 1) {
                    if (!msgs.isEmpty()) {
                        getClient().sendMessageBatch(queueURL, msgs);
                        msgs.clear();
                    }
                    j = 0;
                }
            }
        } catch (AmazonServiceException ase) {
            logException(ase);
        } catch (AmazonClientException ace) {
            logger.error("Could not reach SQS. {}", ace.toString());
        }
    }
}

From source file:jp.classmethod.aws.gradle.sqs.AmazonSQSSendMessagesTask.java

License:Apache License

@TaskAction
public void sendMessages() {
    String queueUrl = getQueueUrl();
    Stream<String> messages = getMessages();

    if (queueUrl == null) {
        throw new GradleException("Must specify either queueName or queueUrl");
    }
    if (messages == null) {
        throw new GradleException("Must provide messages to send to SQS");
    }

    AmazonSQSPluginExtension ext = getProject().getExtensions().getByType(AmazonSQSPluginExtension.class);
    AmazonSQS sqs = ext.getClient();

    final AtomicInteger counter = new AtomicInteger(0);
    List<SendMessageBatchRequestEntry> messageEntries = messages
            .map(message -> new SendMessageBatchRequestEntry()
                    .withId("gradle_message_index_" + counter.getAndIncrement()).withMessageBody(message))
            .collect(Collectors.toList());

    getLogger().info("Sending {} messages to {}", messageEntries.size(), queueUrl);
    Lists.partition(messageEntries, MAX_MESSAGE_SEND_BATCH_SIZE).parallelStream()
            .forEach(messagesToSend -> sqs.sendMessageBatch(
                    new SendMessageBatchRequest().withQueueUrl(queueUrl).withEntries(messagesToSend)));
}

From source file:org.apache.nifi.processors.aws.sqs.PutSQS.java

License:Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }

    final long startNanos = System.nanoTime();
    final AmazonSQSClient client = getClient();
    final SendMessageBatchRequest request = new SendMessageBatchRequest();
    final String queueUrl = context.getProperty(QUEUE_URL).evaluateAttributeExpressions(flowFile).getValue();
    request.setQueueUrl(queueUrl);

    final Set<SendMessageBatchRequestEntry> entries = new HashSet<>();

    final SendMessageBatchRequestEntry entry = new SendMessageBatchRequestEntry();
    entry.setId(flowFile.getAttribute("uuid"));
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    session.exportTo(flowFile, baos);
    final String flowFileContent = baos.toString();
    entry.setMessageBody(flowFileContent);

    final Map<String, MessageAttributeValue> messageAttributes = new HashMap<>();

    for (final PropertyDescriptor descriptor : userDefinedProperties) {
        final MessageAttributeValue mav = new MessageAttributeValue();
        mav.setDataType("String");
        mav.setStringValue(context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue());
        messageAttributes.put(descriptor.getName(), mav);
    }

    entry.setMessageAttributes(messageAttributes);
    entry.setDelaySeconds(context.getProperty(DELAY).asTimePeriod(TimeUnit.SECONDS).intValue());
    entries.add(entry);

    request.setEntries(entries);

    try {
        client.sendMessageBatch(request);
    } catch (final Exception e) {
        getLogger().error("Failed to send messages to Amazon SQS due to {}; routing to failure",
                new Object[] { e });
        flowFile = session.penalize(flowFile);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }

    getLogger().info("Successfully published message to Amazon SQS for {}", new Object[] { flowFile });
    session.transfer(flowFile, REL_SUCCESS);
    final long transmissionMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
    session.getProvenanceReporter().send(flowFile, queueUrl, transmissionMillis);
}

From source file:org.duracloud.common.queue.aws.SQSTaskQueue.java

License:Apache License

/**
 * Puts multiple tasks on the queue using batch puts.  The tasks argument
 * can contain more than 10 Tasks, in that case there will be multiple SQS
 * batch send requests made each containing up to 10 messages.
 *
 * @param tasks
 */
@Override
public void put(Set<Task> tasks) {
    String msgBody = null;
    SendMessageBatchRequestEntry msgEntry = null;
    Set<SendMessageBatchRequestEntry> msgEntries = new HashSet<>();
    for (Task task : tasks) {
        msgBody = unmarshallTask(task);
        // must set a unique ID for each message in the batch request
        msgEntry = new SendMessageBatchRequestEntry().withMessageBody(msgBody).withId(msgEntries.size() + "");
        msgEntries.add(msgEntry);

        // Can only send batch of max 10 messages in a SQS queue request
        if (msgEntries.size() == 10) {
            this.sendBatchMessages(msgEntries);
            msgEntries.clear(); // clear the already sent messages
        }
    }

    // After for loop check to see if there are msgs in msgEntries that
    // haven't been sent yet because the size never reached 10.
    if (!msgEntries.isEmpty()) {
        this.sendBatchMessages(msgEntries);
    }
}

From source file:scheduler.ServerThread.java

License:Apache License

public void remoteBatchSend(BufferedReader in) throws ParseException {
    // Batch-send tasks to remote workers
    List<SendMessageBatchRequestEntry> entries = new ArrayList<SendMessageBatchRequestEntry>();
    String message;
    final int batchSize = 10;

    try {
        JSONParser parser = new JSONParser();

        while ((message = in.readLine()) != null) {

            JSONArray taskList = (JSONArray) parser.parse(message);

            for (int i = 0; i < taskList.size(); i++) {
                JSONObject task = (JSONObject) taskList.get(i);
                msg_cnt++;

                entries.add(new SendMessageBatchRequestEntry().withId(Integer.toString(msg_cnt))
                        .withMessageBody(task.toString()));

            }

            if (entries.size() == batchSize) {
                jobQ.batchSend(entries);
                entries.clear();
            }

        }

        if (!entries.isEmpty()) {
            jobQ.batchSend(entries);
            entries.clear();
        }

    } catch (IOException e) {
        e.printStackTrace();
    }

}