Example usage for com.amazonaws.services.kinesis.model PutRecordsResultEntry getShardId

Introduction

On this page you can find example usage for com.amazonaws.services.kinesis.model PutRecordsResultEntry getShardId.

Prototype


public String getShardId() 

Document

The shard ID for an individual record result.
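Before the excerpts below, here is a minimal, self-contained sketch of where getShardId fits in a PutRecords call. The stream name "example-stream", the partition key, and the use of the default client builder are illustrative assumptions, not details taken from the source files that follow.

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

import com.amazonaws.services.kinesis.AmazonKinesis;
import com.amazonaws.services.kinesis.AmazonKinesisClientBuilder;
import com.amazonaws.services.kinesis.model.PutRecordsRequest;
import com.amazonaws.services.kinesis.model.PutRecordsRequestEntry;
import com.amazonaws.services.kinesis.model.PutRecordsResult;
import com.amazonaws.services.kinesis.model.PutRecordsResultEntry;

public class GetShardIdExample {
    public static void main(String[] args) {
        AmazonKinesis client = AmazonKinesisClientBuilder.defaultClient();

        PutRecordsRequest request = new PutRecordsRequest()
                .withStreamName("example-stream") // assumed stream name, for illustration only
                .withRecords(Arrays.asList(new PutRecordsRequestEntry()
                        .withPartitionKey("key-1")
                        .withData(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)))));

        PutRecordsResult result = client.putRecords(request);

        // Result entries are positionally aligned with the request entries.
        // getShardId() is populated only for records that were stored successfully;
        // failed records carry an error code and message instead.
        for (PutRecordsResultEntry entry : result.getRecords()) {
            if (entry.getErrorCode() == null) {
                System.out.println("Stored in shard " + entry.getShardId()
                        + " at sequence number " + entry.getSequenceNumber());
            } else {
                System.out.println("Failed: " + entry.getErrorCode() + " - " + entry.getErrorMessage());
            }
        }
    }
}

Both excerpts below rely on the same contract: a result entry carrying a sequence number and shard ID succeeded, while one carrying an error code did not.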

Usage

From source file: com.streamsets.pipeline.stage.destination.kinesis.KinesisTarget.java

License: Apache License

private void validateSuccessfulRecord(Record record, PutRecordsResultEntry resultEntry) throws StageException {
    if (null == resultEntry.getSequenceNumber() || null == resultEntry.getShardId()
            || resultEntry.getSequenceNumber().isEmpty() || resultEntry.getShardId().isEmpty()) {
        // A successful result entry must carry both a sequence number and a shard ID;
        // anything else is treated as a failed record.
        handleFailedRecord(record, "Missing SequenceId or ShardId.");
    }
}

From source file: org.apache.nifi.processors.aws.kinesis.stream.PutKinesisStream.java

License: Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {

    final int batchSize = context.getProperty(BATCH_SIZE).asInteger();
    final long maxBufferSizeBytes = context.getProperty(MAX_MESSAGE_BUFFER_SIZE_MB).asDataSize(DataUnit.B)
            .longValue();

    List<FlowFile> flowFiles = filterMessagesByMaxSize(session, batchSize, maxBufferSizeBytes,
            AWS_KINESIS_ERROR_MESSAGE);

    // Group FlowFiles and their corresponding request entries by target stream name.
    HashMap<String, List<FlowFile>> hashFlowFiles = new HashMap<>();
    HashMap<String, List<PutRecordsRequestEntry>> recordHash = new HashMap<>();

    final AmazonKinesisClient client = getClient();

    try {

        List<FlowFile> failedFlowFiles = new ArrayList<>();
        List<FlowFile> successfulFlowFiles = new ArrayList<>();

        // Prepare batch of records
        for (FlowFile flowFile : flowFiles) {

            String streamName = context.getProperty(KINESIS_STREAM_NAME).evaluateAttributeExpressions(flowFile)
                    .getValue();

            final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            session.exportTo(flowFile, baos);
            PutRecordsRequestEntry record = new PutRecordsRequestEntry()
                    .withData(ByteBuffer.wrap(baos.toByteArray()));

            String partitionKey = context.getProperty(PutKinesisStream.KINESIS_PARTITION_KEY)
                    .evaluateAttributeExpressions(flowFile).getValue();

            if (!StringUtils.isBlank(partitionKey)) {
                record.setPartitionKey(partitionKey);
            } else {
                // Fall back to a random partition key when none is configured.
                record.setPartitionKey(Integer.toString(randomParitionKeyGenerator.nextInt()));
            }

            // Register the record and its FlowFile under the target stream, in matching order.
            hashFlowFiles.computeIfAbsent(streamName, k -> new ArrayList<>()).add(flowFile);
            recordHash.computeIfAbsent(streamName, k -> new ArrayList<>()).add(record);
        }

        for (Map.Entry<String, List<PutRecordsRequestEntry>> entryRecord : recordHash.entrySet()) {
            String streamName = entryRecord.getKey();
            List<PutRecordsRequestEntry> records = entryRecord.getValue();

            if (!records.isEmpty()) {

                PutRecordsRequest putRecordRequest = new PutRecordsRequest();
                putRecordRequest.setStreamName(streamName);
                putRecordRequest.setRecords(records);
                PutRecordsResult results = client.putRecords(putRecordRequest);

                // Result entries come back in the same order as the request entries,
                // so index i pairs each result with its originating FlowFile.
                List<PutRecordsResultEntry> responseEntries = results.getRecords();
                for (int i = 0; i < responseEntries.size(); i++) {
                    PutRecordsResultEntry entry = responseEntries.get(i);
                    FlowFile flowFile = hashFlowFiles.get(streamName).get(i);

                    Map<String, String> attributes = new HashMap<>();
                    attributes.put(AWS_KINESIS_SHARD_ID, entry.getShardId());
                    attributes.put(AWS_KINESIS_SEQUENCE_NUMBER, entry.getSequenceNumber());

                    // An error code on the entry means this individual record failed
                    // even though the batch request itself succeeded.
                    if (!StringUtils.isBlank(entry.getErrorCode())) {
                        attributes.put(AWS_KINESIS_ERROR_CODE, entry.getErrorCode());
                        attributes.put(AWS_KINESIS_ERROR_MESSAGE, entry.getErrorMessage());
                        flowFile = session.putAllAttributes(flowFile, attributes);
                        failedFlowFiles.add(flowFile);
                    } else {
                        flowFile = session.putAllAttributes(flowFile, attributes);
                        successfulFlowFiles.add(flowFile);
                    }
                }
            }
            // records is the same list stored in recordHash, so one clear() releases both.
            records.clear();
        }

        if (!failedFlowFiles.isEmpty()) {
            session.transfer(failedFlowFiles, REL_FAILURE);
            getLogger().error("Failed to publish to kinesis records {}", new Object[] { failedFlowFiles });
        }
        if (!successfulFlowFiles.isEmpty()) {
            session.transfer(successfulFlowFiles, REL_SUCCESS);
            getLogger().debug("Successfully published to kinesis records {}",
                    new Object[] { successfulFlowFiles });
        }

    } catch (final Exception exception) {
        getLogger().error("Failed to publish due to exception {} flowfiles {} ",
                new Object[] { exception, flowFiles });
        session.transfer(flowFiles, REL_FAILURE);
        context.yield();
    }
}