Example usage for com.amazonaws.services.dynamodbv2.document TableWriteItems TableWriteItems

Introduction

On this page you can find example usage for the com.amazonaws.services.dynamodbv2.document TableWriteItems constructor.

Prototype

public TableWriteItems(String tableName) 
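The constructor only names the target table; items to put and primary keys to delete are attached afterwards, and the request is executed through the document API's DynamoDB client. Below is a minimal sketch; the table name "MyTable" and its "id" key attribute are illustrative placeholders, not taken from the examples that follow.

import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.document.BatchWriteItemOutcome;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.document.Item;
import com.amazonaws.services.dynamodbv2.document.TableWriteItems;

public class TableWriteItemsSketch {
    public static void main(String[] args) {
        DynamoDB dynamoDB = new DynamoDB(AmazonDynamoDBClientBuilder.defaultClient());

        // Name the target table, then attach the items to write.
        TableWriteItems writeItems = new TableWriteItems("MyTable")
                .withItemsToPut(new Item().withPrimaryKey("id", "item-1").withString("payload", "hello"));

        // One call submits the whole batch for this table.
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(writeItems);
        System.out.println("Unprocessed items: " + outcome.getUnprocessedItems().size());
    }
}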

Usage

From source file: com.telefonica.iot.cygnus.backends.dynamo.DynamoDBBackendImpl.java

License: Open Source License

@Override
public void putItems(String tableName, ArrayList<Item> aggregation) throws Exception {
    try {
        TableWriteItems tableWriteItems = new TableWriteItems(tableName);
        tableWriteItems.withItemsToPut(aggregation);
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);
    } catch (Exception e) {
        LOGGER.error("Error while putting a batch of items in the table " + tableName + ". Details="
                + e.getMessage());
    } // try catch
}
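The snippet above discards the BatchWriteItemOutcome, so writes that DynamoDB leaves unprocessed (for example under throttling) are silently lost. A caller that needs every item persisted could drain them with the SDK's batchWriteItemUnprocessed call; a sketch, assuming the same dynamoDB document client:

BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);
// Resubmit anything DynamoDB could not process in the first round trip.
Map<String, List<WriteRequest>> unprocessed = outcome.getUnprocessedItems();
while (!unprocessed.isEmpty()) {
    outcome = dynamoDB.batchWriteItemUnprocessed(unprocessed);
    unprocessed = outcome.getUnprocessedItems();
}

In production code a backoff between retries would be advisable; WriteRequest comes from com.amazonaws.services.dynamodbv2.model.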

From source file: org.apache.nifi.processors.aws.dynamodb.DeleteDynamoDB.java

License: Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session
            .get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.size() == 0) {
        return;
    }

    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();

    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();

    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String hashKeyValueType = context.getProperty(HASH_KEY_VALUE_TYPE).getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyValueType = context.getProperty(RANGE_KEY_VALUE_TYPE).getValue();

    TableWriteItems tableWriteItems = new TableWriteItems(table);

    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);

        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }

        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }

        if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
            tableWriteItems.addHashOnlyPrimaryKeysToDelete(hashKeyName, hashKeyValue);
        } else {
            tableWriteItems.addHashAndRangePrimaryKeyToDelete(hashKeyName, hashKeyValue, rangeKeyName,
                    rangeKeyValue);
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
    }

    if (keysToFlowFileMap.isEmpty()) {
        return;
    }

    final DynamoDB dynamoDB = getDynamoDB();

    try {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);

        handleUnprocessedItems(session, keysToFlowFileMap, table, hashKeyName, hashKeyValueType, rangeKeyName,
                rangeKeyValueType, outcome);

        // All items not reported as unprocessed were deleted successfully
        for (FlowFile flowFile : keysToFlowFileMap.values()) {
            getLogger().debug("Successfully deleted item from dynamodb : " + table);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}

From source file: org.apache.nifi.processors.aws.dynamodb.PutDynamoDB.java

License: Apache License

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    List<FlowFile> flowFiles = session
            .get(context.getProperty(BATCH_SIZE).evaluateAttributeExpressions().asInteger());
    if (flowFiles == null || flowFiles.size() == 0) {
        return;
    }

    Map<ItemKeys, FlowFile> keysToFlowFileMap = new HashMap<>();

    final String table = context.getProperty(TABLE).evaluateAttributeExpressions().getValue();

    final String hashKeyName = context.getProperty(HASH_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String hashKeyValueType = context.getProperty(HASH_KEY_VALUE_TYPE).getValue();
    final String rangeKeyName = context.getProperty(RANGE_KEY_NAME).evaluateAttributeExpressions().getValue();
    final String rangeKeyValueType = context.getProperty(RANGE_KEY_VALUE_TYPE).getValue();
    final String jsonDocument = context.getProperty(JSON_DOCUMENT).evaluateAttributeExpressions().getValue();
    final String charset = context.getProperty(DOCUMENT_CHARSET).evaluateAttributeExpressions().getValue();

    TableWriteItems tableWriteItems = new TableWriteItems(table);

    for (FlowFile flowFile : flowFiles) {
        final Object hashKeyValue = getValue(context, HASH_KEY_VALUE_TYPE, HASH_KEY_VALUE, flowFile);
        final Object rangeKeyValue = getValue(context, RANGE_KEY_VALUE_TYPE, RANGE_KEY_VALUE, flowFile);

        if (!isHashKeyValueConsistent(hashKeyName, hashKeyValue, session, flowFile)) {
            continue;
        }

        if (!isRangeKeyValueConsistent(rangeKeyName, rangeKeyValue, session, flowFile)) {
            continue;
        }

        if (!isDataValid(flowFile, jsonDocument)) {
            flowFile = session.putAttribute(flowFile, AWS_DYNAMO_DB_ITEM_SIZE_ERROR,
                    "Max size of item + attribute should be 400 KB but was "
                            + (flowFile.getSize() + jsonDocument.length()));
            session.transfer(flowFile, REL_FAILURE);
            continue;
        }

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        session.exportTo(flowFile, baos);

        try {
            if (rangeKeyValue == null || StringUtils.isBlank(rangeKeyValue.toString())) {
                tableWriteItems.addItemToPut(new Item().withKeyComponent(hashKeyName, hashKeyValue)
                        .withJSON(jsonDocument, IOUtils.toString(baos.toByteArray(), charset)));
            } else {
                tableWriteItems.addItemToPut(new Item().withKeyComponent(hashKeyName, hashKeyValue)
                        .withKeyComponent(rangeKeyName, rangeKeyValue)
                        .withJSON(jsonDocument, IOUtils.toString(baos.toByteArray(), charset)));
            }
        } catch (IOException ioe) {
            getLogger().error("IOException while creating put item : " + ioe.getMessage());
            flowFile = session.putAttribute(flowFile, DYNAMODB_ITEM_IO_ERROR, ioe.getMessage());
            session.transfer(flowFile, REL_FAILURE);
            continue; // do not add a flowfile that already failed to the batch
        }
        keysToFlowFileMap.put(new ItemKeys(hashKeyValue, rangeKeyValue), flowFile);
    }

    if (keysToFlowFileMap.isEmpty()) {
        return;
    }

    final DynamoDB dynamoDB = getDynamoDB();

    try {
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(tableWriteItems);

        handleUnprocessedItems(session, keysToFlowFileMap, table, hashKeyName, hashKeyValueType, rangeKeyName,
                rangeKeyValueType, outcome);

        // All items not reported as unprocessed were put successfully
        for (FlowFile flowFile : keysToFlowFileMap.values()) {
            getLogger().debug("Successfully posted items to dynamodb : " + table);
            session.transfer(flowFile, REL_SUCCESS);
        }
    } catch (AmazonServiceException exception) {
        getLogger().error("Could not process flowFiles due to service exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processServiceException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (AmazonClientException exception) {
        getLogger().error("Could not process flowFiles due to client exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processClientException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    } catch (Exception exception) {
        getLogger().error("Could not process flowFiles due to exception : " + exception.getMessage());
        List<FlowFile> failedFlowFiles = processException(session, flowFiles, exception);
        session.transfer(failedFlowFiles, REL_FAILURE);
    }
}

From source file: org.chodavarapu.jgitaws.repositories.PackDescriptionRepository.java

License: Eclipse Distribution License

private TableWriteItems createBatchRequest(List<PackDescriptionOperation> operations) {
    List<Item> itemsToPut = createItemsToPutList(operations);
    List<PrimaryKey> keysToDelete = createKeysToDeleteList(operations);

    TableWriteItems request = new TableWriteItems(configuration.getPackDescriptionsTableName());

    if (itemsToPut.size() > 0) {
        request.withItemsToPut(itemsToPut);
    }

    if (keysToDelete.size() > 0) {
        request.withPrimaryKeysToDelete(keysToDelete.toArray(new PrimaryKey[keysToDelete.size()]));
    }

    return request;
}
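As the method above relies on, a single TableWriteItems can carry both items to put and primary keys to delete for the same table, and the document client submits them in one BatchWriteItem call. A standalone sketch; the "PackDescriptions" table and its "name" key are illustrative placeholders:

TableWriteItems mixed = new TableWriteItems("PackDescriptions")
        .withItemsToPut(new Item().withPrimaryKey("name", "pack-1").withString("state", "active"))
        .withPrimaryKeysToDelete(new PrimaryKey("name", "pack-0"));
BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(mixed);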

From source file: ws.salient.aws.dynamodb.DynamoDBStore.java

License: Apache License

public void put(Session session, Command command, int requestIndex) {
    try {

        eventsToPut.offer(encrypt(
                new Item()
                        .withPrimaryKey("sessionId", session.getSessionId(), "timestamp",
                                NANO_INSTANT.format(command.getTimestamp().plusNanos(requestIndex)))
                        .withBinary("command", json.writeValueAsBytes(command)),
                session.getSecretKey(), "command"));

        if (session.store(command)) {
            byte[] sessionBytes = session.toByteArray();
            byte[] properties = json.writeValueAsBytes(session.getProperties());
            Item item = new Item()
                    .withPrimaryKey("sessionId", command.getSessionId(), "timestamp",
                            command.getTimestamp().toString())
                    .withString("accountId", command.getAccountId())
                    .withMap("factCount", session.getFactCount())
                    .withInt("processCount", session.getProcessCount())
                    .withString("knowledgeBaseId", command.getKnowledgeBaseId())
                    .withBinary("session", sessionBytes).withBinary("properties", properties);

            if (session.getSecretKey() != null) {
                item.withMap("secretKey", new LinkedHashMap());
                item.getMap("secretKey").put("encrypted", session.getEncryptedKey());
                item.getMap("secretKey").put("algorithm", session.getSecretKey().getAlgorithm());
            }

            if (session.getSecretKey() != null) {
                item = encrypt(item, session.getSecretKey(), "properties", "session");
            }
            sessionsToPut.offer(item);
        }

        putItemExecutor.execute(() -> {
            List<Item> eventItems = new LinkedList();
            Item eventItem = eventsToPut.poll();
            while (eventItem != null) {
                eventItems.add(eventItem);
                eventItem = eventsToPut.poll();
            }
            if (!eventItems.isEmpty()) {
                TableWriteItems eventWriteItems = new TableWriteItems("SalientSessionEvent")
                        .withItemsToPut(eventItems);
                log.info("Storing events: " + eventItems.size());
                BatchWriteItemOutcome result = dynamodb.batchWriteItem(eventWriteItems);
                if (!result.getUnprocessedItems().isEmpty()) {
                    log.error("Unprocessed items: " + result.toString());
                }
            }
            Map<String, Item> sessionItems = new LinkedHashMap();
            Item sessionItem = sessionsToPut.poll();
            while (sessionItem != null) {
                // Only store latest session item
                sessionItems.put(sessionItem.getString("sessionId"), sessionItem);
                sessionItem = sessionsToPut.poll();
            }
            if (!sessionItems.isEmpty()) {
                TableWriteItems sessionWriteItems = new TableWriteItems("SalientSession")
                        .withItemsToPut(sessionItems.values());
                log.info("Storing sessions: " + sessionItems.size());
                BatchWriteItemOutcome result = dynamodb.batchWriteItem(sessionWriteItems);
                if (!result.getUnprocessedItems().isEmpty()) {
                    log.error("Unprocessed items: " + result.toString());
                }
            }
        });
    } catch (JsonProcessingException ex) {
        throw new RuntimeException(ex);
    }
}
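One caveat for queue-draining code like the above: BatchWriteItem accepts at most 25 put/delete requests per call, so a large backlog has to be split across several TableWriteItems. A chunking sketch; the helper name and parameters are illustrative, not part of the source above:

private void putInChunks(DynamoDB dynamoDB, String tableName, List<Item> items) {
    final int maxBatchSize = 25; // BatchWriteItem hard limit per request
    for (int start = 0; start < items.size(); start += maxBatchSize) {
        List<Item> chunk = items.subList(start, Math.min(start + maxBatchSize, items.size()));
        TableWriteItems writeItems = new TableWriteItems(tableName).withItemsToPut(chunk);
        BatchWriteItemOutcome outcome = dynamoDB.batchWriteItem(writeItems);
        if (!outcome.getUnprocessedItems().isEmpty()) {
            // Resubmit throttled writes before moving to the next chunk.
            dynamoDB.batchWriteItemUnprocessed(outcome.getUnprocessedItems());
        }
    }
}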