Example usage for com.amazonaws.services.dynamodbv2.model TableDescription getProvisionedThroughput

List of usage examples for com.amazonaws.services.dynamodbv2.model TableDescription getProvisionedThroughput

Introduction

In this page you can find the example usage for com.amazonaws.services.dynamodbv2.model TableDescription getProvisionedThroughput.

Prototype


public ProvisionedThroughputDescription getProvisionedThroughput() 

Source Link

Documentation

The provisioned throughput settings for the table, consisting of read and write capacity units, along with data about increases and decreases.

Usage

From source file:aws.example.dynamodb.DescribeTable.java

License:Open Source License

/**
 * Entry point: describes the DynamoDB table named by the first command-line
 * argument, printing its name, ARN, status, item count, size, provisioned
 * throughput, and attribute definitions.
 *
 * Exits with status 1 when no table name is supplied or the service call
 * fails with an AmazonServiceException.
 */
public static void main(String[] args) {
    final String USAGE = "\n" + "Usage:\n" + "    DescribeTable <table>\n\n" + "Where:\n"
            + "    table - the table to get information about.\n\n" + "Example:\n"
            + "    DescribeTable HelloTable\n";

    if (args.length < 1) {
        System.out.println(USAGE);
        System.exit(1);
    }

    String table_name = args[0];
    System.out.format("Getting description for %s\n\n", table_name);

    final AmazonDynamoDBClient ddb = new AmazonDynamoDBClient();

    try {
        // DescribeTable returns null for the table when it does not exist.
        TableDescription table_info = ddb.describeTable(table_name).getTable();

        if (table_info != null) {
            System.out.format("Table name  : %s\n", table_info.getTableName());
            System.out.format("Table ARN   : %s\n", table_info.getTableArn());
            System.out.format("Status      : %s\n", table_info.getTableStatus());
            System.out.format("Item count  : %d\n", table_info.getItemCount().longValue());
            System.out.format("Size (bytes): %d\n", table_info.getTableSizeBytes().longValue());

            ProvisionedThroughputDescription throughput_info = table_info.getProvisionedThroughput();
            System.out.println("Throughput");
            System.out.format("  Read Capacity : %d\n", throughput_info.getReadCapacityUnits().longValue());
            System.out.format("  Write Capacity: %d\n", throughput_info.getWriteCapacityUnits().longValue());

            List<AttributeDefinition> attributes = table_info.getAttributeDefinitions();
            System.out.println("Attributes");
            for (AttributeDefinition a : attributes) {
                System.out.format("  %s (%s)\n", a.getAttributeName(), a.getAttributeType());
            }
        }
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    System.out.println("\nDone!");
}

From source file:com.erudika.para.persistence.AWSDynamoUtils.java

License:Apache License

/**
 * Gives basic information about a DynamoDB table (status, creation date, size).
 * @param appid name of the {@link com.erudika.para.core.App}
 * @return a map//from  ww  w  . j a  v  a2s .c  o m
 */
/**
 * Gives basic information about a DynamoDB table (status, creation date, size).
 * <p>
 * Returns an empty map when {@code appid} is blank or when the DescribeTable
 * call fails (the failure is logged, not rethrown).
 *
 * @param appid name of the {@link com.erudika.para.core.App}
 * @return a map of table metadata, or an empty map on blank input / error
 */
public static Map<String, Object> getTableStatus(final String appid) {
    if (StringUtils.isBlank(appid)) {
        return Collections.emptyMap();
    }
    try {
        final TableDescription td = getClient().describeTable(getTableNameForAppid(appid)).getTable();
        // Hoisted so the throughput description is fetched once.
        final ProvisionedThroughputDescription throughput = td.getProvisionedThroughput();
        // Plain HashMap instead of double-brace initialization, which creates
        // a needless anonymous subclass per call site.
        final Map<String, Object> status = new HashMap<String, Object>();
        status.put("id", appid);
        status.put("status", td.getTableStatus());
        status.put("created", td.getCreationDateTime().getTime());
        status.put("sizeBytes", td.getTableSizeBytes());
        status.put("itemCount", td.getItemCount());
        status.put("readCapacityUnits", throughput.getReadCapacityUnits());
        status.put("writeCapacityUnits", throughput.getWriteCapacityUnits());
        return status;
    } catch (Exception e) {
        logger.error(null, e);
    }
    return Collections.emptyMap();
}

From source file:Database.TableFunctions.java

/**
 * Prints the name, status, and provisioned read/write capacity of the
 * table identified by the class-level {@code tablename} field, using the
 * low-level DescribeTable API on the class-level {@code dynamoDB} client.
 */
public static void getTableInformation() {
    TableDescription tableDescription = dynamoDB
            .describeTable(new DescribeTableRequest().withTableName(tablename)).getTable();
    // Fetch the throughput description once rather than twice in the format call.
    ProvisionedThroughputDescription throughput = tableDescription.getProvisionedThroughput();
    System.out.format(
            "Name: %s:\n" + "Status: %s \n" + "Provisioned Throughput (read capacity units/sec): %d \n"
                    + "Provisioned Throughput (write capacity units/sec): %d \n",
            tableDescription.getTableName(), tableDescription.getTableStatus(),
            throughput.getReadCapacityUnits(), throughput.getWriteCapacityUnits());
}

From source file:dynamodb.CrudOperationsOnDynamoDBTable.java

License:Open Source License

/**
 * Describes the table named by the class-level {@code tableName} field via
 * the DynamoDB document API ({@code dynamoDB.getTable(...).describe()}) and
 * prints its name, status, and provisioned read/write capacity.
 */
static void getTableInformation() {

    System.out.println("Describing " + tableName);

    TableDescription tableDescription = dynamoDB.getTable(tableName).describe();
    // Fetch the throughput description once rather than twice in the format call.
    ProvisionedThroughputDescription throughput = tableDescription.getProvisionedThroughput();
    System.out.format(
            "Name: %s:\n" + "Status: %s \n" + "Provisioned Throughput (read capacity units/sec): %d \n"
                    + "Provisioned Throughput (write capacity units/sec): %d \n",
            tableDescription.getTableName(), tableDescription.getTableStatus(),
            throughput.getReadCapacityUnits(), throughput.getWriteCapacityUnits());
}

From source file:jp.classmethod.aws.dynamodb.DynamoDbRepository.java

License:Open Source License

/**
 * Reconciles the live table's provisioned throughput (table-level and per-GSI)
 * with the configured values in {@code ptMap}, issuing a single UpdateTable
 * call when anything differs.
 *
 * @param desc the current table description; must not be null
 * @return the updated table description, or {@code null} when no update was needed
 */
private TableDescription updateTable(TableDescription desc) {
    Preconditions.checkNotNull(desc, "table description must not be null");
    UpdateTableSpec spec = null;
    if (!ptMap.get(tableNameSuffix).equals(convert(desc.getProvisionedThroughput()))) {
        // The table's throughput differs from the ptMap configuration:
        // schedule a table-level throughput update.
        spec = new UpdateTableSpec().withProvisionedThroughput(ptMap.get(tableNameSuffix));
    }
    final List<GlobalSecondaryIndexUpdate> gsiUpdates = new ArrayList<>();
    if (desc.getGlobalSecondaryIndexes() != null && !desc.getGlobalSecondaryIndexes().isEmpty()) {
        desc.getGlobalSecondaryIndexes().forEach(gsi -> {
            // For each GSI, compare its live throughput with the configured one
            // (indexes absent from ptMap are left untouched).
            final String indexName = gsi.getIndexName();
            ProvisionedThroughput pt = ptMap.get(indexName);
            if (pt != null && !pt.equals(convert(gsi.getProvisionedThroughput()))) {
                gsiUpdates
                        .add(new GlobalSecondaryIndexUpdate().withUpdate(new UpdateGlobalSecondaryIndexAction()
                                .withIndexName(indexName).withProvisionedThroughput(pt)));
            }
        });
    }
    if (!gsiUpdates.isEmpty()) {
        if (spec == null) {
            spec = new UpdateTableSpec();
        }
        spec.withGlobalSecondaryIndexUpdates(gsiUpdates);
    }
    // No differences found -> nothing to do; avoid an empty UpdateTable call.
    return spec == null ? null : table.updateTable(spec);
}

From source file:org.apache.beam.sdk.io.aws.dynamodb.DynamoDBIOTestHelper.java

License:Apache License

/**
 * Creates a test table via {@code createDynamoTable} and asserts the resulting
 * description matches the expected fixture: both key attributes present,
 * 1000 read/write capacity units, ACTIVE status, and the localstack-style ARN.
 *
 * @param tableName name of the table to create and verify
 */
static void createTestTable(String tableName) {
    CreateTableResult res = createDynamoTable(tableName);

    TableDescription tableDesc = res.getTableDescription();

    Assert.assertEquals(tableName, tableDesc.getTableName());
    Assert.assertTrue(tableDesc.getKeySchema().toString().contains(ATTR_NAME_1));
    Assert.assertTrue(tableDesc.getKeySchema().toString().contains(ATTR_NAME_2));

    // JUnit's assertEquals takes (expected, actual) — expected value first.
    Assert.assertEquals(Long.valueOf(1000), tableDesc.getProvisionedThroughput().getReadCapacityUnits());
    Assert.assertEquals(Long.valueOf(1000), tableDesc.getProvisionedThroughput().getWriteCapacityUnits());
    Assert.assertEquals("ACTIVE", tableDesc.getTableStatus());
    Assert.assertEquals("arn:aws:dynamodb:us-east-1:000000000000:table/" + tableName, tableDesc.getTableArn());

    ListTablesResult tables = dynamoDBClient.listTables();
    Assert.assertEquals(1, tables.getTableNames().size());
}

From source file:org.apache.hadoop.dynamodb.tools.DynamoDBExport.java

License:Open Source License

/**
 * Populates the export job configuration with the table's name, provisioned
 * throughput, item count, size, and average item size, plus optional scan
 * tuning parameters.
 *
 * @param jobConf       job configuration to populate
 * @param tableName     source DynamoDB table
 * @param readRatio     optional fraction of read capacity to consume; skipped when null
 * @param totalSegments optional number of parallel scan segments; skipped when null
 */
private void setTableProperties(JobConf jobConf, String tableName, Double readRatio, Integer totalSegments) {
    jobConf.set(DynamoDBConstants.TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.INPUT_TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.OUTPUT_TABLE_NAME, tableName);

    DynamoDBClient client = new DynamoDBClient(jobConf);
    TableDescription description = client.describeTable(tableName);

    Long readThroughput = description.getProvisionedThroughput().getReadCapacityUnits();
    Long writeThroughput = description.getProvisionedThroughput().getWriteCapacityUnits();
    Long itemCount = description.getItemCount();
    Long tableSizeBytes = description.getTableSizeBytes();
    Double averageItemSize = DynamoDBUtil.calculateAverageItemSize(description);

    jobConf.set(DynamoDBConstants.READ_THROUGHPUT, readThroughput.toString());
    jobConf.set(DynamoDBConstants.WRITE_THROUGHPUT, writeThroughput.toString());
    jobConf.set(DynamoDBConstants.ITEM_COUNT, itemCount.toString());
    jobConf.set(DynamoDBConstants.TABLE_SIZE_BYTES, tableSizeBytes.toString());
    jobConf.set(DynamoDBConstants.AVG_ITEM_SIZE, averageItemSize.toString());

    log.info("Read throughput:       " + readThroughput);
    log.info("Write throughput:      " + writeThroughput);
    log.info("Item count:            " + itemCount);
    log.info("Table size:            " + tableSizeBytes);
    log.info("Average item size:     " + averageItemSize);

    // Optional properties
    if (readRatio != null) {
        jobConf.set(DynamoDBConstants.THROUGHPUT_READ_PERCENT, readRatio.toString());
        log.info("Throughput read ratio: " + readRatio);
    }

    if (totalSegments != null) {
        jobConf.set(DynamoDBConstants.SCAN_SEGMENTS, totalSegments.toString());
        log.info("Total segment count:   " + totalSegments);
    }
}

From source file:org.apache.hadoop.dynamodb.tools.DynamoDBImport.java

License:Open Source License

/**
 * Populates the import job configuration with the target table's name and
 * provisioned throughput, plus an optional write-capacity ratio.
 *
 * @param jobConf    job configuration to populate
 * @param tableName  destination DynamoDB table
 * @param writeRatio optional fraction of write capacity to consume; skipped when null
 */
private void setTableProperties(JobConf jobConf, String tableName, Double writeRatio) {
    jobConf.set(DynamoDBConstants.OUTPUT_TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.INPUT_TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.TABLE_NAME, tableName);

    DynamoDBClient client = new DynamoDBClient(jobConf);
    TableDescription description = client.describeTable(tableName);

    Long readThroughput = description.getProvisionedThroughput().getReadCapacityUnits();
    Long writeThroughput = description.getProvisionedThroughput().getWriteCapacityUnits();

    jobConf.set(DynamoDBConstants.READ_THROUGHPUT, readThroughput.toString());
    jobConf.set(DynamoDBConstants.WRITE_THROUGHPUT, writeThroughput.toString());

    log.info("Read throughput:       " + readThroughput);
    log.info("Write throughput:      " + writeThroughput);

    // Optional properties
    if (writeRatio != null) {
        jobConf.set(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT, writeRatio.toString());
        log.info("Throughput write ratio: " + writeRatio);
    }
}

From source file:org.apache.hadoop.hive.dynamodb.DynamoDBStorageHandler.java

License:Open Source License

/**
 * Copies DynamoDB connection and table metadata from the Hive table
 * definition (and the live table description) into the MapReduce job
 * properties: endpoint/region, table names, column mapping, throughput
 * percentages, live throughput, item count, size, and average item size.
 * The DynamoDB client is always closed, even when DescribeTable fails.
 */
@Override
public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    DynamoDBClient client = new DynamoDBClient(conf,
            tableDesc.getProperties().getProperty(DynamoDBConstants.REGION));

    try {
        String tableName = HiveDynamoDBUtil.getDynamoDBTableName(
                tableDesc.getProperties().getProperty(DynamoDBConstants.TABLE_NAME), tableDesc.getTableName());
        TableDescription description = client.describeTable(tableName);
        Double averageItemSize = DynamoDBUtil.calculateAverageItemSize(description);

        // Endpoint from the table properties overrides the one in conf.
        String endpoint = conf.get(DynamoDBConstants.ENDPOINT);
        if (!Strings.isNullOrEmpty(tableDesc.getProperties().getProperty(DynamoDBConstants.ENDPOINT))) {
            endpoint = tableDesc.getProperties().getProperty(DynamoDBConstants.ENDPOINT);
        }

        if (!Strings.isNullOrEmpty(endpoint)) {
            jobProperties.put(DynamoDBConstants.ENDPOINT, endpoint);
        }

        if (!Strings.isNullOrEmpty(tableDesc.getProperties().getProperty(DynamoDBConstants.REGION))) {
            jobProperties.put(DynamoDBConstants.REGION,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.REGION));
        }

        jobProperties.put(DynamoDBConstants.OUTPUT_TABLE_NAME, tableName);
        jobProperties.put(DynamoDBConstants.INPUT_TABLE_NAME, tableName);
        jobProperties.put(DynamoDBConstants.TABLE_NAME, tableName);

        Map<String, String> hiveToDynamoDBSchemaMapping = HiveDynamoDBUtil.getHiveToDynamoDBSchemaMapping(
                tableDesc.getProperties().getProperty(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING));

        // Column map can be null if only full backup is being used
        if (hiveToDynamoDBSchemaMapping != null) {
            jobProperties.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
                    HiveDynamoDBUtil.toJsonString(hiveToDynamoDBSchemaMapping));
        }

        if (tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_READ_PERCENT) != null) {
            jobProperties.put(DynamoDBConstants.THROUGHPUT_READ_PERCENT,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_READ_PERCENT));
        }

        if (tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT) != null) {
            jobProperties.put(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT));
        }

        String readThroughput = description.getProvisionedThroughput().getReadCapacityUnits().toString();
        String writeThroughput = description.getProvisionedThroughput().getWriteCapacityUnits().toString();

        jobProperties.put(DynamoDBConstants.READ_THROUGHPUT, readThroughput);
        jobProperties.put(DynamoDBConstants.WRITE_THROUGHPUT, writeThroughput);
        jobProperties.put(DynamoDBConstants.ITEM_COUNT, description.getItemCount().toString());
        jobProperties.put(DynamoDBConstants.TABLE_SIZE_BYTES, description.getTableSizeBytes().toString());
        jobProperties.put(DynamoDBConstants.AVG_ITEM_SIZE, averageItemSize.toString());

        // Log each value exactly once (the original logged average item size twice).
        log.info("Average item size: " + averageItemSize);
        log.info("Item count: " + description.getItemCount());
        log.info("Table size: " + description.getTableSizeBytes());
        log.info("Read throughput: " + readThroughput);
        log.info("Write throughput: " + writeThroughput);

    } finally {
        client.close();
    }
}

From source file:org.xmlsh.aws.util.AWSDDBCommand.java

License:BSD License

/**
 * Serializes a table description as a {@code <table>} XML element: name,
 * status, creation date, item count, and size as attributes, followed by
 * child elements for attribute definitions, key schema, secondary indexes,
 * and provisioned throughput.
 *
 * @param tableDescription the description to serialize
 * @throws XMLStreamException if the underlying XML writer fails
 */
protected void writeTableDescription(TableDescription tableDescription) throws XMLStreamException {
    startElement("table");
    attribute("name", tableDescription.getTableName());
    attribute("status", tableDescription.getTableStatus());
    attribute("create-date", Util.formatXSDateTime(tableDescription.getCreationDateTime()));
    // Each attribute is written once (the original emitted item-count twice).
    attribute("item-count", tableDescription.getItemCount());
    attribute("size", tableDescription.getTableSizeBytes());

    writeAttributeDefinitions(tableDescription.getAttributeDefinitions());
    writeKeySchemaList(tableDescription.getKeySchema());
    writeLocalSecondaryIndexes(tableDescription.getLocalSecondaryIndexes());
    writeGlobalSecondaryIndexes(tableDescription.getGlobalSecondaryIndexes());
    writeProvisionedThroughput(tableDescription.getProvisionedThroughput());

}