Example usage for com.amazonaws.services.dynamodbv2.model TableDescription getTableSizeBytes

List of usage examples for com.amazonaws.services.dynamodbv2.model TableDescription getTableSizeBytes

Introduction

On this page you can find example usages of com.amazonaws.services.dynamodbv2.model TableDescription getTableSizeBytes.

Prototype


public Long getTableSizeBytes() 

Source Link

Document

The total size of the specified table, in bytes.

Usage

From source file:aws.example.dynamodb.DescribeTable.java

License:Open Source License

/**
 * Prints a summary of a DynamoDB table: name, ARN, status, item count,
 * size in bytes, provisioned throughput, and attribute definitions.
 * Expects a single command-line argument naming the table; exits with
 * status 1 on missing argument or on an AWS service error.
 *
 * @param args args[0] is the name of the table to describe
 */
public static void main(String[] args) {
    final String USAGE = "\n" + "Usage:\n" + "    DescribeTable <table>\n\n" + "Where:\n"
            + "    table - the table to get information about.\n\n" + "Example:\n"
            + "    DescribeTable HelloTable\n";

    if (args.length < 1) {
        System.out.println(USAGE);
        System.exit(1);
    }

    String table_name = args[0];
    System.out.format("Getting description for %s\n\n", table_name);

    final AmazonDynamoDBClient ddb = new AmazonDynamoDBClient();

    try {
        TableDescription table_info = ddb.describeTable(table_name).getTable();

        if (table_info != null) {
            System.out.format("Table name  : %s\n", table_info.getTableName());
            System.out.format("Table ARN   : %s\n", table_info.getTableArn());
            System.out.format("Status      : %s\n", table_info.getTableStatus());
            System.out.format("Item count  : %d\n", table_info.getItemCount().longValue());
            System.out.format("Size (bytes): %d\n", table_info.getTableSizeBytes().longValue());

            ProvisionedThroughputDescription throughput_info = table_info.getProvisionedThroughput();
            System.out.println("Throughput");
            System.out.format("  Read Capacity : %d\n", throughput_info.getReadCapacityUnits().longValue());
            System.out.format("  Write Capacity: %d\n", throughput_info.getWriteCapacityUnits().longValue());

            List<AttributeDefinition> attributes = table_info.getAttributeDefinitions();
            System.out.println("Attributes");
            for (AttributeDefinition a : attributes) {
                System.out.format("  %s (%s)\n", a.getAttributeName(), a.getAttributeType());
            }
        }
    } catch (AmazonServiceException e) {
        // Service-side failure (e.g. table not found): report and exit non-zero.
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    System.out.println("\nDone!");
}

From source file:com.erudika.para.persistence.AWSDynamoUtils.java

License:Apache License

/**
 * Gives basic information about a DynamoDB table (status, creation date, size).
 * @param appid name of the {@link com.erudika.para.core.App}
 * @return a map//w  ww  .java2  s.c  o  m
 */
/**
 * Gives basic information about a DynamoDB table (status, creation date, size).
 * @param appid name of the {@link com.erudika.para.core.App}
 * @return a map of table metadata; empty map if {@code appid} is blank or the lookup fails
 */
public static Map<String, Object> getTableStatus(final String appid) {
    if (StringUtils.isBlank(appid)) {
        return Collections.emptyMap();
    }
    try {
        final TableDescription td = getClient().describeTable(getTableNameForAppid(appid)).getTable();
        // Plain HashMap instead of the double-brace (anonymous subclass) idiom:
        // that idiom creates an extra class per use and retains a hidden
        // reference to the enclosing scope.
        Map<String, Object> status = new HashMap<String, Object>();
        status.put("id", appid);
        status.put("status", td.getTableStatus());
        status.put("created", td.getCreationDateTime().getTime());
        status.put("sizeBytes", td.getTableSizeBytes());
        status.put("itemCount", td.getItemCount());
        status.put("readCapacityUnits", td.getProvisionedThroughput().getReadCapacityUnits());
        status.put("writeCapacityUnits", td.getProvisionedThroughput().getWriteCapacityUnits());
        return status;
    } catch (Exception e) {
        // Boundary method: log and fall through to an empty result rather than propagate.
        logger.error(null, e);
    }
    return Collections.emptyMap();
}

From source file:com.haskins.cloudtrailviewer.dialog.resourcedetail.detailpanels.DbTableDetail.java

License:Open Source License

/**
 * Populates the detail panel with one row per non-null property of the
 * described DynamoDB table. Rows are added in a fixed order; the creation
 * date is formatted before display.
 */
private void buildUI(DescribeTableResult detail) {

    this.add(primaryScrollPane, BorderLayout.CENTER);

    TableDescription table = detail.getTable();
    if (table == null) {
        return;
    }

    // Creation date needs formatting, so it is handled separately.
    if (table.getCreationDateTime() != null) {
        primaryTableModel.addRow(new Object[] { "Created", getDateString(table.getCreationDateTime()) });
    }
    addRowIfPresent("Item Count", table.getItemCount());
    addRowIfPresent("Latest Stream Arn", table.getLatestStreamArn());
    addRowIfPresent("Latest Stream Label", table.getLatestStreamLabel());
    addRowIfPresent("Arn", table.getTableArn());
    addRowIfPresent("Name", table.getTableName());
    addRowIfPresent("Size (bytes)", table.getTableSizeBytes());
    addRowIfPresent("Status", table.getTableStatus());
}

/** Adds a label/value row to the primary table model, skipping null values. */
private void addRowIfPresent(String label, Object value) {
    if (value != null) {
        primaryTableModel.addRow(new Object[] { label, value });
    }
}

From source file:org.apache.hadoop.dynamodb.DynamoDBUtil.java

License:Open Source License

/**
 * Computes the average item size of a table in bytes.
 *
 * @param description the table description to read counts from
 * @return table size divided by item count, or {@code 0.0} when the item
 *         count is zero or either value is unavailable
 */
public static Double calculateAverageItemSize(TableDescription description) {
    Long itemCount = description.getItemCount();
    Long sizeBytes = description.getTableSizeBytes();
    // Guard against null before unboxing: the original compared
    // `getItemCount() != 0`, which throws NPE when the SDK returns null.
    if (itemCount == null || itemCount.longValue() == 0L || sizeBytes == null) {
        return 0.0;
    }
    return sizeBytes.doubleValue() / itemCount.doubleValue();
}

From source file:org.apache.hadoop.dynamodb.tools.DynamoDBExport.java

License:Open Source License

/**
 * Copies DynamoDB table metadata (throughput, item count, size, average item
 * size) and optional scan tuning parameters into the job configuration.
 *
 * @param jobConf       job configuration to populate
 * @param tableName     DynamoDB table to describe
 * @param readRatio     optional fraction of read throughput to use; skipped if null
 * @param totalSegments optional number of parallel scan segments; skipped if null
 */
private void setTableProperties(JobConf jobConf, String tableName, Double readRatio, Integer totalSegments) {
    jobConf.set(DynamoDBConstants.TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.INPUT_TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.OUTPUT_TABLE_NAME, tableName);

    DynamoDBClient client = new DynamoDBClient(jobConf);
    try {
        TableDescription description = client.describeTable(tableName);

        Long readThroughput = description.getProvisionedThroughput().getReadCapacityUnits();
        Long writeThroughput = description.getProvisionedThroughput().getWriteCapacityUnits();
        Long itemCount = description.getItemCount();
        Long tableSizeBytes = description.getTableSizeBytes();
        Double averageItemSize = DynamoDBUtil.calculateAverageItemSize(description);

        jobConf.set(DynamoDBConstants.READ_THROUGHPUT, readThroughput.toString());
        jobConf.set(DynamoDBConstants.WRITE_THROUGHPUT, writeThroughput.toString());
        jobConf.set(DynamoDBConstants.ITEM_COUNT, itemCount.toString());
        jobConf.set(DynamoDBConstants.TABLE_SIZE_BYTES, tableSizeBytes.toString());
        jobConf.set(DynamoDBConstants.AVG_ITEM_SIZE, averageItemSize.toString());

        log.info("Read throughput:       " + readThroughput);
        log.info("Write throughput:      " + writeThroughput);
        log.info("Item count:            " + itemCount);
        log.info("Table size:            " + tableSizeBytes);
        log.info("Average item size:     " + averageItemSize);

        // Optional properties
        if (readRatio != null) {
            jobConf.set(DynamoDBConstants.THROUGHPUT_READ_PERCENT, readRatio.toString());
            log.info("Throughput read ratio: " + readRatio);
        }

        if (totalSegments != null) {
            jobConf.set(DynamoDBConstants.SCAN_SEGMENTS, totalSegments.toString());
            log.info("Total segment count:   " + totalSegments);
        }
    } finally {
        // Always release the client, mirroring DynamoDBStorageHandler's handling;
        // the original leaked it.
        client.close();
    }
}

From source file:org.apache.hadoop.hive.dynamodb.DynamoDBStorageHandler.java

License:Open Source License

@Override
public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    DynamoDBClient client = new DynamoDBClient(conf,
            tableDesc.getProperties().getProperty(DynamoDBConstants.REGION));

    try {/*w  w  w .  j a va  2s .c o  m*/
        String tableName = HiveDynamoDBUtil.getDynamoDBTableName(
                tableDesc.getProperties().getProperty(DynamoDBConstants.TABLE_NAME), tableDesc.getTableName());
        TableDescription description = client.describeTable(tableName);
        Double averageItemSize = DynamoDBUtil.calculateAverageItemSize(description);
        log.info("Average item size: " + averageItemSize);

        String endpoint = conf.get(DynamoDBConstants.ENDPOINT);
        if (!Strings.isNullOrEmpty(tableDesc.getProperties().getProperty(DynamoDBConstants.ENDPOINT))) {
            endpoint = tableDesc.getProperties().getProperty(DynamoDBConstants.ENDPOINT);
        }

        if (!Strings.isNullOrEmpty(endpoint)) {
            jobProperties.put(DynamoDBConstants.ENDPOINT, endpoint);
        }

        if (!Strings.isNullOrEmpty(tableDesc.getProperties().getProperty(DynamoDBConstants.REGION))) {
            jobProperties.put(DynamoDBConstants.REGION,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.REGION));
        }

        jobProperties.put(DynamoDBConstants.OUTPUT_TABLE_NAME, tableName);
        jobProperties.put(DynamoDBConstants.INPUT_TABLE_NAME, tableName);
        jobProperties.put(DynamoDBConstants.TABLE_NAME, tableName);

        Map<String, String> hiveToDynamoDBSchemaMapping = HiveDynamoDBUtil.getHiveToDynamoDBSchemaMapping(
                tableDesc.getProperties().getProperty(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING));

        // Column map can be null if only full backup is being used
        if (hiveToDynamoDBSchemaMapping != null) {
            jobProperties.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
                    HiveDynamoDBUtil.toJsonString(hiveToDynamoDBSchemaMapping));
        }

        if (tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_READ_PERCENT) != null) {
            jobProperties.put(DynamoDBConstants.THROUGHPUT_READ_PERCENT,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_READ_PERCENT));
        }

        if (tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT) != null) {
            jobProperties.put(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT));
        }

        String readThroughput = description.getProvisionedThroughput().getReadCapacityUnits().toString();
        String writeThroughput = description.getProvisionedThroughput().getWriteCapacityUnits().toString();

        jobProperties.put(DynamoDBConstants.READ_THROUGHPUT, readThroughput);
        jobProperties.put(DynamoDBConstants.WRITE_THROUGHPUT, writeThroughput);
        jobProperties.put(DynamoDBConstants.ITEM_COUNT, description.getItemCount().toString());
        jobProperties.put(DynamoDBConstants.TABLE_SIZE_BYTES, description.getTableSizeBytes().toString());
        jobProperties.put(DynamoDBConstants.AVG_ITEM_SIZE, averageItemSize.toString());

        log.info("Average item size: " + averageItemSize);
        log.info("Item count: " + description.getItemCount());
        log.info("Table size: " + description.getTableSizeBytes());
        log.info("Read throughput: " + readThroughput);
        log.info("Write throughput: " + writeThroughput);

    } finally {
        client.close();
    }
}

From source file:org.xmlsh.aws.util.AWSDDBCommand.java

License:BSD License

/**
 * Serializes a DynamoDB table description as a {@code <table>} XML element:
 * scalar properties as attributes, followed by child elements for attribute
 * definitions, key schema, indexes, and provisioned throughput.
 *
 * @param tableDescription the table description to serialize
 * @throws XMLStreamException if the underlying XML writer fails
 */
protected void writeTableDescription(TableDescription tableDescription) throws XMLStreamException {
    startElement("table");
    attribute("name", tableDescription.getTableName());
    attribute("status", tableDescription.getTableStatus());
    attribute("create-date", Util.formatXSDateTime(tableDescription.getCreationDateTime()));
    // "item-count" was written twice in the original; a duplicate XML
    // attribute on the same element is not well-formed.
    attribute("item-count", tableDescription.getItemCount());
    attribute("size", tableDescription.getTableSizeBytes());

    writeAttributeDefinitions(tableDescription.getAttributeDefinitions());
    writeKeySchemaList(tableDescription.getKeySchema());
    writeLocalSecondaryIndexes(tableDescription.getLocalSecondaryIndexes());
    writeGlobalSecondaryIndexes(tableDescription.getGlobalSecondaryIndexes());
    writeProvisionedThroughput(tableDescription.getProvisionedThroughput());

}