Example usage for com.amazonaws.services.dynamodbv2.model TableDescription getItemCount

List of usage examples for com.amazonaws.services.dynamodbv2.model TableDescription getItemCount

Introduction

In this page you can find the example usage for com.amazonaws.services.dynamodbv2.model TableDescription getItemCount.

Prototype


public Long getItemCount() 

Source Link

Document

The number of items in the specified table.

Usage

From source file: RandomQuery1OnDynamoDB.java

License: Open Source License

/**
 * Describes the configured DynamoDB table, then issues 50,000 scans, each
 * filtering for a single randomly chosen numeric "id" value, and reports the
 * total elapsed wall-clock time.
 *
 * Fix: Random.nextInt(bound) throws IllegalArgumentException when bound <= 0,
 * so an empty table previously crashed the loop; we now bail out early.
 */
public static void main(String[] args) throws Exception {
    init();

    try {

        // Describe the table to obtain its metadata, including the item count.
        DescribeTableRequest describeTableRequest = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = dynamoDB.describeTable(describeTableRequest).getTable();
        System.out.println("Table Description: " + tableDescription);

        System.out.println("Table count :" + tableDescription.getItemCount());

        long lStartTime = new Date().getTime();
        System.out.println("start time: " + lStartTime);
        Random rn = new Random();
        Long lItem = tableDescription.getItemCount();
        int iNoOfItems = lItem.intValue();
        System.out.println("no of item " + lItem);
        // Guard: an empty table would make rn.nextInt(iNoOfItems) throw.
        if (iNoOfItems <= 0) {
            System.out.println("Table is empty - skipping random queries.");
            return;
        }
        // Fire 50,000 scans, each matching one randomly chosen "id".
        for (int i = 0; i <= 49999; i++) {
            String randomInt = Integer.toString(rn.nextInt(iNoOfItems));
            HashMap<String, Condition> scanFilter = new HashMap<String, Condition>();
            Condition condition = new Condition().withComparisonOperator(ComparisonOperator.EQ.toString())
                    .withAttributeValueList(new AttributeValue().withN(randomInt));
            scanFilter.put("id", condition);
            ScanRequest scanRequest = new ScanRequest(tableName).withScanFilter(scanFilter);
            ScanResult scanResult = dynamoDB.scan(scanRequest);
            System.out.println("Random No :" + randomInt + ":: Query no: " + (i + 1));

        }
        // Report elapsed wall-clock time for the whole batch of scans.
        long lEndTime = new Date().getTime();
        long difference = lEndTime - lStartTime;
        System.out.println("Elapsed milliseconds: " + difference);
        System.out.println("Elapsed seconds: " + difference * 0.001);
        System.out.println("Elapsed Minutes: " + (int) ((difference / (1000 * 60)) % 60));

    } catch (AmazonServiceException ase) {
        // Request reached AWS but was rejected (auth, validation, throttling, ...).
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to AWS, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        // Client-side failure: no response was received from AWS at all.
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with AWS, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}

From source file: RandomQuery2OnDynamoDB.java

License: Open Source License

/**
 * Describes the configured DynamoDB table, then issues 25,000 scans whose
 * random "id" values fall between 0.1% and 1% of the total item count, and
 * reports the total elapsed wall-clock time.
 *
 * Fix: when the table holds fewer than ~1000 items, both percentage bounds
 * round down to 0 and Random.nextInt(0) throws IllegalArgumentException; the
 * loop is now skipped in that case. Also removed the redundant inner (int)
 * cast in the percentage computations.
 */
public static void main(String[] args) throws Exception {
    init();

    try {

        // Describe the table to obtain its metadata, including the item count.
        DescribeTableRequest describeTableRequest = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = dynamoDB.describeTable(describeTableRequest).getTable();
        System.out.println("Table Description: " + tableDescription);
        long lStartTime = new Date().getTime();
        System.out.println("start time: " + lStartTime);
        Random rn = new Random();
        Long lItem = tableDescription.getItemCount();
        int iNoOfItems = lItem.intValue();
        int iPointOnePercent = (int) (iNoOfItems * 0.001);
        int iOnePercent = (int) (iNoOfItems * 0.01);
        System.out.println("TotalItems:" + iNoOfItems + "::0.1% of data is :: " + iPointOnePercent
                + "::1% of data is :" + iOnePercent);

        // Guard: rn.nextInt(bound) requires a strictly positive bound.
        int range = iOnePercent - iPointOnePercent;
        if (range <= 0) {
            System.out.println("Table too small for a 0.1%-1% range - skipping random queries.");
            return;
        }

        // Generating 25000 random queries for 0.1 to 1 % of the data
        for (int i = 0; i <= 24999; i++) {
            String randomInt = Integer.toString(rn.nextInt(range) + iPointOnePercent);
            HashMap<String, Condition> scanFilter = new HashMap<String, Condition>();
            Condition condition = new Condition().withComparisonOperator(ComparisonOperator.EQ.toString())
                    .withAttributeValueList(new AttributeValue().withN(randomInt));
            scanFilter.put("id", condition);
            ScanRequest scanRequest = new ScanRequest(tableName).withScanFilter(scanFilter);
            ScanResult scanResult = dynamoDB.scan(scanRequest);
            System.out.println("Random No :" + randomInt + ":: Query no: " + (i + 1));

        }
        // Report elapsed wall-clock time for the whole batch of scans.
        long lEndTime = new Date().getTime();
        long difference = lEndTime - lStartTime;
        System.out.println("Elapsed milliseconds: " + difference);
        System.out.println("Elapsed seconds: " + difference * 0.001);
        System.out.println("Elapsed Minutes: " + (int) ((difference / (1000 * 60)) % 60));

    } catch (AmazonServiceException ase) {
        // Request reached AWS but was rejected (auth, validation, throttling, ...).
        System.out.println("Caught an AmazonServiceException, which means your request made it "
                + "to AWS, but was rejected with an error response for some reason.");
        System.out.println("Error Message:    " + ase.getMessage());
        System.out.println("HTTP Status Code: " + ase.getStatusCode());
        System.out.println("AWS Error Code:   " + ase.getErrorCode());
        System.out.println("Error Type:       " + ase.getErrorType());
        System.out.println("Request ID:       " + ase.getRequestId());
    } catch (AmazonClientException ace) {
        // Client-side failure: no response was received from AWS at all.
        System.out.println("Caught an AmazonClientException, which means the client encountered "
                + "a serious internal problem while trying to communicate with AWS, "
                + "such as not being able to access the network.");
        System.out.println("Error Message: " + ace.getMessage());
    }
}

From source file: aws.example.dynamodb.DescribeTable.java

License: Open Source License

/**
 * Command-line entry point: prints name, ARN, status, item count, size,
 * provisioned throughput, and attribute definitions for the DynamoDB table
 * named by the first argument. Exits with status 1 on missing argument or
 * AWS service error.
 */
public static void main(String[] args) {
    // Usage text shown when no table name is supplied.
    final String usage = "\n" + "Usage:\n" + "    DescribeTable <table>\n\n" + "Where:\n"
            + "    table - the table to get information about.\n\n" + "Example:\n"
            + "    DescribeTable HelloTable\n";

    if (args.length < 1) {
        System.out.println(usage);
        System.exit(1);
    }

    final String tableName = args[0];
    System.out.format("Getting description for %s\n\n", tableName);

    final AmazonDynamoDBClient ddb = new AmazonDynamoDBClient();

    try {
        final TableDescription description = ddb.describeTable(tableName).getTable();

        if (description != null) {
            // Core table metadata.
            System.out.format("Table name  : %s\n", description.getTableName());
            System.out.format("Table ARN   : %s\n", description.getTableArn());
            System.out.format("Status      : %s\n", description.getTableStatus());
            System.out.format("Item count  : %d\n", description.getItemCount().longValue());
            System.out.format("Size (bytes): %d\n", description.getTableSizeBytes().longValue());

            // Provisioned read/write capacity.
            final ProvisionedThroughputDescription capacity = description.getProvisionedThroughput();
            System.out.println("Throughput");
            System.out.format("  Read Capacity : %d\n", capacity.getReadCapacityUnits().longValue());
            System.out.format("  Write Capacity: %d\n", capacity.getWriteCapacityUnits().longValue());

            // Declared attribute definitions (name and scalar type).
            System.out.println("Attributes");
            for (final AttributeDefinition def : description.getAttributeDefinitions()) {
                System.out.format("  %s (%s)\n", def.getAttributeName(), def.getAttributeType());
            }
        }
    } catch (AmazonServiceException e) {
        System.err.println(e.getErrorMessage());
        System.exit(1);
    }
    System.out.println("\nDone!");
}

From source file: com.erudika.para.persistence.AWSDynamoUtils.java

License: Apache License

/**
 * Gives basic information about a DynamoDB table (status, creation date, size).
 * @param appid name of the {@link com.erudika.para.core.App}
 * @return a map//w  w  w .ja  va 2  s.  com
 */
/**
 * Gives basic information about a DynamoDB table (status, creation date, size).
 * @param appid name of the {@link com.erudika.para.core.App}
 * @return a map of table metadata, or an empty map on blank input or error
 */
public static Map<String, Object> getTableStatus(final String appid) {
    if (StringUtils.isBlank(appid)) {
        return Collections.emptyMap();
    }
    try {
        final TableDescription td = getClient().describeTable(getTableNameForAppid(appid)).getTable();
        // Plain HashMap instead of double-brace initialization: the anonymous
        // inner-class variant creates an extra class per call site and causes
        // serialization/identity surprises for no benefit.
        final Map<String, Object> status = new HashMap<String, Object>();
        status.put("id", appid);
        status.put("status", td.getTableStatus());
        status.put("created", td.getCreationDateTime().getTime());
        status.put("sizeBytes", td.getTableSizeBytes());
        status.put("itemCount", td.getItemCount());
        status.put("readCapacityUnits", td.getProvisionedThroughput().getReadCapacityUnits());
        status.put("writeCapacityUnits", td.getProvisionedThroughput().getWriteCapacityUnits());
        return status;
    } catch (Exception e) {
        logger.error(null, e);
    }
    return Collections.emptyMap();
}

From source file: com.haskins.cloudtrailviewer.dialog.resourcedetail.detailpanels.DbTableDetail.java

License: Open Source License

/**
 * Populates the detail panel with one label/value row per non-null field of
 * the described DynamoDB table.
 *
 * The repetitive null-check-then-addRow pattern is factored into
 * {@link #addRowIfPresent(String, Object)}.
 */
private void buildUI(DescribeTableResult detail) {

    this.add(primaryScrollPane, BorderLayout.CENTER);

    if (detail.getTable() != null) {

        TableDescription table = detail.getTable();

        // Creation date needs formatting before display, so it keeps its own check.
        if (table.getCreationDateTime() != null) {
            primaryTableModel.addRow(new Object[] { "Created", getDateString(table.getCreationDateTime()) });
        }
        addRowIfPresent("Item Count", table.getItemCount());
        addRowIfPresent("Latest Stream Arn", table.getLatestStreamArn());
        addRowIfPresent("Latest Stream Label", table.getLatestStreamLabel());
        addRowIfPresent("Arn", table.getTableArn());
        addRowIfPresent("Name", table.getTableName());
        addRowIfPresent("Size (bytes)", table.getTableSizeBytes());
        addRowIfPresent("Status", table.getTableStatus());
    }
}

/** Adds a label/value row to the primary table model when the value is non-null. */
private void addRowIfPresent(String label, Object value) {
    if (value != null) {
        primaryTableModel.addRow(new Object[] { label, value });
    }
}

From source file: org.apache.hadoop.dynamodb.DynamoDBUtil.java

License: Open Source License

/**
 * Returns the average item size in bytes (table size / item count), or 0.0
 * when the table is empty or either statistic is unavailable.
 *
 * Fix: the original compared {@code getItemCount() != 0}, which auto-unboxes
 * and throws NullPointerException if the SDK returns a null count; the null
 * table-size case was likewise unguarded.
 */
public static Double calculateAverageItemSize(TableDescription description) {
    Long itemCount = description.getItemCount();
    Long sizeBytes = description.getTableSizeBytes();
    if (itemCount == null || sizeBytes == null || itemCount == 0) {
        return 0.0;
    }
    return sizeBytes.doubleValue() / itemCount.doubleValue();
}

From source file: org.apache.hadoop.dynamodb.tools.DynamoDBExport.java

License: Open Source License

/**
 * Stores the DynamoDB table name, live throughput/size statistics, and any
 * optional tuning parameters into the job configuration, logging each value.
 */
private void setTableProperties(JobConf jobConf, String tableName, Double readRatio, Integer totalSegments) {
    // Record the table name under every key the downstream job reads.
    jobConf.set(DynamoDBConstants.TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.INPUT_TABLE_NAME, tableName);
    jobConf.set(DynamoDBConstants.OUTPUT_TABLE_NAME, tableName);

    // Fetch live table metadata so the job's throughput hints match reality.
    DynamoDBClient dynamoClient = new DynamoDBClient(jobConf);
    TableDescription tableInfo = dynamoClient.describeTable(tableName);

    ProvisionedThroughputDescription capacity = tableInfo.getProvisionedThroughput();
    Long readCapacity = capacity.getReadCapacityUnits();
    Long writeCapacity = capacity.getWriteCapacityUnits();
    Long items = tableInfo.getItemCount();
    Long sizeBytes = tableInfo.getTableSizeBytes();
    Double avgItemSize = DynamoDBUtil.calculateAverageItemSize(tableInfo);

    jobConf.set(DynamoDBConstants.READ_THROUGHPUT, readCapacity.toString());
    jobConf.set(DynamoDBConstants.WRITE_THROUGHPUT, writeCapacity.toString());
    jobConf.set(DynamoDBConstants.ITEM_COUNT, items.toString());
    jobConf.set(DynamoDBConstants.TABLE_SIZE_BYTES, sizeBytes.toString());
    jobConf.set(DynamoDBConstants.AVG_ITEM_SIZE, avgItemSize.toString());

    log.info("Read throughput:       " + readCapacity);
    log.info("Write throughput:      " + writeCapacity);
    log.info("Item count:            " + items);
    log.info("Table size:            " + sizeBytes);
    log.info("Average item size:     " + avgItemSize);

    // Optional tuning knobs: only recorded when the caller supplied them.
    if (readRatio != null) {
        jobConf.set(DynamoDBConstants.THROUGHPUT_READ_PERCENT, readRatio.toString());
        log.info("Throughput read ratio: " + readRatio);
    }

    if (totalSegments != null) {
        jobConf.set(DynamoDBConstants.SCAN_SEGMENTS, totalSegments.toString());
        log.info("Total segment count:   " + totalSegments);
    }
}

From source file: org.apache.hadoop.hive.dynamodb.DynamoDBStorageHandler.java

License: Open Source License

/**
 * Populates the Hive job properties for a DynamoDB-backed table: endpoint,
 * region, table names, column mapping, throughput percentages, and the live
 * table statistics read from DescribeTable.
 *
 * Fix: "Average item size" was logged twice (once right after computing it
 * and again with the summary block); the duplicate has been removed.
 */
@Override
public void configureTableJobProperties(TableDesc tableDesc, Map<String, String> jobProperties) {
    DynamoDBClient client = new DynamoDBClient(conf,
            tableDesc.getProperties().getProperty(DynamoDBConstants.REGION));

    try {
        // Resolve the DynamoDB table name (explicit property wins over Hive name).
        String tableName = HiveDynamoDBUtil.getDynamoDBTableName(
                tableDesc.getProperties().getProperty(DynamoDBConstants.TABLE_NAME), tableDesc.getTableName());
        TableDescription description = client.describeTable(tableName);
        Double averageItemSize = DynamoDBUtil.calculateAverageItemSize(description);

        // Endpoint: the table-level property overrides the configuration default.
        String endpoint = conf.get(DynamoDBConstants.ENDPOINT);
        if (!Strings.isNullOrEmpty(tableDesc.getProperties().getProperty(DynamoDBConstants.ENDPOINT))) {
            endpoint = tableDesc.getProperties().getProperty(DynamoDBConstants.ENDPOINT);
        }

        if (!Strings.isNullOrEmpty(endpoint)) {
            jobProperties.put(DynamoDBConstants.ENDPOINT, endpoint);
        }

        if (!Strings.isNullOrEmpty(tableDesc.getProperties().getProperty(DynamoDBConstants.REGION))) {
            jobProperties.put(DynamoDBConstants.REGION,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.REGION));
        }

        jobProperties.put(DynamoDBConstants.OUTPUT_TABLE_NAME, tableName);
        jobProperties.put(DynamoDBConstants.INPUT_TABLE_NAME, tableName);
        jobProperties.put(DynamoDBConstants.TABLE_NAME, tableName);

        Map<String, String> hiveToDynamoDBSchemaMapping = HiveDynamoDBUtil.getHiveToDynamoDBSchemaMapping(
                tableDesc.getProperties().getProperty(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING));

        // Column map can be null if only full backup is being used
        if (hiveToDynamoDBSchemaMapping != null) {
            jobProperties.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
                    HiveDynamoDBUtil.toJsonString(hiveToDynamoDBSchemaMapping));
        }

        // Optional throughput percentages: forwarded only when declared.
        if (tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_READ_PERCENT) != null) {
            jobProperties.put(DynamoDBConstants.THROUGHPUT_READ_PERCENT,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_READ_PERCENT));
        }

        if (tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT) != null) {
            jobProperties.put(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT,
                    tableDesc.getProperties().getProperty(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT));
        }

        String readThroughput = description.getProvisionedThroughput().getReadCapacityUnits().toString();
        String writeThroughput = description.getProvisionedThroughput().getWriteCapacityUnits().toString();

        jobProperties.put(DynamoDBConstants.READ_THROUGHPUT, readThroughput);
        jobProperties.put(DynamoDBConstants.WRITE_THROUGHPUT, writeThroughput);
        jobProperties.put(DynamoDBConstants.ITEM_COUNT, description.getItemCount().toString());
        jobProperties.put(DynamoDBConstants.TABLE_SIZE_BYTES, description.getTableSizeBytes().toString());
        jobProperties.put(DynamoDBConstants.AVG_ITEM_SIZE, averageItemSize.toString());

        log.info("Average item size: " + averageItemSize);
        log.info("Item count: " + description.getItemCount());
        log.info("Table size: " + description.getTableSizeBytes());
        log.info("Read throughput: " + readThroughput);
        log.info("Write throughput: " + writeThroughput);

    } finally {
        // Always release the client, even if DescribeTable or mapping fails.
        client.close();
    }
}

From source file: org.xmlsh.aws.util.AWSDDBCommand.java

License: BSD License

/**
 * Writes a {@code <table>} element describing the DynamoDB table, followed by
 * its attribute definitions, key schema, secondary indexes, and provisioned
 * throughput as child content.
 *
 * Fix: the {@code item-count} attribute was emitted twice, which produces a
 * duplicate XML attribute (invalid XML); the second write has been removed.
 *
 * @param tableDescription the table metadata returned by DescribeTable
 * @throws XMLStreamException if the underlying XML writer fails
 */
protected void writeTableDescription(TableDescription tableDescription) throws XMLStreamException {
    startElement("table");
    attribute("name", tableDescription.getTableName());
    attribute("status", tableDescription.getTableStatus());
    attribute("create-date", Util.formatXSDateTime(tableDescription.getCreationDateTime()));
    attribute("item-count", tableDescription.getItemCount());
    attribute("size", tableDescription.getTableSizeBytes());

    writeAttributeDefinitions(tableDescription.getAttributeDefinitions());
    writeKeySchemaList(tableDescription.getKeySchema());
    writeLocalSecondaryIndexes(tableDescription.getLocalSecondaryIndexes());
    writeGlobalSecondaryIndexes(tableDescription.getGlobalSecondaryIndexes());
    writeProvisionedThroughput(tableDescription.getProvisionedThroughput());

}