Example usage for com.amazonaws.services.dynamodbv2.model QueryResult getLastEvaluatedKey

List of usage examples for com.amazonaws.services.dynamodbv2.model QueryResult getLastEvaluatedKey

Introduction

On this page you can find example usage for com.amazonaws.services.dynamodbv2.model QueryResult getLastEvaluatedKey.

Prototype


public java.util.Map<String, AttributeValue> getLastEvaluatedKey() 

Source Link

Document

The primary key of the item where the operation stopped, inclusive of the previous result set.
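A typical use of getLastEvaluatedKey is paginating a Query: when a response is truncated (for example, it hit the 1 MB response limit or a page Limit), DynamoDB returns the primary key of the last item it evaluated, and you pass that map back as the ExclusiveStartKey of the next request. A null (or empty) map means there are no more pages. The minimal sketch below shows this loop; the table name "MyTable", the "id" hash key attribute, and the key value are hypothetical placeholders, only the pagination pattern itself mirrors the examples on this page.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
import com.amazonaws.services.dynamodbv2.model.Condition;
import com.amazonaws.services.dynamodbv2.model.QueryRequest;
import com.amazonaws.services.dynamodbv2.model.QueryResult;

public class PaginatedQueryExample {

    // Minimal pagination sketch; "MyTable", the "id" hash key attribute and the
    // value "some-key" are placeholders, not taken from the source files below.
    public static void queryAllPages(AmazonDynamoDB client) {
        Map<String, Condition> keyConditions = new HashMap<String, Condition>();
        keyConditions.put("id", new Condition().withComparisonOperator(ComparisonOperator.EQ)
                .withAttributeValueList(new AttributeValue().withS("some-key")));

        Map<String, AttributeValue> lastEvaluatedKey = null;
        do {
            QueryRequest request = new QueryRequest().withTableName("MyTable")
                    .withKeyConditions(keyConditions)
                    .withExclusiveStartKey(lastEvaluatedKey); // null on the first page

            QueryResult result = client.query(request);
            List<Map<String, AttributeValue>> items = result.getItems();
            // ... process the items of this page ...

            // Null or empty means DynamoDB has returned the final page.
            lastEvaluatedKey = result.getLastEvaluatedKey();
        } while (lastEvaluatedKey != null && !lastEvaluatedKey.isEmpty());
    }
}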

Usage

From source file:amazon.dynamodb.config.DynamoDBManager.java

License:Open Source License

/**
 * Query Amazon DynamoDB
 *
 * @param hashKey Hash key for the query request.
 *
 * @param range The range of geohashs to query.
 *
 * @return The query result.
 */
public List<QueryResult> queryGeohash(QueryRequest queryRequest, long hashKey, GeohashRange range) {
    List<QueryResult> queryResults = new ArrayList<QueryResult>();
    Map<String, AttributeValue> lastEvaluatedKey = null;

    do {
        Map<String, Condition> keyConditions = new HashMap<String, Condition>();

        Condition hashKeyCondition = new Condition().withComparisonOperator(ComparisonOperator.EQ)
                .withAttributeValueList(new AttributeValue().withN(String.valueOf(hashKey)));
        keyConditions.put(config.getHashKeyAttributeName(), hashKeyCondition);

        AttributeValue minRange = new AttributeValue().withN(Long.toString(range.getRangeMin()));
        AttributeValue maxRange = new AttributeValue().withN(Long.toString(range.getRangeMax()));

        Condition geohashCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN)
                .withAttributeValueList(minRange, maxRange);
        keyConditions.put(config.getGeohashAttributeName(), geohashCondition);

        queryRequest.withTableName(config.getTableName()).withKeyConditions(keyConditions)
                .withIndexName(config.getGeohashIndexName()).withConsistentRead(true)
                .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL)
                .withExclusiveStartKey(lastEvaluatedKey);

        QueryResult queryResult = config.getDynamoDBClient().query(queryRequest);
        queryResults.add(queryResult);

        lastEvaluatedKey = queryResult.getLastEvaluatedKey();

    } while (lastEvaluatedKey != null);

    return queryResults;
}

From source file:com.amazon.janusgraph.diskstorage.dynamodb.QueryWorker.java

License:Open Source License

@Override
public QueryResultWrapper next() throws BackendException {
    final Query backoff = new ExponentialBackoff.Query(request, delegate, permitsToConsume);
    final QueryResult result = backoff.runWithBackoff();
    final ConsumedCapacity consumedCapacity = result.getConsumedCapacity();
    if (null != consumedCapacity) {
        permitsToConsume = Math.max((int) (consumedCapacity.getCapacityUnits() - 1.0), 1);
        totalCapacityUnits += consumedCapacity.getCapacityUnits();
    }

    if (result.getLastEvaluatedKey() != null && !result.getLastEvaluatedKey().isEmpty()) {
        request.setExclusiveStartKey(result.getLastEvaluatedKey());
    } else {
        markComplete();
    }
    // Update the returned item count
    returnedCount += result.getCount();

    // Update the scanned item count
    scannedCount += result.getScannedCount();
    // Add the items from this page to finalItemList
    finalItemList.addAll(result.getItems());
    return new QueryResultWrapper(titanKey, result);
}

From source file:com.clicktravel.infrastructure.persistence.aws.dynamodb.DynamoDbTemplate.java

License:Apache License

private <T extends Item> Collection<T> executeQuery(final AttributeQuery query, final Class<T> itemClass) {
    final ItemConfiguration itemConfiguration = getItemConfiguration(itemClass);
    final com.amazonaws.services.dynamodbv2.model.Condition condition = new com.amazonaws.services.dynamodbv2.model.Condition();

    if (query.getCondition().getComparisonOperator() == Operators.NULL) {
        condition.setComparisonOperator(ComparisonOperator.NULL);
    } else if (query.getCondition().getComparisonOperator() == Operators.NOT_NULL) {
        condition.setComparisonOperator(ComparisonOperator.NOT_NULL);
    } else {
        if (query.getCondition().getComparisonOperator() == Operators.EQUALS) {
            condition.setComparisonOperator(ComparisonOperator.EQ);
        } else if (query.getCondition().getComparisonOperator() == Operators.LESS_THAN_OR_EQUALS) {
            condition.setComparisonOperator(ComparisonOperator.LE);
        } else if (query.getCondition().getComparisonOperator() == Operators.GREATER_THAN_OR_EQUALS) {
            condition.setComparisonOperator(ComparisonOperator.GE);
        }

        final Collection<AttributeValue> attributeValueList = new ArrayList<>();

        for (final String value : query.getCondition().getValues()) {
            if (value != null && !value.isEmpty()) {
                attributeValueList.add(new AttributeValue(value));
            }
        }

        if (attributeValueList.size() == 0) {
            return new ArrayList<>();
        }

        condition.setAttributeValueList(attributeValueList);
    }

    final Map<String, com.amazonaws.services.dynamodbv2.model.Condition> conditions = new HashMap<>();
    conditions.put(query.getAttributeName(), condition);
    final List<T> totalItems = new ArrayList<>();
    Map<String, AttributeValue> lastEvaluatedKey = null;
    final String tableName = databaseSchemaHolder.schemaName() + "." + itemConfiguration.tableName();
    if (itemConfiguration.hasIndexOn(query.getAttributeName())) {
        do {
            final String queryAttributeName = query.getAttributeName();
            final PrimaryKeyDefinition primaryKeyDefinition = itemConfiguration.primaryKeyDefinition();
            final String primaryKeyPropertyName = primaryKeyDefinition.propertyName();
            final boolean isPrimaryKeyQuery = queryAttributeName.equals(primaryKeyPropertyName);
            final QueryRequest queryRequest = new QueryRequest().withTableName(tableName)
                    .withKeyConditions(conditions).withExclusiveStartKey(lastEvaluatedKey);
            if (!isPrimaryKeyQuery) {
                queryRequest.withIndexName(queryAttributeName + "_idx");
            }

            final QueryResult queryResult;
            try {
                queryResult = amazonDynamoDbClient.query(queryRequest);
            } catch (final AmazonServiceException e) {
                throw new PersistenceResourceFailureException("Failure while attempting DynamoDb Query", e);
            }
            totalItems.addAll(marshallIntoObjects(itemClass, queryResult.getItems()));
            lastEvaluatedKey = queryResult.getLastEvaluatedKey();
        } while (lastEvaluatedKey != null);

    } else {
        logger.debug("Performing table scan with query: " + query);
        do {
            final ScanRequest scanRequest = new ScanRequest().withTableName(tableName)
                    .withScanFilter(conditions).withExclusiveStartKey(lastEvaluatedKey);
            final ScanResult scanResult;
            try {
                scanResult = amazonDynamoDbClient.scan(scanRequest);
            } catch (final AmazonServiceException e) {
                throw new PersistenceResourceFailureException("Failure while attempting DynamoDb Scan", e);
            }
            totalItems.addAll(marshallIntoObjects(itemClass, scanResult.getItems()));
            lastEvaluatedKey = scanResult.getLastEvaluatedKey();
        } while (lastEvaluatedKey != null);
    }

    return totalItems;
}

From source file:com.facebook.presto.dynamodb.DynamodbClient.java

License:Apache License

public Iterator<List<Map<String, AttributeValue>>> getTableData(String name,
        Optional<Entry<String, AttributeValue>> hashKeyCondition,
        Optional<Entry<String, Condition>> rangeKeyCondition) {
    AtomicReference<Map<String, AttributeValue>> lastKeyEvaluated = new AtomicReference<>();
    AtomicBoolean firstRun = new AtomicBoolean(true);

    return new Iterator<List<Map<String, AttributeValue>>>() {
        @Override
        public boolean hasNext() {
            // More pages exist on the first call or while a LastEvaluatedKey was returned
            return firstRun.get() || lastKeyEvaluated.get() != null;
        }

        @Override
        public List<Map<String, AttributeValue>> next() {
            firstRun.set(false);
            if (hashKeyCondition.isPresent()) {
                ImmutableMap.Builder<String, Condition> builder = ImmutableMap.builder();
                builder.put(hashKeyCondition.get().getKey(), new Condition()
                        .withAttributeValueList(hashKeyCondition.get().getValue()).withComparisonOperator(EQ));

                if (rangeKeyCondition.isPresent()) {
                    Entry<String, Condition> rangeEntry = rangeKeyCondition.get();
                    if (EQ.name().equals(rangeEntry.getValue().getComparisonOperator())
                            && rangeEntry.getValue().getAttributeValueList().size() == 1) {
                        GetItemResult item = dynamoDBClient.getItem(name,
                                ImmutableMap.of(hashKeyCondition.get().getKey(),
                                        hashKeyCondition.get().getValue(), rangeEntry.getKey(),
                                        rangeEntry.getValue().getAttributeValueList().get(0)));
                        return ImmutableList.of(item.getItem());
                    } else {
                        builder.put(rangeKeyCondition.get().getKey(), rangeKeyCondition.get().getValue());
                    }
                }

                QueryResult query = dynamoDBClient.query(
                        new QueryRequest().withTableName(name).withExclusiveStartKey(lastKeyEvaluated.get())
                                .withKeyConditions(builder.build()).withLimit(100000));

                lastKeyEvaluated.set(query.getLastEvaluatedKey());

                return query.getItems();
            } else {
                ScanResult scan = dynamoDBClient.scan(new ScanRequest()
                        .withExclusiveStartKey(lastKeyEvaluated.get()).withLimit(100000).withTableName(name));

                lastKeyEvaluated.set(scan.getLastEvaluatedKey());
                return scan.getItems();
            }
        }
    };
}

From source file:com.grublr.geo.dynamodb.internal.DynamoDBManager.java

License:Open Source License

/**
 * Query Amazon DynamoDB
 * 
 * @param hashKey
 *            Hash key for the query request.
 * 
 * @param range
 *            The range of geohashs to query.
 * 
 * @return The query result.
 */
public List<QueryResult> queryGeohash(QueryRequest queryRequest, long hashKey, GeohashRange range) {
    List<QueryResult> queryResults = new ArrayList<QueryResult>();
    Map<String, AttributeValue> lastEvaluatedKey = null;

    do {
        Map<String, Condition> keyConditions = new HashMap<String, Condition>();

        Condition hashKeyCondition = new Condition().withComparisonOperator(ComparisonOperator.EQ)
                .withAttributeValueList(new AttributeValue().withN(String.valueOf(hashKey)));
        keyConditions.put(config.getHashKeyAttributeName(), hashKeyCondition);

        AttributeValue minRange = new AttributeValue().withN(Long.toString(range.getRangeMin()));
        AttributeValue maxRange = new AttributeValue().withN(Long.toString(range.getRangeMax()));

        Condition geohashCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN)
                .withAttributeValueList(minRange, maxRange);
        keyConditions.put(config.getGeohashAttributeName(), geohashCondition);

        queryRequest.withTableName(config.getTableName()).withKeyConditions(keyConditions)
                .withIndexName(config.getGeohashIndexName()).withConsistentRead(true)
                .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL)
                .withExclusiveStartKey(lastEvaluatedKey);

        QueryResult queryResult = config.getDynamoDBClient().query(queryRequest);
        queryResults.add(queryResult);

        lastEvaluatedKey = queryResult.getLastEvaluatedKey();

    } while (lastEvaluatedKey != null);

    return queryResults;
}

From source file:com.numenta.taurus.service.TaurusClient.java

License:Open Source License

/**
 * Get list of tweets for the given metric filtered by the given time range returning the
 * results as they become available asynchronously.
 *
 * @param metricName The metric name to retrieve the tweets from
 * @param from       The start time (aggregated) inclusive.
 * @param to         The end time (aggregated) inclusive.
 * @param callback   Callback for asynchronous call. It will be called on every {@link Tweet}
 */
public void getTweets(String metricName, Date from, Date to, DataCallback<Tweet> callback)
        throws GrokException, IOException {
    if (metricName == null) {
        throw new ObjectNotFoundException("Cannot get tweets without metric name");
    }

    final TaurusDataFactory dataFactory = TaurusApplication.getInstance().getDataFactory();
    final SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US);
    timestampFormat.setTimeZone(TimeZone.getTimeZone("UTC"));

    // Key conditions
    Map<String, Condition> keyConditions = new HashMap<>();

    // uid = modelId
    Condition modelIdCond = new Condition().withComparisonOperator(ComparisonOperator.EQ)
            .withAttributeValueList(new AttributeValue(metricName));
    keyConditions.put("metric_name", modelIdCond);

    Condition timestampCondition;
    if (from != null && to != null) {
        // timestamp >= from and timestamp <=to
        timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN)
                .withAttributeValueList(new AttributeValue().withS(timestampFormat.format(from)),
                        new AttributeValue().withS(timestampFormat.format(to)));
        keyConditions.put("agg_ts", timestampCondition);
    } else if (from != null) {
        // timestamp >= from
        timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.GT)
                .withAttributeValueList(new AttributeValue().withS(timestampFormat.format(from)));
        keyConditions.put("agg_ts", timestampCondition);
    } else if (to != null) {
        // timestamp <= to
        timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.LT)
                .withAttributeValueList(new AttributeValue().withS(timestampFormat.format(to)));
        keyConditions.put("agg_ts", timestampCondition);
    }

    // Prepare query request
    QueryRequest query = new QueryRequest().withTableName(TWEETS_TABLE)
            .withAttributesToGet("tweet_uid", "userid", "text", "username", "agg_ts", "created_at",
                    "retweet_count")
            .withKeyConditions(keyConditions).withScanIndexForward(false)
            .withIndexName("taurus.metric_data-metric_name_index");

    QueryResult result;
    String tweetId;
    String userId;
    String userName;
    String text;
    Date created;
    Date aggregated;
    AttributeValue retweet;
    int retweetCount;
    Map<String, AttributeValue> lastKey;
    try {
        do {
            // Get results
            result = _awsClient.query(query);
            for (Map<String, AttributeValue> item : result.getItems()) {
                tweetId = item.get("tweet_uid").getS();
                userId = item.get("userid").getS();
                text = item.get("text").getS();
                userName = item.get("username").getS();
                aggregated = DataUtils.parseGrokDate(item.get("agg_ts").getS());
                created = DataUtils.parseGrokDate(item.get("created_at").getS());

                // "retweet_count" is optional
                retweet = item.get("retweet_count");
                if (retweet != null && retweet.getN() != null) {
                    retweetCount = Integer.parseInt(retweet.getN());
                } else {
                    retweetCount = 0;
                }
                if (!callback.onData(dataFactory.createTweet(tweetId, aggregated, created, userId, userName,
                        text, retweetCount))) {
                    // Canceled by the user
                    break;
                }
            }
            // Make sure to get all pages
            lastKey = result.getLastEvaluatedKey();
            query.setExclusiveStartKey(lastKey);
        } while (lastKey != null);
    } catch (AmazonClientException e) {
        // Wraps Amazon's unchecked exception as IOException
        throw new IOException(e);
    }
}

From source file:com.numenta.taurus.service.TaurusClient.java

License:Open Source License

/**
 * Get Metric values only from DynamoDB
 *
 * @param modelId   The model to get the data from
 * @param from      The starting timestamp
 * @param to        The ending timestamp
 * @param ascending Specifies ascending (true) or descending (false)
 * @param callback  User defined callback to receive data
 */
public void getMetricValues(@NonNull String modelId, @NonNull Date from, @NonNull Date to, boolean ascending,
        @NonNull MetricValuesCallback callback) throws GrokException, IOException {

    // Get metric from cache
    ConcurrentSkipListMap<Long, CachedMetricValue> cache = _cachedMetricValues.get(modelId);
    if (cache == null) {
        cache = new ConcurrentSkipListMap<>();
        ConcurrentSkipListMap<Long, CachedMetricValue> oldValues = _cachedMetricValues.putIfAbsent(modelId,
                cache);
        if (oldValues != null) {
            // Found old cached values
            cache = oldValues;
        }
    }

    // Try to get metric values from cache
    ConcurrentNavigableMap<Long, CachedMetricValue> cached = cache.subMap(from.getTime(), true, to.getTime(),
            true);
    if (!cached.isEmpty()) {
        Log.d(TAG, "from=" + from.getTime() + ", firstKey=" + cache.firstKey());
        Log.d(TAG, "to=" + to.getTime() + ", lastKey=" + cache.lastKey());
        // Check if we found the values in the cache
        if (!cached.isEmpty()) {
            // Return cached values sorted based on "ascending" order
            Set<Map.Entry<Long, CachedMetricValue>> values;
            if (ascending) {
                values = cached.entrySet();
            } else {
                values = cached.descendingMap().entrySet();
            }
            for (Map.Entry<Long, CachedMetricValue> metricValue : values) {
                if (!callback.onData(modelId, metricValue.getKey(), metricValue.getValue().value,
                        metricValue.getValue().anomaly)) {
                    // Canceled by the user
                    break;
                }
            }
            return;
        }
    }
    final SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US);
    timestampFormat.setTimeZone(TimeZone.getTimeZone("UTC"));

    // Key conditions
    Map<String, Condition> keyConditions = new HashMap<>();

    // uid = modelId
    keyConditions.put("uid", new Condition().withComparisonOperator(ComparisonOperator.EQ)
            .withAttributeValueList(new AttributeValue(modelId)));

    // timestamp >= from and timestamp <=to
    Condition timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN);
    if (from.compareTo(to) <= 0) {
        timestampCondition.withAttributeValueList(new AttributeValue().withS(timestampFormat.format(from)),
                new AttributeValue().withS(timestampFormat.format(to)));
    } else {
        // FIXME This should not happen.
        Log.e(TAG, "TaurusClient#getMetricValues: 'from date' should not be greater than 'to date");
        timestampCondition.withAttributeValueList(new AttributeValue().withS(timestampFormat.format(to)),
                new AttributeValue().withS(timestampFormat.format(from)));

    }
    keyConditions.put("timestamp", timestampCondition);

    // Prepare query request
    QueryRequest query = new QueryRequest().withTableName(METRIC_DATA_TABLE)
            .withAttributesToGet("timestamp", "metric_value", "anomaly_score").withKeyConditions(keyConditions)
            .withScanIndexForward(ascending);

    QueryResult result;
    Map<String, AttributeValue> lastKey;
    try {
        do {
            long timestamp;
            // Get results
            result = _awsClient.query(query);
            for (Map<String, AttributeValue> item : result.getItems()) {
                CachedMetricValue metricValue = new CachedMetricValue();
                timestamp = DataUtils.parseGrokDate(item.get("timestamp").getS()).getTime();
                metricValue.value = Float.parseFloat(item.get("metric_value").getN());
                metricValue.anomaly = Float.parseFloat(item.get("anomaly_score").getN());
                cache.put(timestamp, metricValue);
                if (!callback.onData(modelId, timestamp, metricValue.value, metricValue.anomaly)) {
                    // Canceled by the user
                    break;
                }
            }
            // Make sure to get all pages
            lastKey = result.getLastEvaluatedKey();
            query.setExclusiveStartKey(lastKey);
        } while (lastKey != null);
    } catch (AmazonClientException e) {
        // Wraps Amazon's unchecked exception as IOException
        throw new IOException(e);
    }
}

From source file:com.numenta.taurus.service.TaurusClient.java

License:Open Source License

/**
 * Get hourly aggregated data for all instances for a single day for the given time range
 *
 * @param date      The date to get the data from
 * @param fromHour  The start hour
 * @param toHour    The end hour
 * @param ascending Specifies ascending (true) or descending (false)
 * @param callback  User defined callback to receive instance data
 */
public void getAllInstanceDataForDate(@NonNull Date date, int fromHour, int toHour, boolean ascending,
        @NonNull DataCallback<InstanceData> callback) throws GrokException, IOException {

    Map<String, Condition> keyConditions = new HashMap<>();

    // Use "date" as hash key
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    keyConditions.put("date", new Condition().withComparisonOperator(ComparisonOperator.EQ)
            .withAttributeValueList(new AttributeValue(dateFormat.format(date))));

    String start = fromHour > 9 ? Integer.toString(fromHour) : "0" + fromHour;
    if (fromHour == toHour) {
        // One single hour
        keyConditions.put("hour", new Condition().withComparisonOperator(ComparisonOperator.EQ)
                .withAttributeValueList(new AttributeValue(start)));
    } else {
        // Use "hour" as range key
        String end = toHour > 9 ? Integer.toString(toHour) : "0" + toHour;
        keyConditions.put("hour", new Condition().withComparisonOperator(ComparisonOperator.BETWEEN)
                .withAttributeValueList(new AttributeValue(start), new AttributeValue(end)));
    }

    // Prepare query request
    QueryRequest query = new QueryRequest().withTableName(INSTANCE_DATA_HOURLY_TABLE)
            .withAttributesToGet("instance_id", "date_hour", "anomaly_score").withKeyConditions(keyConditions)
            .withScanIndexForward(ascending).withIndexName("taurus.instance_data_hourly-date_hour_index");

    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    QueryResult result;
    String instanceId;
    float anomalyScore;
    float score;
    Map<String, AttributeValue> scores;
    Map<String, AttributeValue> lastKey;
    Matcher match;
    EnumSet<MetricType> metricMask;
    TaurusDataFactory dataFactory = TaurusApplication.getInstance().getDataFactory();
    try {
        do {
            // Get data from DynamoDB
            result = _awsClient.query(query);
            for (Map<String, AttributeValue> item : result.getItems()) {
                // Convert "date_hour" to java milliseconds time
                match = DATE_HOUR_FORMAT_REGEX.matcher(item.get("date_hour").getS());
                if (match.matches()) {
                    calendar.clear();
                    calendar.set(Integer.parseInt(match.group(1)), Integer.parseInt(match.group(2)) - 1,
                            Integer.parseInt(match.group(3)), Integer.parseInt(match.group(4)), 0, 0);
                    instanceId = item.get("instance_id").getS();

                    // Get max anomaly scores
                    scores = item.get("anomaly_score").getM();
                    anomalyScore = 0;
                    double scaledScore;
                    metricMask = EnumSet.noneOf(MetricType.class);
                    for (Map.Entry<String, AttributeValue> entry : scores.entrySet()) {
                        score = Float.parseFloat(entry.getValue().getN());
                        scaledScore = DataUtils.logScale(Math.abs(score));
                        if (scaledScore >= TaurusApplication.getYellowBarFloor()) {
                            metricMask.add(MetricType.valueOf(entry.getKey()));
                        }
                        anomalyScore = Math.max(score, anomalyScore);
                    }

                    if (!callback.onData(dataFactory.createInstanceData(instanceId, AggregationType.Day,
                            calendar.getTimeInMillis(), anomalyScore, metricMask))) {
                        // Canceled by the user
                        break;
                    }
                }
            }
            // Make sure to get all pages
            lastKey = result.getLastEvaluatedKey();
            query.setExclusiveStartKey(lastKey);
        } while (lastKey != null);
    } catch (AmazonClientException e) {
        // Wraps Amazon's unchecked exception as IOException
        throw new IOException(e);
    }
}

From source file:com.rapid7.diskstorage.dynamodb.QueryWorker.java

License:Open Source License

@Override
public QueryResultWrapper next() throws BackendException {
    ExponentialBackoff.Query backoff = new ExponentialBackoff.Query(request, delegate, permitsToConsume);
    QueryResult result = backoff.runWithBackoff();
    ConsumedCapacity consumedCapacity = result.getConsumedCapacity();
    if (null != consumedCapacity) {
        permitsToConsume = Math.max((int) (consumedCapacity.getCapacityUnits() - 1.0), 1);
        totalCapacityUnits += consumedCapacity.getCapacityUnits();
    }

    if (result.getLastEvaluatedKey() != null && !result.getLastEvaluatedKey().isEmpty()) {
        request.setExclusiveStartKey(result.getLastEvaluatedKey());
    } else {
        markComplete();
    }
    // Update the returned item count
    returnedCount += result.getCount();

    // Update the scanned item count
    scannedCount += result.getScannedCount();
    // Add the items from this page to the items list
    items.addAll(result.getItems());
    return new QueryResultWrapper(titanKey, result);
}

From source file:com.trk.aboutme.DynamoDB.DynamoDBManagerBooks.java

License:Open Source License

public static ArrayList<Books> getBooksList(String tableName, String category) {

    AmazonDynamoDBClient ddb = Shelf.clientManager.ddb();
    try {

        String bookId = category;
        long twoWeeksAgoMilli = (new Date()).getTime() - (5L * 24L * 60L * 60L * 1000L);
        Date twoWeeksAgo = new Date();
        twoWeeksAgo.setTime(twoWeeksAgoMilli);
        SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        String twoWeeksAgoStr = df.format(twoWeeksAgo);
        ArrayList<Books> resultList = new ArrayList<Books>();

        Map<String, AttributeValue> lastEvaluatedKey = null;
        do {

            Condition hashKeyCondition = new Condition()
                    .withComparisonOperator(ComparisonOperator.EQ.toString())
                    .withAttributeValueList(new AttributeValue().withS(bookId));

            Condition rangeKeyCondition = new Condition()
                    .withComparisonOperator(ComparisonOperator.GT.toString())
                    .withAttributeValueList(new AttributeValue().withS(twoWeeksAgoStr));

            Map<String, Condition> keyConditions = new HashMap<String, Condition>();
            keyConditions.put("bookID", hashKeyCondition);
            keyConditions.put("posteddate", rangeKeyCondition);

            // withSelect() only accepts Select enum values; use withAttributesToGet to project specific attributes
            QueryRequest queryRequest = new QueryRequest().withTableName(tableName)
                    .withKeyConditions(keyConditions).withAttributesToGet("title", "datecreated").withLimit(5)
                    .withExclusiveStartKey(lastEvaluatedKey);

            QueryResult result = ddb.query(queryRequest);
            for (Map<String, AttributeValue> item : result.getItems()) {
                // Build one Books object per returned item, rather than one per attribute
                Books b = new Books();
                for (Map.Entry<String, AttributeValue> i : item.entrySet()) {
                    AttributeValue value = i.getValue();
                    String vs = value.getS();

                    if (i.getKey().equals("title"))
                        b.setM_title(vs == null ? "" : vs);
                    if (i.getKey().equals("datecreated"))
                        b.setM_dateCreated(vs == null ? "" : vs);
                }
                resultList.add(b);
            }
            lastEvaluatedKey = result.getLastEvaluatedKey();
        } while (lastEvaluatedKey != null);

        return resultList;

    } catch (AmazonServiceException ex) {
        Shelf.clientManager.wipeCredentialsOnAuthError(ex);
    }

    return null;
}