List of usage examples for com.amazonaws.services.dynamodbv2.model ComparisonOperator BETWEEN
ComparisonOperator BETWEEN
To view the source code for com.amazonaws.services.dynamodbv2.model ComparisonOperator BETWEEN, click the Source Link below each example.
From source file:amazon.dynamodb.config.DynamoDBManager.java
License:Open Source License
/** * Query Amazon DynamoDB/*from w w w. j a v a 2 s . co m*/ * * @param hashKey Hash key for the query request. * * @param range The range of geohashs to query. * * @return The query result. */ public List<QueryResult> queryGeohash(QueryRequest queryRequest, long hashKey, GeoHashRango range) { List<QueryResult> queryResults = new ArrayList<QueryResult>(); Map<String, AttributeValue> lastEvaluatedKey = null; do { Map<String, Condition> keyConditions = new HashMap<String, Condition>(); Condition hashKeyCondition = new Condition().withComparisonOperator(ComparisonOperator.EQ) .withAttributeValueList(new AttributeValue().withN(String.valueOf(hashKey))); keyConditions.put(config.getHashKeyAttributeName(), hashKeyCondition); AttributeValue minRange = new AttributeValue().withN(Long.toString(range.getRangeMin())); AttributeValue maxRange = new AttributeValue().withN(Long.toString(range.getRangeMax())); Condition geohashCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN) .withAttributeValueList(minRange, maxRange); keyConditions.put(config.getGeohashAttributeName(), geohashCondition); queryRequest.withTableName(config.getTableName()).withKeyConditions(keyConditions) .withIndexName(config.getGeohashIndexName()).withConsistentRead(true) .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL) .withExclusiveStartKey(lastEvaluatedKey); QueryResult queryResult = config.getDynamoDBClient().query(queryRequest); queryResults.add(queryResult); lastEvaluatedKey = queryResult.getLastEvaluatedKey(); } while (lastEvaluatedKey != null); return queryResults; }
From source file:com.dell.doradus.db.s3.DynamoDBService2.java
License:Apache License
/**
 * Reads up to {@code count} columns (capped at 100 per request) for the given
 * row, optionally restricted by start/end column names.
 *
 * <p>Range semantics as implemented: with both bounds, BETWEEN is inclusive on
 * both ends; with only a start, GE (inclusive); with only an end, LT
 * (exclusive). NOTE(review): the end bound is therefore inclusive when a start
 * is also given but exclusive when it is not — confirm callers expect this
 * asymmetry.
 *
 * @param storeName   logical store name (prefixed onto the row key)
 * @param rowKey      row key within the store
 * @param startColumn first column name, or null for an open start
 * @param endColumn   last/limit column name, or null for an open end
 * @param count       maximum number of columns requested by the caller
 * @return the columns of the first result page
 */
@Override
public List<DColumn> getColumns(String storeName, String rowKey, String startColumn, String endColumn,
        int count) {
    Timer t = new Timer();
    // Physical hash key is "<store>_<row>".
    String key = storeName + "_" + rowKey;
    HashMap<String, Condition> keyConditions = new HashMap<String, Condition>();
    keyConditions.put("key", new Condition().withComparisonOperator(ComparisonOperator.EQ)
            .withAttributeValueList(new AttributeValue().withS(key)));
    if (startColumn != null && endColumn != null) {
        // CONSISTENCY FIX: was "new AttributeValue(endColumn)"; the one-arg
        // constructor also sets S, so behavior is unchanged, but the explicit
        // withS(...) form matches every other AttributeValue in this method.
        keyConditions.put("column",
                new Condition().withComparisonOperator(ComparisonOperator.BETWEEN).withAttributeValueList(
                        new AttributeValue().withS(startColumn), new AttributeValue().withS(endColumn)));
    } else if (startColumn != null) {
        keyConditions.put("column", new Condition().withComparisonOperator(ComparisonOperator.GE)
                .withAttributeValueList(new AttributeValue().withS(startColumn)));
    } else if (endColumn != null) {
        keyConditions.put("column", new Condition().withComparisonOperator(ComparisonOperator.LT)
                .withAttributeValueList(new AttributeValue().withS(endColumn)));
    }
    // NOTE(review): only the first page is fetched; if more than
    // min(100, count) columns match, the remainder is silently dropped.
    QueryRequest request = new QueryRequest().withTableName(getTenant().getName())
            .withLimit(Math.min(100, count)).withKeyConditions(keyConditions);
    QueryResult result = m_client.query(request);
    List<DColumn> list = fromItems(result.getItems());
    m_logger.debug("get columns range for {} in {}", getTenant().getName(), t);
    return list;
}
From source file:com.grublr.geo.dynamodb.internal.DynamoDBManager.java
License:Open Source License
/**
 * Query Amazon DynamoDB for all items whose geohash lies within the given
 * range, following LastEvaluatedKey pagination until all pages are fetched.
 *
 * @param hashKey Hash key for the query request.
 * @param range The range of geohashs to query.
 * @return The query result, one QueryResult per fetched page, in fetch order.
 */
public List<QueryResult> queryGeohash(QueryRequest queryRequest, long hashKey, GeohashRange range) {
    List<QueryResult> queryResults = new ArrayList<QueryResult>();
    Map<String, AttributeValue> lastEvaluatedKey = null;
    do {
        // Key conditions are rebuilt on every iteration even though only the
        // exclusive start key changes between pages.
        Map<String, Condition> keyConditions = new HashMap<String, Condition>();
        // Partition key: hash-key attribute == hashKey
        Condition hashKeyCondition = new Condition().withComparisonOperator(ComparisonOperator.EQ)
                .withAttributeValueList(new AttributeValue().withN(String.valueOf(hashKey)));
        keyConditions.put(config.getHashKeyAttributeName(), hashKeyCondition);
        // Range key: geohash BETWEEN min and max (inclusive on both ends)
        AttributeValue minRange = new AttributeValue().withN(Long.toString(range.getRangeMin()));
        AttributeValue maxRange = new AttributeValue().withN(Long.toString(range.getRangeMax()));
        Condition geohashCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN)
                .withAttributeValueList(minRange, maxRange);
        keyConditions.put(config.getGeohashAttributeName(), geohashCondition);
        // NOTE(review): a consistent read is requested on an index query; global
        // secondary indexes reject consistent reads -- confirm this index is local.
        queryRequest.withTableName(config.getTableName()).withKeyConditions(keyConditions)
                .withIndexName(config.getGeohashIndexName()).withConsistentRead(true)
                .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL)
                .withExclusiveStartKey(lastEvaluatedKey);
        QueryResult queryResult = config.getDynamoDBClient().query(queryRequest);
        queryResults.add(queryResult);
        // Continue while DynamoDB reports more pages.
        lastEvaluatedKey = queryResult.getLastEvaluatedKey();
    } while (lastEvaluatedKey != null);
    return queryResults;
}
From source file:com.intuit.tank.persistence.databases.AmazonDynamoDatabaseDocApi.java
License:Open Source License
/** * @{inheritDoc//from w ww.j a v a 2s. c o m */ @SuppressWarnings("unchecked") @Override public PagedDatabaseResult getPagedItems(String tableName, Object nextToken, String minRange, String maxRange, String instanceId, String jobId) { List<Item> ret = new ArrayList<Item>(); Map<String, AttributeValue> lastKeyEvaluated = (Map<String, AttributeValue>) nextToken; ScanRequest scanRequest = new ScanRequest().withTableName(tableName); Map<String, Condition> conditions = new HashMap<String, Condition>(); if (jobId != null) { Condition jobIdCondition = new Condition(); jobIdCondition.withComparisonOperator(ComparisonOperator.EQ) .withAttributeValueList(new AttributeValue().withS(jobId)); conditions.put(DatabaseKeys.JOB_ID_KEY.getShortKey(), jobIdCondition); } if (StringUtils.isNotBlank(instanceId)) { // add a filter Condition filter = new Condition(); filter.withComparisonOperator(ComparisonOperator.EQ) .withAttributeValueList(new AttributeValue().withS(instanceId)); scanRequest.addScanFilterEntry(DatabaseKeys.INSTANCE_ID_KEY.getShortKey(), filter); } Condition rangeKeyCondition = new Condition(); if (minRange != null && maxRange != null) { rangeKeyCondition.withComparisonOperator(ComparisonOperator.BETWEEN.toString()) .withAttributeValueList(new AttributeValue().withS(minRange)) .withAttributeValueList(new AttributeValue().withS(maxRange)); } else if (minRange != null) { rangeKeyCondition.withComparisonOperator(ComparisonOperator.GE.toString()) .withAttributeValueList(new AttributeValue().withS(minRange)); } else if (maxRange != null) { rangeKeyCondition.withComparisonOperator(ComparisonOperator.LT.toString()) .withAttributeValueList(new AttributeValue().withS(maxRange)); } else { rangeKeyCondition = null; } if (rangeKeyCondition != null) { conditions.put(DatabaseKeys.REQUEST_NAME_KEY.getShortKey(), rangeKeyCondition); } scanRequest.withScanFilter(conditions); scanRequest.withExclusiveStartKey(lastKeyEvaluated); ScanResult result = dynamoDb.scan(scanRequest); for 
(Map<String, AttributeValue> item : result.getItems()) { ret.add(getItemFromResult(item)); } return new PagedDatabaseResult(ret, result.getLastEvaluatedKey()); }
From source file:com.kirana.dao.OrderDaoImpl.java
/**
 * Queries orders for the given hash key whose "created_at" range key lies
 * between FromDate and ToDate (BETWEEN is inclusive on both ends), using an
 * eventually-consistent read against the mapper.
 *
 * @param id       order hash key
 * @param FromDate lower bound of the created_at range (string form)
 * @param ToDate   upper bound of the created_at range (string form)
 * @return the matching orders
 * @throws Exception propagated from the DynamoDB mapper
 */
@Override
public List<Order> getOrderBetween(long id, String FromDate, String ToDate) throws Exception {
    DynamoDBMapper mapper = new DynamoDBMapper(dbClient);
    Condition createdAtBetween = new Condition()
            .withComparisonOperator(ComparisonOperator.BETWEEN.toString())
            .withAttributeValueList(new AttributeValue().withS(FromDate), new AttributeValue().withS(ToDate));
    DynamoDBQueryExpression<Order> query = new DynamoDBQueryExpression<Order>()
            .withHashKeyValues(new Order(id))
            .withRangeKeyCondition("created_at", createdAtBetween);
    // Secondary-index queries require eventually-consistent reads.
    query.setConsistentRead(false);
    return mapper.query(Order.class, query);
}
From source file:com.numenta.taurus.service.TaurusClient.java
License:Open Source License
/** * Get list of tweets for the given metric filtered by the given time range returning the * results as they become available asynchronously. * * @param metricName The metric name to retrieve the tweets from * @param from The start time (aggregated) inclusive. * @param to The end time (aggregated) inclusive. * @param callback Callback for asynchronous call. It will be called on every {@link Tweet} *//*from w w w .j a v a 2 s . c o m*/ public void getTweets(String metricName, Date from, Date to, DataCallback<Tweet> callback) throws GrokException, IOException { if (metricName == null) { throw new ObjectNotFoundException("Cannot get tweets without metric name"); } final TaurusDataFactory dataFactory = TaurusApplication.getInstance().getDataFactory(); final SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US); timestampFormat.setTimeZone(TimeZone.getTimeZone("UTC")); // Key conditions Map<String, Condition> keyConditions = new HashMap<>(); // uid = modelId Condition modelIdCond = new Condition().withComparisonOperator(ComparisonOperator.EQ) .withAttributeValueList(new AttributeValue(metricName)); keyConditions.put("metric_name", modelIdCond); Condition timestampCondition; if (from != null && to != null) { // timestamp >= from and timestamp <=to timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN) .withAttributeValueList(new AttributeValue().withS(timestampFormat.format(from)), new AttributeValue().withS(timestampFormat.format(to))); keyConditions.put("agg_ts", timestampCondition); } else if (from != null) { // timestamp >= from timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.GT) .withAttributeValueList(new AttributeValue().withS(timestampFormat.format(from))); keyConditions.put("agg_ts", timestampCondition); } else if (to != null) { // timestamp <= to timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.LT) .withAttributeValueList(new 
AttributeValue().withS(timestampFormat.format(to))); keyConditions.put("agg_ts", timestampCondition); } // Prepare query request QueryRequest query = new QueryRequest().withTableName(TWEETS_TABLE) .withAttributesToGet("tweet_uid", "userid", "text", "username", "agg_ts", "created_at", "retweet_count") .withKeyConditions(keyConditions).withScanIndexForward(false) .withIndexName("taurus.metric_data-metric_name_index"); QueryResult result; String tweetId; String userId; String userName; String text; Date created; Date aggregated; AttributeValue retweet; int retweetCount; Map<String, AttributeValue> lastKey; try { do { // Get results result = _awsClient.query(query); for (Map<String, AttributeValue> item : result.getItems()) { tweetId = item.get("tweet_uid").getS(); userId = item.get("userid").getS(); text = item.get("text").getS(); userName = item.get("username").getS(); aggregated = DataUtils.parseGrokDate(item.get("agg_ts").getS()); created = DataUtils.parseGrokDate(item.get("created_at").getS()); // "retweet_count" is optional retweet = item.get("retweet_count"); if (retweet != null && retweet.getN() != null) { retweetCount = Integer.parseInt(retweet.getN()); } else { retweetCount = 0; } if (!callback.onData(dataFactory.createTweet(tweetId, aggregated, created, userId, userName, text, retweetCount))) { // Canceled by the user break; } } // Make sure to get all pages lastKey = result.getLastEvaluatedKey(); query.setExclusiveStartKey(lastKey); } while (lastKey != null); } catch (AmazonClientException e) { // Wraps Amazon's unchecked exception as IOException throw new IOException(e); } }
From source file:com.numenta.taurus.service.TaurusClient.java
License:Open Source License
/** * Get Metric values only from DynamoDB// www.ja va2 s . c o m * * @param modelId The model to get the data from * @param from The starting timestamp * @param to The ending timestamp * @param ascending Specifies ascending (true) or descending (false) * @param callback User defined callback to receive data */ public void getMetricValues(@NonNull String modelId, @NonNull Date from, @NonNull Date to, boolean ascending, @NonNull MetricValuesCallback callback) throws GrokException, IOException { // Get metric from cache ConcurrentSkipListMap<Long, CachedMetricValue> cache = _cachedMetricValues.get(modelId); if (cache == null) { cache = new ConcurrentSkipListMap<>(); ConcurrentSkipListMap<Long, CachedMetricValue> oldValues = _cachedMetricValues.putIfAbsent(modelId, cache); if (oldValues != null) { // Found old cached values cache = oldValues; } } // Try to get metric values from cache ConcurrentNavigableMap<Long, CachedMetricValue> cached = cache.subMap(from.getTime(), true, to.getTime(), true); if (!cached.isEmpty()) { Log.d(TAG, "from=" + from.getTime() + ", firstKey=" + cache.firstKey()); Log.d(TAG, "to=" + to.getTime() + ", lastKey=" + cache.lastKey()); // Check if we found the values in the cache if (!cached.isEmpty()) { // Return cached values sorted based on "ascending" order Set<Map.Entry<Long, CachedMetricValue>> values; if (ascending) { values = cached.entrySet(); } else { values = cached.descendingMap().entrySet(); } for (Map.Entry<Long, CachedMetricValue> metricValue : values) { if (!callback.onData(modelId, metricValue.getKey(), metricValue.getValue().value, metricValue.getValue().anomaly)) { // Canceled by the user break; } } return; } } final SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US); timestampFormat.setTimeZone(TimeZone.getTimeZone("UTC")); // Key conditions Map<String, Condition> keyConditions = new HashMap<>(); // uid = modelId keyConditions.put("uid", new 
Condition().withComparisonOperator(ComparisonOperator.EQ) .withAttributeValueList(new AttributeValue(modelId))); // timestamp >= from and timestamp <=to Condition timestampCondition = new Condition().withComparisonOperator(ComparisonOperator.BETWEEN); if (from.compareTo(to) <= 0) { timestampCondition.withAttributeValueList(new AttributeValue().withS(timestampFormat.format(from)), new AttributeValue().withS(timestampFormat.format(to))); } else { // FIXME This should not happen. Log.e(TAG, "TaurusClient#getMetricValues: 'from date' should not be greater than 'to date"); timestampCondition.withAttributeValueList(new AttributeValue().withS(timestampFormat.format(to)), new AttributeValue().withS(timestampFormat.format(from))); } keyConditions.put("timestamp", timestampCondition); // Prepare query request QueryRequest query = new QueryRequest().withTableName(METRIC_DATA_TABLE) .withAttributesToGet("timestamp", "metric_value", "anomaly_score").withKeyConditions(keyConditions) .withScanIndexForward(ascending); QueryResult result; Map<String, AttributeValue> lastKey; try { do { long timestamp; // Get results result = _awsClient.query(query); for (Map<String, AttributeValue> item : result.getItems()) { CachedMetricValue metricValue = new CachedMetricValue(); timestamp = DataUtils.parseGrokDate(item.get("timestamp").getS()).getTime(); metricValue.value = Float.parseFloat(item.get("metric_value").getN()); metricValue.anomaly = Float.parseFloat(item.get("anomaly_score").getN()); cache.put(timestamp, metricValue); if (!callback.onData(modelId, timestamp, metricValue.value, metricValue.anomaly)) { // Canceled by the user break; } } // Make sure to get all pages lastKey = result.getLastEvaluatedKey(); query.setExclusiveStartKey(lastKey); } while (lastKey != null); } catch (AmazonClientException e) { // Wraps Amazon's unchecked exception as IOException throw new IOException(e); } }
From source file:com.numenta.taurus.service.TaurusClient.java
License:Open Source License
/**
 * Get hourly aggregated data for all instances for a single day for the given
 * time range.
 *
 * @param date The date to get the data from
 * @param fromHour The start hour
 * @param toHour The end hour
 * @param ascending Specifies ascending (true) or descending (false)
 * @param callback User defined callback to receive instance data; returning
 *                 false cancels the iteration
 */
public void getAllInstanceDataForDate(@NonNull Date date, int fromHour, int toHour, boolean ascending,
        @NonNull DataCallback<InstanceData> callback) throws GrokException, IOException {
    Map<String, Condition> keyConditions = new HashMap<>();
    // Use "date" as hash key
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
    dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
    keyConditions.put("date", new Condition().withComparisonOperator(ComparisonOperator.EQ)
            .withAttributeValueList(new AttributeValue(dateFormat.format(date))));
    // Hours are stored zero-padded to two digits so string comparison sorts
    // them numerically.
    String start = fromHour > 9 ? Integer.toString(fromHour) : "0" + fromHour;
    if (fromHour == toHour) {
        // One single hour
        keyConditions.put("hour", new Condition().withComparisonOperator(ComparisonOperator.EQ)
                .withAttributeValueList(new AttributeValue(start)));
    } else {
        // Use "hour" as range key, BETWEEN is inclusive on both ends.
        // NOTE(review): if fromHour > toHour the BETWEEN bounds are inverted and
        // the query matches nothing -- confirm callers always pass an ordered range.
        String end = toHour > 9 ? Integer.toString(toHour) : "0" + toHour;
        keyConditions.put("hour", new Condition().withComparisonOperator(ComparisonOperator.BETWEEN)
                .withAttributeValueList(new AttributeValue(start), new AttributeValue(end)));
    }
    // Prepare query request
    QueryRequest query = new QueryRequest().withTableName(INSTANCE_DATA_HOURLY_TABLE)
            .withAttributesToGet("instance_id", "date_hour", "anomaly_score").withKeyConditions(keyConditions)
            .withScanIndexForward(ascending).withIndexName("taurus.instance_data_hourly-date_hour_index");
    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"));
    QueryResult result;
    String instanceId;
    float anomalyScore;
    float score;
    Map<String, AttributeValue> scores;
    Map<String, AttributeValue> lastKey;
    Matcher match;
    EnumSet<MetricType> metricMask;
    TaurusDataFactory dataFactory = TaurusApplication.getInstance().getDataFactory();
    try {
        do {
            // Get one page of data from DynamoDB
            result = _awsClient.query(query);
            for (Map<String, AttributeValue> item : result.getItems()) {
                // Convert "date_hour" to java milliseconds time; items whose
                // date_hour does not match the expected format are skipped.
                match = DATE_HOUR_FORMAT_REGEX.matcher(item.get("date_hour").getS());
                if (match.matches()) {
                    calendar.clear();
                    // Regex groups: 1=year, 2=month (1-based, Calendar wants
                    // 0-based), 3=day, 4=hour.
                    calendar.set(Integer.parseInt(match.group(1)), Integer.parseInt(match.group(2)) - 1,
                            Integer.parseInt(match.group(3)), Integer.parseInt(match.group(4)), 0, 0);
                    instanceId = item.get("instance_id").getS();
                    // Get max anomaly scores across all metrics of the instance
                    scores = item.get("anomaly_score").getM();
                    anomalyScore = 0;
                    double scaledScore;
                    metricMask = EnumSet.noneOf(MetricType.class);
                    for (Map.Entry<String, AttributeValue> entry : scores.entrySet()) {
                        score = Float.parseFloat(entry.getValue().getN());
                        // Flag metric types whose log-scaled score reaches the
                        // "yellow bar" threshold.
                        scaledScore = DataUtils.logScale(Math.abs(score));
                        if (scaledScore >= TaurusApplication.getYellowBarFloor()) {
                            metricMask.add(MetricType.valueOf(entry.getKey()));
                        }
                        anomalyScore = Math.max(score, anomalyScore);
                    }
                    if (!callback.onData(dataFactory.createInstanceData(instanceId, AggregationType.Day,
                            calendar.getTimeInMillis(), anomalyScore, metricMask))) {
                        // Canceled by the user
                        break;
                    }
                }
            }
            // Make sure to get all pages
            lastKey = result.getLastEvaluatedKey();
            query.setExclusiveStartKey(lastKey);
        } while (lastKey != null);
    } catch (AmazonClientException e) {
        // Wraps Amazon's unchecked exception as IOException
        throw new IOException(e);
    }
}
From source file:org.openhab.persistence.dynamodb.internal.DynamoDBPersistenceService.java
License:Open Source License
/**
 * Builds the DynamoDB time condition for the filter's begin/end dates:
 * null when neither is set, GE/LE for a single bound, BETWEEN for both
 * (formatted with the item date formatter).
 */
private Condition constructTimeCondition(FilterCriteria filter) {
    boolean hasBegin = filter.getBeginDate() != null;
    boolean hasEnd = filter.getEndDate() != null;
    // No time bounds at all -> no condition
    if (!hasBegin && !hasEnd) {
        return null;
    }
    if (hasBegin && hasEnd) {
        // Both bounds: inclusive BETWEEN
        return new Condition().withComparisonOperator(ComparisonOperator.BETWEEN).withAttributeValueList(
                new AttributeValue().withS(AbstractDynamoDBItem.DATEFORMATTER.format(filter.getBeginDate())),
                new AttributeValue().withS(AbstractDynamoDBItem.DATEFORMATTER.format(filter.getEndDate())));
    }
    if (hasBegin) {
        // Only a begin date: timestamp >= begin
        return new Condition().withComparisonOperator(ComparisonOperator.GE).withAttributeValueList(
                new AttributeValue().withS(AbstractDynamoDBItem.DATEFORMATTER.format(filter.getBeginDate())));
    }
    // Only an end date: timestamp <= end
    return new Condition().withComparisonOperator(ComparisonOperator.LE).withAttributeValueList(
            new AttributeValue().withS(AbstractDynamoDBItem.DATEFORMATTER.format(filter.getEndDate())));
}
From source file:org.selman.tweetamo.PersistentStore.java
License:Apache License
public QueryResult getLatestTweetsForScreenName(String screenName, long timestamp) throws Exception { try {//from ww w.jav a 2 s. co m long startDateMilli = System.currentTimeMillis(); Map<String, Condition> keyConditions = new HashMap<String, Condition>(); keyConditions.put(COL_SCREENNAME, new Condition().withComparisonOperator(ComparisonOperator.EQ) .withAttributeValueList(new AttributeValue().withS(screenName))); keyConditions.put(COL_CREATEDAT, new Condition().withComparisonOperator(ComparisonOperator.BETWEEN).withAttributeValueList( new AttributeValue().withN(Long.toString(timestamp)), new AttributeValue().withN(Long.toString(startDateMilli)))); QueryRequest queryRequest = new QueryRequest().withTableName(TABLE_NAME).withIndexName(INDEX_SCREENNAME) .withKeyConditions(keyConditions).withSelect(Select.ALL_ATTRIBUTES).withScanIndexForward(true); QueryResult result = dynamoDB.query(queryRequest); return result; } catch (Exception e) { handleException(e); } return null; }