Example usage for java.util.concurrent LinkedBlockingQueue clear

List of usage examples for java.util.concurrent LinkedBlockingQueue clear

Introduction

On this page you can find example usages of java.util.concurrent.LinkedBlockingQueue.clear().

Prototype

public void clear() 

Source Link

Document

Atomically removes all of the elements from this queue.
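
A minimal, self-contained sketch of the behavior described above: after clear() the queue is empty, so size() returns 0 and poll() returns null. This is illustrative only and not taken from the examples below.

import java.util.concurrent.LinkedBlockingQueue;

public class ClearExample {
    public static void main(String[] args) {
        // Bounded queue holding a few elements
        LinkedBlockingQueue<String> queue = new LinkedBlockingQueue<String>(10);
        queue.offer("a");
        queue.offer("b");
        queue.offer("c");
        System.out.println("Before clear: size=" + queue.size()); // 3

        // Atomically removes all of the elements from this queue
        queue.clear();
        System.out.println("After clear: size=" + queue.size());  // 0
        System.out.println("poll() returns " + queue.poll());     // null
    }
}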

Usage

From source file:com.numenta.taurus.service.TaurusDataSyncService.java

/**
 * Load all instance data from the database
 */
@Override
protected void loadAllData() throws HTMException, IOException {

    Context context = TaurusApplication.getContext();
    if (context == null) {
        // Should not happen.
        // We need application context to run.
        return;
    }

    // Get last known date from the database
    final TaurusDatabase database = TaurusApplication.getDatabase();
    if (database == null) {
        // Should not happen.
        // We need the database to run.
        return;
    }
    long from = database.getLastTimestamp();

    // Get current time
    final long now = System.currentTimeMillis();

    // The server updates the instance data table in hourly buckets as the models process
    // data. This may leave the last hour with outdated values if the server updates the
    // instance data table after we start loading the new hourly bucket.
    // To make sure the last hourly bucket is up to date, get data from the last update up
    // to now, and once the time passes a certain threshold (minute 15 of the hour) also
    // download the previous hour again.
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);

    // Check if we need to update the previous hour
    long previousHourThreshold = prefs.getLong(PREF_PREVIOUS_HOUR_THRESHOLD, now);
    if (now >= previousHourThreshold) {
        // Download the previous hour
        from -= DataUtils.MILLIS_PER_HOUR;

        // Set threshold time to minute 15 of next hour
        Calendar calendar = Calendar.getInstance();
        calendar.setTimeInMillis(now);
        calendar.add(Calendar.HOUR, 1);
        calendar.set(Calendar.MINUTE, 15);
        calendar.set(Calendar.SECOND, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        prefs.edit().putLong(PREF_PREVIOUS_HOUR_THRESHOLD, calendar.getTimeInMillis()).apply();
    }
    final long oldestTimestamp = DataUtils
            .floorTo60minutes(now - TaurusApplication.getNumberOfDaysToSync() * DataUtils.MILLIS_PER_DAY);

    // Check if we need to catch up and download old data
    if (database.getFirstTimestamp() > oldestTimestamp) {
        from = oldestTimestamp;
    }

    // Don't get data older than NUMBER_OF_DAYS_TO_SYNC
    from = Math.max(from, oldestTimestamp);

    // Blocking queue holding data waiting to be saved to the database.
    // This queue will be filled by the TaurusClient as it downloads data and it will be
    // emptied by the databaseTask as it saves data to the database
    final LinkedBlockingQueue<InstanceData> pending = new LinkedBlockingQueue<InstanceData>(
            PENDING_IO_BUFFER_SIZE);

    // Background task used to save data to the database. This task will wait for data to arrive
    // from the server and save it to the database in batches until it finds the end of the
    // queue marked by DATA_EOF or it times out after 60 seconds
    final Future<?> databaseTask = getService().getIOThreadPool().submit(new Runnable() {
        @Override
        public void run() {
            // Save data in batches, one hour at a time
            final List<InstanceData> batch = new ArrayList<InstanceData>();
            // Negative batch size: the data is processed from the newest timestamp backwards
            int batchSize = -DataUtils.MILLIS_PER_HOUR;

            // Tracks the batch boundary. Once the data timestamp drops below the batch
            // timestamp, the current batch is saved and a new one is started
            long batchTimestamp = now - DataUtils.MILLIS_PER_HOUR;

            try {
                // Process all pending data until the DATA_EOF is found or a timeout is reached
                InstanceData data;
                while ((data = pending.poll(60, TimeUnit.SECONDS)) != DATA_EOF && data != null) {
                    batch.add(data);
                    // Process batches
                    if (data.getTimestamp() < batchTimestamp) {
                        // Calculate next batch timestamp
                        batchTimestamp = data.getTimestamp() + batchSize;
                        if (database.addInstanceDataBatch(batch)) {
                            // Notify receivers new data has arrived
                            fireInstanceDataChangedEvent();
                        }
                        batch.clear();
                    }
                }
                // Last batch
                if (!batch.isEmpty()) {
                    if (database.addInstanceDataBatch(batch)) {
                        // Notify receivers new data has arrived
                        fireInstanceDataChangedEvent();
                    }
                }
            } catch (InterruptedException e) {
                Log.w(TAG, "Interrupted while loading data");
            }
        }
    });

    try {
        // Get new data from server
        Log.d(TAG, "Start downloading data from " + from);
        TaurusClient client = getClient();
        client.getAllInstanceData(new Date(from), new Date(now), false,
                new HTMClient.DataCallback<InstanceData>() {
                    @Override
                    public boolean onData(InstanceData data) {
                        // enqueue data for saving
                        try {
                            pending.put(data);
                        } catch (InterruptedException e) {
                            pending.clear();
                            Log.w(TAG, "Interrupted while loading data");
                            return false;
                        }
                        return true;
                    }
                });
        // Mark the end of the records
        pending.add(DATA_EOF);
        // Wait for the database task to complete
        databaseTask.get();
        // Clear client cache
        client.clearCache();
    } catch (InterruptedException e) {
        Log.w(TAG, "Interrupted while loading data");
    } catch (ExecutionException e) {
        Log.e(TAG, "Failed to load data", e);
    }
}
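
The example above boils down to a producer/consumer pattern: a bounded LinkedBlockingQueue buffers downloaded records between the download callback and a database task, a sentinel object (DATA_EOF) marks the end of the stream, and clear() is called when the producer is interrupted so that no stale records stay buffered. The sketch below reproduces just that skeleton; the Record type, RECORD_EOF sentinel, and PENDING_BUFFER_SIZE constant are hypothetical stand-ins, not part of the original service.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class PendingQueueSketch {
    // Hypothetical record type and sentinel marking the end of the stream
    static final class Record {
        final long timestamp;
        Record(long timestamp) { this.timestamp = timestamp; }
    }
    static final Record RECORD_EOF = new Record(Long.MIN_VALUE);
    static final int PENDING_BUFFER_SIZE = 1024;

    public static void main(String[] args) throws Exception {
        final LinkedBlockingQueue<Record> pending = new LinkedBlockingQueue<Record>(PENDING_BUFFER_SIZE);
        ExecutorService pool = Executors.newSingleThreadExecutor();

        // Consumer: drain records until the sentinel arrives or polling times out
        Future<?> consumer = pool.submit(() -> {
            try {
                Record r;
                while ((r = pending.poll(60, TimeUnit.SECONDS)) != RECORD_EOF && r != null) {
                    // the real code saves each record to the database here
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });

        // Producer: enqueue downloaded records; on interrupt, discard whatever is pending
        try {
            for (long ts = 0; ts < 10; ts++) {
                pending.put(new Record(ts));
            }
        } catch (InterruptedException e) {
            pending.clear(); // drop buffered records so nothing stale is saved
        }

        pending.add(RECORD_EOF);   // mark the end of the records
        consumer.get();            // wait for the consumer task to complete
        pool.shutdown();
    }
}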

From source file:com.groksolutions.grok.mobile.service.GrokDataSyncService.java

/**
 * Loads metric data from the server
 *
 * @param metricId (optional) The metric Id to get the data. If metricId is {@code null} then
 *                 loads data for all metrics at once.
 * @param from     return records from this date
 * @param to       return records up to this date
 * @see HTMClient#getMetricData
 */
private void loadMetricData(final String metricId, final long from, final long to)
        throws HTMException, IOException {

    if (getClient() == null) {
        Log.w(TAG, "Not connected to any server yet");
        return;
    }
    final CoreDatabase database = HTMITApplication.getDatabase();

    // Blocking queue holding metric data waiting to be saved to the
    // database. This queue will be filled by the HTMClient as it downloads
    // the metric data and it will be emptied by the databaseTask as it
    // saves the data to the database
    final LinkedBlockingQueue<MetricData> pending = new LinkedBlockingQueue<>(
            MAX_PENDING_METRIC_DATA_IO_BUFFER);

    // Background task used to save metric data to the database. This task will
    // wait for metric data to arrive from the server and save them to the
    // database in batches until it finds the end of the queue marked by
    // METRIC_DATA_EOF or it times out after 60 seconds
    final Future<?> databaseTask = getService().getIOThreadPool().submit(new Runnable() {
        @Override
        public void run() {

            // Make the batch size 1 hour when syncing all metrics, or one week for a
            // single metric
            int batchSize = metricId == null ? DataUtils.MILLIS_PER_HOUR : 24 * 7 * DataUtils.MILLIS_PER_HOUR;

            // Save metric data in batches
            final List<MetricData> batch = new ArrayList<>();

            // Tracks batch timestamp. Once the metric timestamp is greater
            // than the batch timestamp, a new batch is created
            long batchTimestamp = 0;

            try {
                // Process all pending metric data until the METRIC_DATA_EOF
                // is found or a timeout is reached
                MetricData metricData;
                while ((metricData = pending.poll(60, TimeUnit.SECONDS)) != METRIC_DATA_EOF
                        && metricData != null) {
                    // Add metric data to batch regardless of the timestamp.
                    // At this point we may receive stale metric data with
                    // lower timestamp after we receive the latest data with
                    // the current timestamp. As a side effect, you may see
                    // gaps in the data as described in MER-1524
                    batch.add(metricData);
                    // Process batches
                    if (metricData.getTimestamp() > batchTimestamp) {
                        // Calculate next batch timestamp
                        batchTimestamp = metricData.getTimestamp() + batchSize;
                        if (database.addMetricDataBatch(batch)) {
                            Log.d(TAG, "Saving " + batch.size() + " new records");
                            // Notify receivers new data has arrived
                            fireMetricDataChangedEvent();
                        }
                        batch.clear();
                    }
                }
                // Last batch
                if (!batch.isEmpty()) {
                    if (database.addMetricDataBatch(batch)) {
                        Log.d(TAG, "Received " + batch.size() + " records");
                        // Notify receivers new data has arrived
                        fireMetricDataChangedEvent();
                    }
                }
            } catch (InterruptedException e) {
                Log.w(TAG, "Interrupted while loading metric data");
            }
        }
    });

    try {
        // Get new data from server
        getClient().getMetricData(metricId, new Date(from), new Date(to),
                new HTMClient.DataCallback<MetricData>() {
                    @Override
                    public boolean onData(MetricData metricData) {
                        // enqueue data for saving
                        try {
                            Metric metric = database.getMetric(metricData.getMetricId());
                            if (metric == null) {
                                Log.w(TAG, "Received data for unknown metric:" + metricData.getMetricId());
                                return true;
                            }
                            pending.put(metricData);
                        } catch (InterruptedException e) {
                            pending.clear();
                            Log.w(TAG, "Interrupted while loading metric data");
                            return false;
                        }
                        return true;
                    }
                });
        // Mark the end of the records
        pending.add(METRIC_DATA_EOF);
        // Wait for the database task to complete
        databaseTask.get();
    } catch (InterruptedException e) {
        Log.w(TAG, "Interrupted while loading metric data");
    } catch (ExecutionException e) {
        Log.e(TAG, "Failed to load metric data", e);
    }
}
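
Both metric examples (this one and the YOMP variant below) drain the queue with the same timestamp-bucketed batching: records accumulate in a list until one crosses the current batch boundary, the list is written to the database in a single call, and the boundary advances by the batch size (one hour when syncing all metrics, one week for a single metric). The following is a reduced sketch of just that step; the Record type and BatchSink interface are hypothetical stand-ins for MetricData and the CoreDatabase call.

import java.util.ArrayList;
import java.util.List;

public class TimestampBatchingSketch {
    static final long MILLIS_PER_HOUR = 60 * 60 * 1000L;

    // Hypothetical record type standing in for MetricData
    static final class Record {
        final long timestamp;
        Record(long timestamp) { this.timestamp = timestamp; }
    }

    // Hypothetical sink standing in for database.addMetricDataBatch(...)
    interface BatchSink {
        boolean addBatch(List<Record> batch);
    }

    static void drain(Iterable<Record> records, BatchSink sink, long batchSize) {
        final List<Record> batch = new ArrayList<Record>();
        long batchTimestamp = 0; // batch boundary, starts at 0 as in the original example

        for (Record record : records) {
            batch.add(record);
            if (record.timestamp > batchTimestamp) {
                // The record crossed the boundary: flush the batch and advance the boundary
                batchTimestamp = record.timestamp + batchSize;
                boolean saved = sink.addBatch(batch);
                // the original service fires a data-changed event when 'saved' is true
                batch.clear();
            }
        }
        if (!batch.isEmpty()) {
            sink.addBatch(batch); // save the last, partial batch
        }
    }
}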

From source file:com.YOMPsolutions.YOMP.mobile.service.YOMPDataSyncService.java

/**
 * Loads metric data from the server
 *
 * @param metricId (optional) The metric Id to get the data. If metricId is {@code null} then
 *                 loads data for all metrics at once.
 * @param from     return records from this date
 * @param to       return records up to this date
 * @see com.numenta.core.service.YOMPClient#getMetricData
 */
private void loadMetricData(final String metricId, final long from, final long to)
        throws YOMPException, IOException {

    if (getClient() == null) {
        Log.w(TAG, "Not connected to any server yet");
        return;
    }
    final CoreDatabase database = YOMPApplication.getDatabase();

    // Blocking queue holding metric data waiting to be saved to the
    // database. This queue will be filled by the YOMPClient as it downloads
    // the metric data and it will be emptied by the databaseTask as it
    // saves the data to the database
    final LinkedBlockingQueue<MetricData> pending = new LinkedBlockingQueue<>(
            MAX_PENDING_METRIC_DATA_IO_BUFFER);

    // Background task used to save metric data to the database. This task will
    // wait for metric data to arrive from the server and save them to the
    // database in batches until it finds the end of the queue marked by
    // METRIC_DATA_EOF or it times out after 60 seconds
    final Future<?> databaseTask = getService().getIOThreadPool().submit(new Runnable() {
        @Override
        public void run() {

            // Make the batch size 1 hour when syncing all metrics, or one week for a
            // single metric
            int batchSize = metricId == null ? DataUtils.MILLIS_PER_HOUR : 24 * 7 * DataUtils.MILLIS_PER_HOUR;

            // Save metric data in batches
            final List<MetricData> batch = new ArrayList<>();

            // Tracks batch timestamp. Once the metric timestamp is greater
            // than the batch timestamp, a new batch is created
            long batchTimestamp = 0;

            try {
                // Process all pending metric data until the METRIC_DATA_EOF
                // is found or a timeout is reached
                MetricData metricData;
                while ((metricData = pending.poll(60, TimeUnit.SECONDS)) != METRIC_DATA_EOF
                        && metricData != null) {
                    // Add metric data to batch regardless of the timestamp.
                    // At this point we may receive stale metric data with
                    // lower timestamp after we receive the latest data with
                    // the current timestamp. As a side effect, you may see
                    // gaps in the data as described in MER-1524
                    batch.add(metricData);
                    // Process batches
                    if (metricData.getTimestamp() > batchTimestamp) {
                        // Calculate next batch timestamp
                        batchTimestamp = metricData.getTimestamp() + batchSize;
                        if (database.addMetricDataBatch(batch)) {
                            Log.d(TAG, "Saving " + batch.size() + " new records");
                            // Notify receivers new data has arrived
                            fireMetricDataChangedEvent();
                        }
                        batch.clear();
                    }
                }
                // Last batch
                if (!batch.isEmpty()) {
                    if (database.addMetricDataBatch(batch)) {
                        Log.d(TAG, "Received " + batch.size() + " records");
                        // Notify receivers new data has arrived
                        fireMetricDataChangedEvent();
                    }
                }
            } catch (InterruptedException e) {
                Log.w(TAG, "Interrupted while loading metric data");
            }
        }
    });

    try {
        // Get new data from server
        getClient().getMetricData(metricId, new Date(from), new Date(to),
                new YOMPClient.DataCallback<MetricData>() {
                    @Override
                    public boolean onData(MetricData metricData) {
                        // enqueue data for saving
                        try {
                            Metric metric = database.getMetric(metricData.getMetricId());
                            if (metric == null) {
                                Log.w(TAG, "Received data for unknown metric:" + metricData.getMetricId());
                                return true;
                            }
                            pending.put(metricData);
                        } catch (InterruptedException e) {
                            pending.clear();
                            Log.w(TAG, "Interrupted while loading metric data");
                            return false;
                        }
                        return true;
                    }
                });
        // Mark the end of the records
        pending.add(METRIC_DATA_EOF);
        // Wait for the database task to complete
        databaseTask.get();
    } catch (InterruptedException e) {
        Log.w(TAG, "Interrupted while loading metric data");
    } catch (ExecutionException e) {
        Log.e(TAG, "Failed to load metric data", e);
    }
}