List of usage examples for com.google.common.collect EvictingQueue addAll
@Override public boolean addAll(Collection<? extends E> collection)
From source file:org.codice.ddf.platform.logging.LoggingService.java
/**
 * Builds a replacement bounded queue with the given capacity and copies the
 * currently retained log events into it. Because {@code EvictingQueue}
 * evicts its oldest elements on overflow, only the newest
 * {@code newMaxLogEvents} entries survive a shrink.
 *
 * @param newMaxLogEvents maximum number of log events the new queue may hold
 * @return a new queue seeded with the existing {@code logEvents}
 */
private EvictingQueue<LogEvent> createNewEvictingQueue(int newMaxLogEvents) {
    final EvictingQueue<LogEvent> resized = EvictingQueue.create(newMaxLogEvents);
    resized.addAll(logEvents);
    return resized;
}
From source file:com.streamsets.datacollector.execution.runner.common.DataObserverRunner.java
/**
 * Applies a rules-configuration change in three steps, in order:
 * (1) tear down metrics and clear retained samples for removed rules,
 * (2) forward the change request to the stats aggregator when one is wired,
 * (3) resize the per-rule sampled-record queues whose sample sizes changed.
 *
 * NOTE(review): resizing copies the old queue into a fresh EvictingQueue, so
 * only the newest 'newSize' samples are retained after a shrink.
 *
 * @param rulesConfigurationChangeRequest the new rules configuration to apply
 */
public void handleConfigurationChangeRequest(RulesConfigurationChangeRequest rulesConfigurationChangeRequest) {
    // update config changes
    this.rulesConfigurationChangeRequest = rulesConfigurationChangeRequest;
    // remove metrics for changed / deleted rules
    for (String ruleId : rulesConfigurationChangeRequest.getRulesToRemove().keySet()) {
        MetricsConfigurator.removeMeter(metrics, USER_PREFIX + ruleId, name, rev);
        MetricsConfigurator.removeCounter(metrics, USER_PREFIX + ruleId, name, rev);
        EvictingQueue<SampledRecord> records = ruleToSampledRecordsMap.get(ruleId);
        if (records != null) {
            // drop retained samples for the removed rule; the map entry itself is kept
            records.clear();
        }
    }
    // send RulesConfigurationChangeRequest to stats aggregator
    if (startsAggregatorQueue != null) {
        startsAggregatorQueue
                .offer(AggregatorUtil.createConfigChangeRequestRecord(rulesConfigurationChangeRequest));
    }
    // resize evicting queue which retains sampled records
    for (Map.Entry<String, Integer> e : rulesConfigurationChangeRequest.getRulesWithSampledRecordSizeChanges()
            .entrySet()) {
        if (ruleToSampledRecordsMap.get(e.getKey()) != null) {
            EvictingQueue<SampledRecord> records = ruleToSampledRecordsMap.get(e.getKey());
            int newSize = e.getValue();
            // cap the requested size at the configured hard maximum
            int maxSize = configuration.get(Constants.SAMPLED_RECORDS_MAX_CACHE_SIZE_KEY,
                    Constants.SAMPLED_RECORDS_MAX_CACHE_SIZE_DEFAULT);
            if (newSize > maxSize) {
                newSize = maxSize;
            }
            EvictingQueue<SampledRecord> newQueue = EvictingQueue.create(newSize);
            // this will retain only the last 'newSize' number of elements
            newQueue.addAll(records);
            ruleToSampledRecordsMap.put(e.getKey(), newQueue);
        }
    }
}
From source file:org.hawkular.datamining.forecast.AutomaticForecaster.java
@Override public void update(Update update) { synchronized (selectModelLock) { if (update.getWindowSize() != null && !update.getWindowSize().equals(config.getWindowsSize())) { EvictingQueue<DataPoint> newWindow = EvictingQueue.create(update.getWindowSize()); newWindow.addAll(window); window = newWindow;// w ww .java 2 s .c o m } if (update.getConceptDriftStrategy() != null) { update.getConceptDriftStrategy().forecaster = this; } config.update(update); selectBestModel(Collections.emptyList()); } }
From source file:org.hawkular.datamining.forecast.AutomaticForecaster.java
/**
 * Fits every applicable model-optimizer to the current window plus the given
 * new points, scores each fitted model with the configured information
 * criterion, and installs the lowest-scoring model as {@code usedModel}.
 *
 * Seasonal special case: if the winner is triple exponential smoothing, the
 * window is grown to at least 3 full periods so future fits see enough data.
 *
 * @param dataPoints extra points appended after the window contents; may be empty
 */
private void selectBestModel(final List<DataPoint> dataPoints) {
    synchronized (selectModelLock) {
        // candidate training set = retained window followed by the new points
        final List<DataPoint> initPoints = new ArrayList<>();
        initPoints.addAll(window);
        initPoints.addAll(dataPoints);
        if (initPoints.isEmpty()) {
            return;
        }
        Logger.LOGGER.debugf("Estimating best model for: %s, previous: %s", metricContext.getMetricId(),
                usedModel);
        TimeSeriesModel bestModel = null;
        ModelOptimizer bestOptimizer = null;
        // lower information criterion = better; start at +inf so any model wins
        double bestIC = Double.POSITIVE_INFINITY;
        for (Function<MetricContext, ModelOptimizer> modelOptimizerSupplier : applicableModels) {
            ModelOptimizer modelOptimizer = modelOptimizerSupplier.apply(metricContext);
            /**
             * if model is defined skip others
             */
            if (config.getModelToUse() != null && !config.getModelToUse().isOptimizedBy(modelOptimizer)) {
                continue;
            }
            try {
                if (modelOptimizer instanceof TripleExponentialSmoothing.TripleExOptimizer) {
                    // seasonal optimizer needs the configured period length up front
                    ((TripleExponentialSmoothing.TripleExOptimizer) modelOptimizer)
                            .setPeriods(config.getPeriod());
                }
                TimeSeriesModel currentModel = modelOptimizer.minimizedMSE(initPoints);
                AccuracyStatistics initStatistics = currentModel.initStatistics();
                // penalize model complexity via SSE + parameter count + sample size
                InformationCriterionHolder icHolder = new InformationCriterionHolder(initStatistics.getSse(),
                        currentModel.numberOfParams(), initPoints.size());
                Logger.LOGGER.debugf("Estimated currentModel: %s, data size: %d,init MSE: %f, %s",
                        currentModel.toString(), initPoints.size(), initStatistics.getMse(), icHolder);
                double currentIc = icHolder.informationCriterion(config.getIc());
                if (currentIc < bestIC) {
                    bestIC = currentIc;
                    bestModel = currentModel;
                    bestOptimizer = modelOptimizer;
                }
            } catch (IllegalArgumentException ex) {
                // optimizer rejected this data set — try the next candidate model
                continue;
            }
        }
        if (bestModel != null) {
            /**
             * increase windows size if the model is seasonal
             */
            if (bestModel instanceof TripleExponentialSmoothing) {
                Integer periods = ((TripleExponentialSmoothing.TripleExOptimizer) bestOptimizer).getPeriods();
                config.setPeriod(periods);
                if (config.getWindowsSize() < periods * 3) {
                    // grow the window to hold at least 3 full seasonal cycles
                    config.setWindowsSize(periods * 3);
                    EvictingQueue<DataPoint> newWindow = EvictingQueue.create(periods * 3);
                    newWindow.addAll(window);
                    window = newWindow;
                }
            }
            if (config.getConceptDriftStrategy() instanceof ErrorChangeStrategy) {
                // seed the drift detector with the winning model's baseline error
                ((ErrorChangeStrategy) config.getConceptDriftStrategy()).setError(bestModel.initStatistics());
            }
            usedModel = bestModel;
            counter = 0;
            Logger.LOGGER.debugf("Best model for: %s, is %s, %s", metricContext.getMetricId(),
                    bestModel.getClass().getSimpleName(), bestModel.initStatistics());
        }
    }
}
From source file:com.streamsets.datacollector.execution.runner.common.ProductionPipelineRunner.java
private void retainErrorsInMemory(Map<String, List<Record>> errorRecords, Map<String, List<ErrorMessage>> errorMessages) { synchronized (errorRecordsMutex) { for (Map.Entry<String, List<Record>> e : errorRecords.entrySet()) { EvictingQueue<Record> errorRecordList = stageToErrorRecordsMap.get(e.getKey()); if (errorRecordList == null) { //replace with a data structure with an upper cap errorRecordList = EvictingQueue .create(configuration.get(Constants.MAX_ERROR_RECORDS_PER_STAGE_KEY, Constants.MAX_ERROR_RECORDS_PER_STAGE_DEFAULT)); stageToErrorRecordsMap.put(e.getKey(), errorRecordList); }/*from w w w . j a va2 s . c om*/ errorRecordList.addAll(errorRecords.get(e.getKey())); } for (Map.Entry<String, List<ErrorMessage>> e : errorMessages.entrySet()) { EvictingQueue<ErrorMessage> errorMessageList = stageToErrorMessagesMap.get(e.getKey()); if (errorMessageList == null) { //replace with a data structure with an upper cap errorMessageList = EvictingQueue.create(configuration.get(Constants.MAX_PIPELINE_ERRORS_KEY, Constants.MAX_PIPELINE_ERRORS_DEFAULT)); stageToErrorMessagesMap.put(e.getKey(), errorMessageList); } errorMessageList.addAll(errorMessages.get(e.getKey())); } } }