List of usage examples for java.util.concurrent.CopyOnWriteArrayList
public CopyOnWriteArrayList()
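CopyOnWriteArrayList is a thread-safe List whose mutative operations (add, set, remove, ...) work on a fresh copy of the backing array, so iterators traverse an immutable snapshot and never throw ConcurrentModificationException. Before the examples from real projects, a minimal self-contained sketch of the no-arg constructor and the snapshot semantics:

    import java.util.concurrent.CopyOnWriteArrayList;

    public class SnapshotDemo {
        public static void main(String[] args) {
            CopyOnWriteArrayList<String> list = new CopyOnWriteArrayList<>();
            list.add("a");
            list.add("b");
            // the for-each iterator snapshots the array here; later writes don't affect it
            for (String s : list) {
                list.add(s + "'"); // safe: mutation during iteration, no CME
            }
            System.out.println(list); // [a, b, a', b']
        }
    }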
From source file:org.apache.camel.component.mock.MockEndpoint.java
private void init() {
    expectedCount = -1;
    counter = 0;
    processors = new HashMap<Integer, Processor>();
    receivedExchanges = new CopyOnWriteArrayList<Exchange>();
    failures = new CopyOnWriteArrayList<Throwable>();
    tests = new CopyOnWriteArrayList<Runnable>();
    latch = null;
    sleepForEmptyTest = 0;
    resultWaitTime = 0;
    resultMinimumWaitTime = 0L;
    expectedMinimumCount = -1;
    expectedBodyValues = null;
    actualBodyValues = new ArrayList<Object>();
}
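The endpoint resets itself by assigning fresh CopyOnWriteArrayList instances rather than calling clear(). Both are thread-safe, but reassignment guarantees that any thread still iterating the old list keeps its complete snapshot. A minimal sketch of that reset pattern (the class and field names here are illustrative, not Camel's):

    import java.util.concurrent.CopyOnWriteArrayList;

    class ExchangeLog {
        // volatile so readers observe the freshly assigned list after a reset
        private volatile CopyOnWriteArrayList<String> received = new CopyOnWriteArrayList<>();

        void record(String exchange) {
            received.add(exchange);
        }

        void reset() {
            // iterators over the old list keep working on its snapshot
            received = new CopyOnWriteArrayList<>();
        }

        int count() {
            return received.size();
        }
    }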
From source file:org.wso2.carbon.event.processor.core.internal.CarbonEventProcessorService.java
public void addExecutionPlanConfigurationFile(ExecutionPlanConfigurationFile configurationFile) {
    int tenantId = PrivilegedCarbonContext.getThreadLocalCarbonContext().getTenantId();
    List<ExecutionPlanConfigurationFile> executionPlanConfigurationFiles = tenantSpecificExecutionPlanFiles
            .get(tenantId);
    if (executionPlanConfigurationFiles == null) {
        executionPlanConfigurationFiles = new CopyOnWriteArrayList<>();
        tenantSpecificExecutionPlanFiles.put(tenantId, executionPlanConfigurationFiles);
    }
    executionPlanConfigurationFiles.add(configurationFile);
}
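The get-then-put sequence above is not atomic: two threads adding files for the same tenant can each create a list, and one file is silently lost. Assuming the map is a ConcurrentMap (the WSO2 field type is an assumption here), computeIfAbsent gives a race-free variant; a sketch:

    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.CopyOnWriteArrayList;

    class TenantFiles<F> {
        private final ConcurrentMap<Integer, List<F>> byTenant = new ConcurrentHashMap<>();

        void add(int tenantId, F file) {
            // computeIfAbsent creates the per-tenant list exactly once, atomically
            byTenant.computeIfAbsent(tenantId, id -> new CopyOnWriteArrayList<>()).add(file);
        }
    }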
From source file:com.web.server.EARDeployer.java
public void obtainUrls(FileObject rootEar, FileObject ear, CopyOnWriteArrayList<FileObject> fileObjects,
        ConcurrentHashMap jarclassListMap, CopyOnWriteArrayList<FileObject> warObjects,
        StandardFileSystemManager fsManager) throws IOException {
    FileObject[] childrenJars = ear.getChildren();
    for (int childcount = 0; childcount < childrenJars.length; childcount++) {
        if (childrenJars[childcount].getType() == FileType.FOLDER) {
            obtainUrls(rootEar, childrenJars[childcount], fileObjects, jarclassListMap, warObjects, fsManager);
        }
        if (childrenJars[childcount].getType() == FileType.FILE
                && (childrenJars[childcount].getName().getBaseName().endsWith(".jar")
                        || childrenJars[childcount].getName().getBaseName().endsWith(".war"))) {
            if (childrenJars[childcount].getName().getBaseName().endsWith(".war")) {
                File file = new File(scanDirectory + "/" + childrenJars[childcount].getName().getBaseName());
                if (!file.exists() || (file.exists() && file.lastModified() != childrenJars[childcount]
                        .getContent().getLastModifiedTime())) {
                    InputStream fistr = childrenJars[childcount].getContent().getInputStream();
                    byte[] filyByt = new byte[4096];
                    FileOutputStream warFile = new FileOutputStream(
                            scanDirectory + "/" + childrenJars[childcount].getName().getBaseName());
                    int len = 0;
                    while ((len = fistr.read(filyByt)) != -1) {
                        warFile.write(filyByt, 0, len);
                    }
                    warFile.close();
                    fistr.close();
                    warObjects.add(childrenJars[childcount]);
                }
            } else if (!childrenJars[childcount].getURL().toString().trim()
                    .startsWith(rootEar.getURL().toString() + "lib/")) {
                CopyOnWriteArrayList<String> classList = new CopyOnWriteArrayList<String>();
                getClassList(childrenJars[childcount], classList, fsManager);
                jarclassListMap.put(childrenJars[childcount], classList);
            } else {
                System.out.println("ear libs/" + childrenJars[childcount]);
                fileObjects.add(childrenJars[childcount]);
            }
        } else {
            childrenJars[childcount].close();
        }
    }
}
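Here CopyOnWriteArrayList serves as a plain accumulator for class names and file objects. Worth noting: every add() copies the entire backing array, so filling a list of n elements costs O(n²) copying. If a list is built by one thread and only shared afterwards, building in an ArrayList and copying once at hand-off is cheaper; a sketch under that assumption (ClassScan is illustrative, not part of the deployer):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    class ClassScan {
        static List<String> scan(List<String> entries) {
            List<String> classes = new ArrayList<>(); // single-threaded build: no per-add copying
            for (String e : entries) {
                if (e.endsWith(".class")) {
                    classes.add(e.replace('/', '.').substring(0, e.length() - ".class".length()));
                }
            }
            // one copy at hand-off; readers then get snapshot iteration for free
            return new CopyOnWriteArrayList<>(classes);
        }
    }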
From source file:org.jenkinsci.plugins.workflow.cps.CpsFlowExecution.java
@Override
public void addListener(GraphListener listener) {
    if (listeners == null) {
        listeners = new CopyOnWriteArrayList<GraphListener>();
    }
    listeners.add(listener);
}
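Lazy initialization like the above is only safe if registration is never called concurrently; the payoff of CopyOnWriteArrayList is on the firing side, where events iterate a snapshot while listeners register and deregister at will. A minimal sketch of that classic listener pattern (names are illustrative):

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    class EventBus {
        // eagerly created: sidesteps the unsynchronized lazy-init race entirely
        private final List<Runnable> listeners = new CopyOnWriteArrayList<>();

        void addListener(Runnable l)    { listeners.add(l); }
        void removeListener(Runnable l) { listeners.remove(l); }

        void fire() {
            // snapshot iteration: concurrent add/remove never throws here
            for (Runnable l : listeners) {
                l.run();
            }
        }
    }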
From source file:uk.ac.kcl.texthunter.core.MLModelMaker.java
public void prepareForBlastOff() {
    this.alXValidate = new CopyOnWriteArrayList();
    this.allClassGapp = new File(applicationLocation + File.separator + "all_classes.gapp");
    this.MLConfigFile = applicationLocation + File.separator + "MLconfig.xml";
    this.outerCorpSizeRatio = 5;
    if (this.multiClassMode) {
        System.out.println("TextHunter is in MultiClass mode");
    } else {
        System.out.println("TextHunter is in Positive Instance mode");
    }
}
From source file:com.flexive.ejb.beans.ScriptingEngineBean.java
/**
 * Execute all runOnce scripts in the resource denoted by prefix if param is "false".
 *
 * @param param           boolean parameter that will be flagged as "true" once the scripts are run
 * @param dropName        the drop application name
 * @param prefix          resource directory prefix
 * @param applicationName the corresponding application name (for debug messages)
 * @throws FxApplicationException on errors
 */
private void runOnce(Parameter<Boolean> param, String dropName, String prefix, String applicationName)
        throws FxApplicationException {
    synchronized (RUNONCE_LOCK) {
        try {
            Boolean executed = getDivisionConfigurationEngine().get(param);
            if (executed) {
                return;
            }
        } catch (FxApplicationException e) {
            LOG.error(e);
            return;
        }
        //noinspection unchecked
        final ArrayList<FxScriptRunInfo> divisionRunOnceInfos = Lists
                .newArrayList(getDivisionConfigurationEngine().get(SystemParameters.DIVISION_RUNONCE_INFOS));
        LocalScriptingCache.runOnceInfos = new CopyOnWriteArrayList<FxScriptRunInfo>();
        LocalScriptingCache.runOnceInfos.addAll(divisionRunOnceInfos);
        FxContext.get().setExecutingRunOnceScripts(true);
        try {
            executeInitializerScripts("runonce", dropName, prefix, applicationName,
                    new RunonceScriptExecutor(applicationName));
        } finally {
            FxContext.get().setExecutingRunOnceScripts(false);
        }
        // TODO: this fails to update the script infos when the transaction was rolled back because of a script error
        FxContext.get().runAsSystem();
        try {
            divisionRunOnceInfos.clear();
            divisionRunOnceInfos.addAll(LocalScriptingCache.runOnceInfos);
            getDivisionConfigurationEngine().put(SystemParameters.DIVISION_RUNONCE_INFOS, divisionRunOnceInfos);
            getDivisionConfigurationEngine().put(param, true);
            LocalScriptingCache.runOnceInfos = null;
        } finally {
            FxContext.get().stopRunAsSystem();
        }
    }
}
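A small aside on the two-step new CopyOnWriteArrayList() followed by addAll(...) seen above: the copy constructor achieves the same result in one step, snapshotting the source collection as the initial backing array. A sketch (the string values stand in for FxScriptRunInfo entries):

    import java.util.Arrays;
    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    class RunOnceCopy {
        public static void main(String[] args) {
            List<String> seed = Arrays.asList("script-a", "script-b");
            // copy constructor: create and bulk-load in one step
            List<String> runOnceInfos = new CopyOnWriteArrayList<>(seed);
            System.out.println(runOnceInfos); // [script-a, script-b]
        }
    }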
From source file:com.l2jfree.gameserver.model.entity.events.CTF.java
private static boolean startEventOk() {
    if (_joining || !_teleport || _started)
        return false;
    if (Config.CTF_EVEN_TEAMS.equals("NO") || Config.CTF_EVEN_TEAMS.equals("BALANCE")) {
        if (_teamPlayersCount.contains(0))
            return false;
    } else if (Config.CTF_EVEN_TEAMS.equals("SHUFFLE")) {
        CopyOnWriteArrayList<L2Player> playersShuffleTemp = new CopyOnWriteArrayList<L2Player>();
        // copy the shuffle list, dropping null entries
        for (int i = 0; i < _playersShuffle.size(); i++) {
            if (_playersShuffle.get(i) != null)
                playersShuffleTemp.add(_playersShuffle.get(i));
        }
        _playersShuffle = playersShuffleTemp;
        // do not clear playersShuffleTemp here: after the assignment both references
        // point to the same list, so clear() would empty _playersShuffle as well
    }
    return true;
}
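The copy loop exists only to drop null entries. Since Java 8, CopyOnWriteArrayList overrides removeIf to do the same in a single locked pass with one array copy, which makes the temporary list unnecessary. A sketch (player names are illustrative):

    import java.util.Objects;
    import java.util.concurrent.CopyOnWriteArrayList;

    class ShuffleCleanup {
        public static void main(String[] args) {
            CopyOnWriteArrayList<String> players = new CopyOnWriteArrayList<>();
            players.add("alice");
            players.add(null); // CopyOnWriteArrayList permits null elements
            players.add("bob");
            players.removeIf(Objects::isNull); // one locked pass, one array copy
            System.out.println(players);       // [alice, bob]
        }
    }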
From source file:ml.shifu.shifu.core.dtrain.dt.DTMaster.java
@Override
public void init(MasterContext<DTMasterParams, DTWorkerParams> context) {
    Properties props = context.getProps();
    // init model config and column config list first
    SourceType sourceType;
    try {
        sourceType = SourceType
                .valueOf(props.getProperty(CommonConstants.MODELSET_SOURCE_TYPE, SourceType.HDFS.toString()));
        this.modelConfig = CommonUtils.loadModelConfig(props.getProperty(CommonConstants.SHIFU_MODEL_CONFIG),
                sourceType);
        this.columnConfigList = CommonUtils
                .loadColumnConfigList(props.getProperty(CommonConstants.SHIFU_COLUMN_CONFIG), sourceType);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }

    // worker number is used to estimate nodes per iteration for stats
    this.workerNumber = NumberFormatUtils.getInt(props.getProperty(GuaguaConstants.GUAGUA_WORKER_NUMBER), true);

    // check whether variable selection has already been done
    int[] inputOutputIndex = DTrainUtils.getNumericAndCategoricalInputAndOutputCounts(this.columnConfigList);
    this.inputNum = inputOutputIndex[0] + inputOutputIndex[1];
    this.isAfterVarSelect = (inputOutputIndex[3] == 1);
    // cache all feature list for sampling features
    this.allFeatures = this.getAllFeatureList(columnConfigList, isAfterVarSelect);

    int trainerId = Integer.valueOf(context.getProps().getProperty(CommonConstants.SHIFU_TRAINER_ID, "0"));
    // if grid search, select valid parameters; otherwise use the parameters from ModelConfig.json
    GridSearch gs = new GridSearch(modelConfig.getTrain().getParams(),
            modelConfig.getTrain().getGridConfigFileContent());
    Map<String, Object> validParams = this.modelConfig.getTrain().getParams();
    if (gs.hasHyperParam()) {
        validParams = gs.getParams(trainerId);
        LOG.info("Start grid search master with params: {}", validParams);
    }

    Object vtObj = validParams.get("ValidationTolerance");
    if (vtObj != null) {
        try {
            validationTolerance = Double.parseDouble(vtObj.toString());
            LOG.warn("Validation by tolerance is enabled with value {}.", validationTolerance);
        } catch (NumberFormatException ee) {
            validationTolerance = 0d;
            LOG.warn(
                    "Validation by tolerance isn't enabled because of non numerical value of ValidationTolerance: {}.",
                    vtObj);
        }
    } else {
        LOG.warn("Validation by tolerance isn't enabled.");
    }

    // tree-related parameters initialization
    Object fssObj = validParams.get("FeatureSubsetStrategy");
    if (fssObj != null) {
        try {
            this.featureSubsetRate = Double.parseDouble(fssObj.toString());
            // no need to validate that featureSubsetRate is in (0,1], already validated in ModelInspector
            this.featureSubsetStrategy = null;
        } catch (NumberFormatException ee) {
            this.featureSubsetStrategy = FeatureSubsetStrategy.of(fssObj.toString());
        }
    } else {
        LOG.warn("FeatureSubsetStrategy is not set, set to TWOTHIRDS by default in DTMaster.");
        this.featureSubsetStrategy = FeatureSubsetStrategy.TWOTHIRDS;
        this.featureSubsetRate = 0;
    }

    // max depth
    Object maxDepthObj = validParams.get("MaxDepth");
    if (maxDepthObj != null) {
        this.maxDepth = Integer.valueOf(maxDepthObj.toString());
    } else {
        this.maxDepth = 10;
    }

    // max leaves, used for leaf-wise tree building; TODO add more benchmarks
    Object maxLeavesObj = validParams.get("MaxLeaves");
    if (maxLeavesObj != null) {
        this.maxLeaves = Integer.valueOf(maxLeavesObj.toString());
    } else {
        this.maxLeaves = -1;
    }

    // enable leaf-wise tree building once maxLeaves is configured
    if (this.maxLeaves > 0) {
        this.isLeafWise = true;
    }

    // maxBatchSplitSize is how many nodes to split per batch
    Object maxBatchSplitSizeObj = validParams.get("MaxBatchSplitSize");
    if (maxBatchSplitSizeObj != null) {
        this.maxBatchSplitSize = Integer.valueOf(maxBatchSplitSizeObj.toString());
    } else {
        // by default split at most 32 nodes in a batch
        this.maxBatchSplitSize = 32;
    }

    assert this.maxDepth > 0 && this.maxDepth <= 20;

    // hidden parameter to avoid OOM issues in each iteration
    Object maxStatsMemoryMB = validParams.get("MaxStatsMemoryMB");
    if (maxStatsMemoryMB != null) {
        this.maxStatsMemory = Long.valueOf(validParams.get("MaxStatsMemoryMB").toString()) * 1024 * 1024;
        if (this.maxStatsMemory > ((2L * Runtime.getRuntime().maxMemory()) / 3)) {
            // if >= 2/3 of max memory, cap at 2/3 of max memory to avoid OOM
            this.maxStatsMemory = ((2L * Runtime.getRuntime().maxMemory()) / 3);
        }
    } else {
        // by default it is 1/2 of the heap, about 1.5G with current Shifu settings
        this.maxStatsMemory = Runtime.getRuntime().maxMemory() / 2L;
    }

    this.treeNum = Integer.valueOf(validParams.get("TreeNum").toString());
    this.isRF = ALGORITHM.RF.toString().equalsIgnoreCase(modelConfig.getAlgorithm());
    this.isGBDT = ALGORITHM.GBT.toString().equalsIgnoreCase(modelConfig.getAlgorithm());
    if (this.isGBDT) {
        // learning rate is only effective in GBDT
        this.learningRate = Double.valueOf(validParams.get(CommonConstants.LEARNING_RATE).toString());
    }

    // initialize impurity type according to regression or classification
    String imStr = validParams.get("Impurity").toString();
    int numClasses = 2;
    if (this.modelConfig.isClassification()) {
        numClasses = this.modelConfig.getTags().size();
    }

    // these two parameters stop tree growth
    int minInstancesPerNode = Integer.valueOf(validParams.get("MinInstancesPerNode").toString());
    double minInfoGain = Double.valueOf(validParams.get("MinInfoGain").toString());
    if (imStr.equalsIgnoreCase("entropy")) {
        impurity = new Entropy(numClasses, minInstancesPerNode, minInfoGain);
    } else if (imStr.equalsIgnoreCase("gini")) {
        impurity = new Gini(numClasses, minInstancesPerNode, minInfoGain);
    } else {
        impurity = new Variance(minInstancesPerNode, minInfoGain);
    }

    // checkpoint folder and interval (checkpoint every # iterations)
    this.checkpointInterval = NumberFormatUtils
            .getInt(context.getProps().getProperty(CommonConstants.SHIFU_DT_MASTER_CHECKPOINT_INTERVAL, "20"));
    this.checkpointOutput = new Path(context.getProps()
            .getProperty(CommonConstants.SHIFU_DT_MASTER_CHECKPOINT_FOLDER, "tmp/cp_" + context.getAppId()));

    // cache conf to avoid creating a new one each time
    this.conf = new Configuration();

    // whether continuous model training is enabled
    this.isContinuousEnabled = Boolean.TRUE.toString()
            .equalsIgnoreCase(context.getProps().getProperty(CommonConstants.CONTINUOUS_TRAINING));

    this.dtEarlyStopDecider = new DTEarlyStopDecider(this.maxDepth);
    if (validParams.containsKey("EnableEarlyStop")
            && Boolean.valueOf(validParams.get("EnableEarlyStop").toString().toLowerCase())) {
        this.enableEarlyStop = true;
    }

    LOG.info(
            "Master init params: isAfterVarSel={}, featureSubsetStrategy={}, featureSubsetRate={} maxDepth={}, maxStatsMemory={}, "
                    + "treeNum={}, impurity={}, workerNumber={}, minInstancesPerNode={}, minInfoGain={}, isRF={}, "
                    + "isGBDT={}, isContinuousEnabled={}, enableEarlyStop={}.",
            isAfterVarSelect, featureSubsetStrategy, this.featureSubsetRate, maxDepth, maxStatsMemory, treeNum,
            imStr, this.workerNumber, minInstancesPerNode, minInfoGain, this.isRF, this.isGBDT,
            this.isContinuousEnabled, this.enableEarlyStop);

    this.toDoQueue = new LinkedList<TreeNode>();
    if (this.isLeafWise) {
        this.toSplitQueue = new PriorityQueue<TreeNode>(64, new Comparator<TreeNode>() {
            @Override
            public int compare(TreeNode o1, TreeNode o2) {
                return Double.compare(o2.getNode().getWgtCntRatio() * o2.getNode().getGain(),
                        o1.getNode().getWgtCntRatio() * o1.getNode().getGain());
            }
        });
    }

    // initialize trees
    if (context.isFirstIteration()) {
        if (this.isRF) {
            // for random forest, trees are trained in parallel
            this.trees = new CopyOnWriteArrayList<TreeNode>();
            for (int i = 0; i < treeNum; i++) {
                this.trees.add(new TreeNode(i, new Node(Node.ROOT_INDEX), 1d));
            }
        }
        if (this.isGBDT) {
            if (isContinuousEnabled) {
                TreeModel existingModel;
                try {
                    Path modelPath = new Path(context.getProps().getProperty(CommonConstants.GUAGUA_OUTPUT));
                    existingModel = (TreeModel) ModelSpecLoaderUtils.loadModel(modelConfig, modelPath,
                            ShifuFileUtils.getFileSystemBySourceType(this.modelConfig.getDataSet().getSource()));
                    if (existingModel == null) {
                        // null means no existing model file, or the model file is in the wrong format
                        this.trees = new CopyOnWriteArrayList<TreeNode>();
                        this.trees.add(new TreeNode(0, new Node(Node.ROOT_INDEX), 1d)); // learning rate is 1 for the 1st tree
                        LOG.info("Starting to train model from scratch and existing model is empty.");
                    } else {
                        this.trees = existingModel.getTrees();
                        this.existingTreeSize = this.trees.size();
                        // when starting from existing models, the first new tree uses the current learning rate
                        this.trees.add(new TreeNode(this.existingTreeSize, new Node(Node.ROOT_INDEX),
                                this.existingTreeSize == 0 ? 1d : this.learningRate));
                        LOG.info("Starting to train model from existing model {} with existing trees {}.",
                                modelPath, existingTreeSize);
                    }
                } catch (IOException e) {
                    throw new GuaguaRuntimeException(e);
                }
            } else {
                this.trees = new CopyOnWriteArrayList<TreeNode>();
                // for GBDT, trees are trained sequentially; the first tree's learning rate is 1
                this.trees.add(new TreeNode(0, new Node(Node.ROOT_INDEX), 1.0d));
            }
        }
    } else {
        // recover all states once master fails over
        LOG.info("Recover master status from checkpoint file {}", this.checkpointOutput);
        recoverMasterStatus(sourceType);
    }
}
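A plausible reading of why trees is a CopyOnWriteArrayList: the master mutates it rarely (one tree appended per GBDT round, or a batch at RF startup) while other code may walk it concurrently, which is exactly the read-mostly profile the class is built for. A reduced sketch of that access pattern (Forest and its members are illustrative, not Shifu classes):

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    class Forest {
        private final List<String> trees = new CopyOnWriteArrayList<>();

        void addTree(String tree) {  // rare write: copies the array once
            trees.add(tree);
        }

        double score() {             // frequent read: lock-free snapshot iteration
            double sum = 0;
            for (String t : trees) {
                sum += t.length();   // stand-in for evaluating one tree
            }
            return sum;
        }
    }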
From source file:io.druid.indexing.common.task.AppenderatorDriverRealtimeIndexTaskTest.java
private void makeToolboxFactory(final File directory) {
    taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null));
    taskLockbox = new TaskLockbox(taskStorage);
    publishedSegments = new CopyOnWriteArrayList<>();

    ObjectMapper mapper = new DefaultObjectMapper();
    mapper.registerSubtypes(LinearShardSpec.class);
    mapper.registerSubtypes(NumberedShardSpec.class);
    IndexerSQLMetadataStorageCoordinator mdc = new IndexerSQLMetadataStorageCoordinator(mapper,
            derbyConnectorRule.metadataTablesConfigSupplier().get(), derbyConnectorRule.getConnector()) {
        @Override
        public Set<DataSegment> announceHistoricalSegments(Set<DataSegment> segments) throws IOException {
            Set<DataSegment> result = super.announceHistoricalSegments(segments);
            Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?",
                    segmentLatch == null);
            publishedSegments.addAll(result);
            segments.forEach(s -> segmentLatch.countDown());
            return result;
        }

        @Override
        public SegmentPublishResult announceHistoricalSegments(Set<DataSegment> segments,
                DataSourceMetadata startMetadata, DataSourceMetadata endMetadata) throws IOException {
            SegmentPublishResult result = super.announceHistoricalSegments(segments, startMetadata, endMetadata);
            Assert.assertFalse("Segment latch not initialized, did you forget to call expectPublishSegments?",
                    segmentLatch == null);
            publishedSegments.addAll(result.getSegments());
            result.getSegments().forEach(s -> segmentLatch.countDown());
            return result;
        }
    };

    final TaskConfig taskConfig = new TaskConfig(directory.getPath(), null, null, 50000, null, false, null, null);
    final TaskActionToolbox taskActionToolbox = new TaskActionToolbox(taskLockbox, mdc, emitter,
            EasyMock.createMock(SupervisorManager.class));
    final TaskActionClientFactory taskActionClientFactory = new LocalTaskActionClientFactory(taskStorage,
            taskActionToolbox);

    IntervalChunkingQueryRunnerDecorator queryRunnerDecorator = new IntervalChunkingQueryRunnerDecorator(null,
            null, null) {
        @Override
        public <T> QueryRunner<T> decorate(QueryRunner<T> delegate,
                QueryToolChest<T, ? extends Query<T>> toolChest) {
            return delegate;
        }
    };
    final QueryRunnerFactoryConglomerate conglomerate = new DefaultQueryRunnerFactoryConglomerate(
            ImmutableMap.<Class<? extends Query>, QueryRunnerFactory>of(TimeseriesQuery.class,
                    new TimeseriesQueryRunnerFactory(new TimeseriesQueryQueryToolChest(queryRunnerDecorator),
                            new TimeseriesQueryEngine(), new QueryWatcher() {
                                @Override
                                public void registerQuery(Query query, ListenableFuture future) {
                                    // do nothing
                                }
                            })));

    handOffCallbacks = new ConcurrentHashMap<>();
    final SegmentHandoffNotifierFactory handoffNotifierFactory = new SegmentHandoffNotifierFactory() {
        @Override
        public SegmentHandoffNotifier createSegmentHandoffNotifier(String dataSource) {
            return new SegmentHandoffNotifier() {
                @Override
                public boolean registerSegmentHandoffCallback(SegmentDescriptor descriptor, Executor exec,
                        Runnable handOffRunnable) {
                    handOffCallbacks.put(descriptor, new Pair<>(exec, handOffRunnable));
                    handoffLatch.countDown();
                    return true;
                }

                @Override
                public void start() {
                    // noop
                }

                @Override
                public void close() {
                    // noop
                }
            };
        }
    };

    final TestUtils testUtils = new TestUtils();
    rowIngestionMetersFactory = testUtils.getRowIngestionMetersFactory();
    SegmentLoaderConfig segmentLoaderConfig = new SegmentLoaderConfig() {
        @Override
        public List<StorageLocationConfig> getLocations() {
            return Lists.newArrayList();
        }
    };

    taskToolboxFactory = new TaskToolboxFactory(taskConfig, taskActionClientFactory, emitter,
            new TestDataSegmentPusher(), new TestDataSegmentKiller(),
            null, // DataSegmentMover
            null, // DataSegmentArchiver
            new TestDataSegmentAnnouncer(), EasyMock.createNiceMock(DataSegmentServerAnnouncer.class),
            handoffNotifierFactory, () -> conglomerate,
            MoreExecutors.sameThreadExecutor(), // queryExecutorService
            EasyMock.createMock(MonitorScheduler.class),
            new SegmentLoaderFactory(new SegmentLoaderLocalCacheManager(null, segmentLoaderConfig,
                    testUtils.getTestObjectMapper())),
            testUtils.getTestObjectMapper(), testUtils.getTestIndexIO(), MapCache.create(1024), new CacheConfig(),
            testUtils.getTestIndexMergerV9(), EasyMock.createNiceMock(DruidNodeAnnouncer.class),
            EasyMock.createNiceMock(DruidNode.class), new LookupNodeService("tier"),
            new DataNodeService("tier", 1000, ServerType.INDEXER_EXECUTOR, 0),
            new TaskReportFileWriter(reportsFile));
}
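publishedSegments is a CopyOnWriteArrayList because the overridden announceHistoricalSegments callbacks run on task threads while the test thread reads the list after awaiting the latch. A reduced, self-contained sketch of that test idiom (all names here are illustrative):

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;
    import java.util.concurrent.CountDownLatch;

    class PublishCapture {
        public static void main(String[] args) throws InterruptedException {
            List<String> published = new CopyOnWriteArrayList<>();
            CountDownLatch latch = new CountDownLatch(2);

            for (int i = 0; i < 2; i++) {
                final int id = i;
                new Thread(() -> {                // stand-in for a publishing task thread
                    published.add("segment-" + id);
                    latch.countDown();
                }).start();
            }

            latch.await();                        // happens-before: writes are visible here
            System.out.println(published.size()); // 2
        }
    }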