List of usage examples for java.util.concurrent.CopyOnWriteArrayList
public CopyOnWriteArrayList()
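Before the project-specific examples below, here is a minimal, self-contained sketch (not taken from any of the listed sources; the class name CopyOnWriteArrayListDemo is illustrative only) showing what the no-argument constructor gives you: an empty thread-safe list whose iterators work on a snapshot of the backing array, so mutating the list while iterating never throws ConcurrentModificationException.

import java.util.concurrent.CopyOnWriteArrayList;

public class CopyOnWriteArrayListDemo {
    public static void main(String[] args) {
        // The no-arg constructor creates an empty, thread-safe list.
        CopyOnWriteArrayList<String> listeners = new CopyOnWriteArrayList<>();
        listeners.add("first");
        listeners.add("second");

        // Each iterator sees the array as it was when the iterator was created.
        // Adding during iteration is safe, but the new elements are not visible
        // to this loop, so it runs exactly twice.
        for (String listener : listeners) {
            listeners.add("added-during-iteration");
            System.out.println(listener);
        }

        System.out.println("final size = " + listeners.size()); // prints 4
    }
}

The copy-on-write semantics make the list a good fit for listener registries and other read-mostly collections, which is the pattern most of the examples below follow.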
From source file:Ternary.java
public void shiftLeft(int num) {
    CopyOnWriteArrayList<Trit> newTrits = new CopyOnWriteArrayList<Trit>();
    for (int i = 0; i < num; i++)
        newTrits.add(Trit.NEUTRAL);
    newTrits.addAll(trits);
    trits = newTrits;
}
From source file:org.springsource.ide.eclipse.commons.browser.swt.StsBrowserManager.java
/**
 * Calls Javascript functions <i>to</i> the browser, refreshing the browser
 * after each call.
 */
public void callOnBrowser(final Collection<IEclipseToBrowserFunction> functions) {
    final Collection<IEclipseToBrowserFunction> waitingFunctions = new CopyOnWriteArrayList<IEclipseToBrowserFunction>();
    IEclipseToBrowserFunction.Callback callback = new IEclipseToBrowserFunction.Callback() {
        @Override
        public void ready(final IEclipseToBrowserFunction function) {
            if (waitingFunctions.remove(function)) {
                if (!disposed) {
                    doCall(function);
                }
            }
        }
    };
    for (IEclipseToBrowserFunction function : functions) {
        if (!function.isReady()) {
            waitingFunctions.add(function);
            function.setCallback(callback);
        } else {
            doCall(function);
        }
    }
    callback.ready(null);
}
From source file:org.springframework.web.servlet.support.AbstractFlashMapManager.java
@Override
public final void saveOutputFlashMap(FlashMap flashMap, HttpServletRequest request, HttpServletResponse response) {
    if (CollectionUtils.isEmpty(flashMap)) {
        return;
    }

    String path = decodeAndNormalizePath(flashMap.getTargetRequestPath(), request);
    flashMap.setTargetRequestPath(path);

    if (logger.isDebugEnabled()) {
        logger.debug("Saving FlashMap=" + flashMap);
    }
    flashMap.startExpirationPeriod(getFlashMapTimeout());

    Object mutex = getFlashMapsMutex(request);
    if (mutex != null) {
        synchronized (mutex) {
            List<FlashMap> allFlashMaps = retrieveFlashMaps(request);
            allFlashMaps = (allFlashMaps != null ? allFlashMaps : new CopyOnWriteArrayList<>());
            allFlashMaps.add(flashMap);
            updateFlashMaps(allFlashMaps, request, response);
        }
    } else {
        List<FlashMap> allFlashMaps = retrieveFlashMaps(request);
        allFlashMaps = (allFlashMaps != null ? allFlashMaps : new LinkedList<>());
        allFlashMaps.add(flashMap);
        updateFlashMaps(allFlashMaps, request, response);
    }
}
From source file:com.web.server.WarDeployer.java
public WarDeployer(String scanDirectory, String farmwarDir, String clusterGroup, Hashtable urlClassLoaderMap,
        Hashtable executorServiceMap, Hashtable messagingClassMap, ConcurrentHashMap servletMapping,
        MessagingElem messagingElem, ConcurrentHashMap sessionObjects) {
    this.scanDirectory = scanDirectory;
    this.urlClassLoaderMap = urlClassLoaderMap;
    this.executorServiceMap = executorServiceMap;
    this.messagingClassMap = messagingClassMap;
    this.servletMapping = servletMapping;
    this.sessionObjects = sessionObjects;
    farmWarFileTransfer = FarmWarFileTransfer.getInstance(scanDirectory + "/", farmwarDir, clusterGroup);
    try {
        farmWarFileTransfer.start();
    } catch (Exception e2) {
        e2.printStackTrace();
    }
    try {
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/executorservices-config.xml")));
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
        });
        serverdigester = serverdigesterLoader.newDigester();
    } catch (Exception e1) {
        e1.printStackTrace();
    }
    try {
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/messagingclass-rules.xml")));
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
        });
        messagedigester = serverdigesterLoader.newDigester();
    } catch (Exception e1) {
        e1.printStackTrace();
    }
    try {
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/webxml-rules.xml")));
                } catch (FileNotFoundException e) {
                    e.printStackTrace();
                }
            }
        });
        webxmldigester = serverdigesterLoader.newDigester();
    } catch (Exception ex) {
        //ex.printStackTrace();
    }
    synchronized (messagingElem) {
        this.messagingElem = messagingElem;
        ConcurrentHashMap randomQueue = messagingElem.randomQueue;
        Set<String> randomQueueSet = randomQueue.keySet();
        Iterator<String> ite = randomQueueSet.iterator();
        while (ite.hasNext()) {
            Queue queue = (Queue) randomQueue.get(ite.next());
            ConcurrentHashMap randomqueuemap = (ConcurrentHashMap) messagingClassMap.get("RandomQueue");
            if (randomqueuemap == null) {
                randomqueuemap = new ConcurrentHashMap();
                messagingClassMap.put("RandomQueue", randomqueuemap);
            }
            CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) randomqueuemap.get(queue.getQueuename());
            if (randomqueuelist == null)
                randomqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList());
        }
        ConcurrentHashMap roundrobinQueue = messagingElem.roundrobinQueue;
        Set<String> roundrobinQueueSet = roundrobinQueue.keySet();
        ite = roundrobinQueueSet.iterator();
        while (ite.hasNext()) {
            Queue queue = (Queue) roundrobinQueue.get(ite.next());
            ConcurrentHashMap roundrobinqueuemap = (ConcurrentHashMap) messagingClassMap.get("RoundRobinQueue");
            if (roundrobinqueuemap == null) {
                roundrobinqueuemap = new ConcurrentHashMap();
                messagingClassMap.put("RoundRobinQueue", roundrobinqueuemap);
            }
            CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) roundrobinqueuemap.get(queue.getQueuename());
            if (randomqueuelist == null)
                roundrobinqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList());
        }
        ConcurrentHashMap topicMap = messagingElem.topicMap;
        Set<String> topicSet = topicMap.keySet();
        Iterator<String> iter = topicSet.iterator();
        while (iter.hasNext()) {
            Topic topic = (Topic) topicMap.get(iter.next());
            ConcurrentHashMap topicmap = (ConcurrentHashMap) messagingClassMap.get("Topic");
            if (topicmap == null) {
                topicmap = new ConcurrentHashMap();
                messagingClassMap.put("Topic", topicmap);
            }
            CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) topicmap.get(topic.getTopicname());
            if (randomqueuelist == null)
                topicmap.put(topic.getTopicname(), new CopyOnWriteArrayList());
        }
        System.out.println(messagingClassMap);
    }
}
From source file:com.opensymphony.xwork2.util.DefaultLocalizedTextProvider.java
/**
 * Adds the bundle to the internal list of default bundles.
 * <p>
 * If the bundle already exists in the list it will be re-added.
 * </p>
 *
 * @param resourceBundleName the name of the bundle to add.
 */
@Override
public void addDefaultResourceBundle(String resourceBundleName) {
    // make sure this doesn't get added more than once
    final ClassLoader ccl = getCurrentThreadContextClassLoader();
    synchronized (XWORK_MESSAGES_BUNDLE) {
        List<String> bundles = classLoaderMap.get(ccl.hashCode());
        if (bundles == null) {
            bundles = new CopyOnWriteArrayList<>();
            classLoaderMap.put(ccl.hashCode(), bundles);
        }
        bundles.remove(resourceBundleName);
        bundles.add(0, resourceBundleName);
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug("Added default resource bundle '{}' to default resource bundles for the following classloader '{}'",
                resourceBundleName, ccl.toString());
    }
}
From source file:org.opencb.opencga.storage.hadoop.variant.HadoopVariantStorageEngine.java
@Override
public List<StoragePipelineResult> index(List<URI> inputFiles, URI outdirUri, boolean doExtract,
        boolean doTransform, boolean doLoad) throws StorageEngineException {

    if (inputFiles.size() == 1 || !doLoad) {
        return super.index(inputFiles, outdirUri, doExtract, doTransform, doLoad);
    }

    final boolean doArchive;
    final boolean doMerge;
    if (!getOptions().containsKey(HADOOP_LOAD_ARCHIVE) && !getOptions().containsKey(HADOOP_LOAD_VARIANT)) {
        doArchive = true;
        doMerge = true;
    } else {
        doArchive = getOptions().getBoolean(HADOOP_LOAD_ARCHIVE, false);
        doMerge = getOptions().getBoolean(HADOOP_LOAD_VARIANT, false);
    }

    if (!doArchive && !doMerge) {
        return Collections.emptyList();
    }

    final int nThreadArchive = getOptions().getInt(HADOOP_LOAD_ARCHIVE_BATCH_SIZE, 2);
    ObjectMap extraOptions = new ObjectMap().append(HADOOP_LOAD_ARCHIVE, true).append(HADOOP_LOAD_VARIANT, false);

    final List<StoragePipelineResult> concurrResult = new CopyOnWriteArrayList<>();
    List<VariantStoragePipeline> etlList = new ArrayList<>();
    ExecutorService executorService = Executors.newFixedThreadPool(nThreadArchive, r -> {
        Thread t = new Thread(r);
        t.setDaemon(true);
        return t;
    }); // Set Daemon for quick shutdown !!!

    LinkedList<Future<StoragePipelineResult>> futures = new LinkedList<>();
    List<Integer> indexedFiles = new CopyOnWriteArrayList<>();
    for (URI inputFile : inputFiles) {
        // Provide a connected storageETL if load is required.
        VariantStoragePipeline storageETL = newStorageETL(doLoad, new ObjectMap(extraOptions));
        futures.add(executorService.submit(() -> {
            try {
                Thread.currentThread().setName(Paths.get(inputFile).getFileName().toString());
                StoragePipelineResult storagePipelineResult = new StoragePipelineResult(inputFile);
                URI nextUri = inputFile;
                boolean error = false;
                if (doTransform) {
                    try {
                        nextUri = transformFile(storageETL, storagePipelineResult, concurrResult, nextUri, outdirUri);
                    } catch (StoragePipelineException ignore) {
                        // Ignore here. Errors are stored in the ETLResult
                        error = true;
                    }
                }
                if (doLoad && doArchive && !error) {
                    try {
                        loadFile(storageETL, storagePipelineResult, concurrResult, nextUri, outdirUri);
                    } catch (StoragePipelineException ignore) {
                        // Ignore here. Errors are stored in the ETLResult
                        error = true;
                    }
                }
                if (doLoad && !error) {
                    // Read the VariantSource to get the original fileName (it may be different from the
                    // nextUri.getFileName if this is the transformed file)
                    String fileName = storageETL.readVariantSource(nextUri, null).getFileName();
                    // Get latest study configuration from DB, might have been changed since
                    StudyConfiguration studyConfiguration = storageETL.getStudyConfiguration();
                    // Get file ID for the provided file name
                    Integer fileId = studyConfiguration.getFileIds().get(fileName);
                    indexedFiles.add(fileId);
                }
                return storagePipelineResult;
            } finally {
                try {
                    storageETL.close();
                } catch (StorageEngineException e) {
                    logger.error("Issue closing DB connection ", e);
                }
            }
        }));
    }

    executorService.shutdown();

    int errors = 0;
    try {
        while (!futures.isEmpty()) {
            executorService.awaitTermination(1, TimeUnit.MINUTES);
            // Check values
            if (futures.peek().isDone() || futures.peek().isCancelled()) {
                Future<StoragePipelineResult> first = futures.pop();
                StoragePipelineResult result = first.get(1, TimeUnit.MINUTES);
                if (result.getTransformError() != null) {
                    // TODO: Handle errors. Retry?
                    errors++;
                    result.getTransformError().printStackTrace();
                } else if (result.getLoadError() != null) {
                    // TODO: Handle errors. Retry?
                    errors++;
                    result.getLoadError().printStackTrace();
                }
                concurrResult.add(result);
            }
        }
        if (errors > 0) {
            throw new StoragePipelineException("Errors found", concurrResult);
        }

        if (doLoad && doMerge) {
            int batchMergeSize = getOptions().getInt(HADOOP_LOAD_VARIANT_BATCH_SIZE, 10);
            // Overwrite default ID list with user provided IDs
            List<Integer> pendingFiles = indexedFiles;
            if (getOptions().containsKey(HADOOP_LOAD_VARIANT_PENDING_FILES)) {
                List<Integer> idList = getOptions().getAsIntegerList(HADOOP_LOAD_VARIANT_PENDING_FILES);
                if (!idList.isEmpty()) {
                    // only if the list is not empty
                    pendingFiles = idList;
                }
            }

            List<Integer> filesToMerge = new ArrayList<>(batchMergeSize);
            int i = 0;
            for (Iterator<Integer> iterator = pendingFiles.iterator(); iterator.hasNext(); i++) {
                Integer indexedFile = iterator.next();
                filesToMerge.add(indexedFile);
                if (filesToMerge.size() == batchMergeSize || !iterator.hasNext()) {
                    extraOptions = new ObjectMap()
                            .append(HADOOP_LOAD_ARCHIVE, false)
                            .append(HADOOP_LOAD_VARIANT, true)
                            .append(HADOOP_LOAD_VARIANT_PENDING_FILES, filesToMerge);
                    AbstractHadoopVariantStoragePipeline localEtl = newStorageETL(doLoad, extraOptions);

                    int studyId = getOptions().getInt(Options.STUDY_ID.key());
                    localEtl.preLoad(inputFiles.get(i), outdirUri);
                    localEtl.merge(studyId, filesToMerge);
                    localEtl.postLoad(inputFiles.get(i), outdirUri);
                    filesToMerge.clear();
                }
            }

            annotateLoadedFiles(outdirUri, inputFiles, concurrResult, getOptions());
            calculateStatsForLoadedFiles(outdirUri, inputFiles, concurrResult, getOptions());
        }
    } catch (InterruptedException e) {
        Thread.interrupted();
        throw new StoragePipelineException("Interrupted!", e, concurrResult);
    } catch (ExecutionException e) {
        throw new StoragePipelineException("Execution exception!", e, concurrResult);
    } catch (TimeoutException e) {
        throw new StoragePipelineException("Timeout Exception", e, concurrResult);
    } finally {
        if (!executorService.isShutdown()) {
            try {
                executorService.shutdownNow();
            } catch (Exception e) {
                logger.error("Problems shutting executer service down", e);
            }
        }
    }

    return concurrResult;
}
From source file:com.hellblazer.process.JavaProcessTest.java
public void testTailStdInputOutputStreams() throws Exception {
    final List<String> lines = new CopyOnWriteArrayList<>();
    TailerListener listener = new TailerListenerAdapter() {
        @Override
        public void handle(String line) {
            lines.add(line);
        }
    };
    copyTestClassFile();
    JavaProcess process = new JavaProcessImpl(processFactory.create());
    String testLine = "hello";
    process.setArguments(new String[] { "-readln", testLine });
    process.setJavaClass(HelloWorld.class.getCanonicalName());
    process.setDirectory(testDir);
    process.setJavaExecutable(javaBin);
    Tailer tailer = null;
    try {
        launchProcess(process);
        tailer = process.tailStdOut(listener);
        try (PrintWriter writer = new PrintWriter(new OutputStreamWriter(process.getStdIn()))) {
            writer.println(testLine);
            writer.flush();
        }
        assertEquals("Process exited normally", 0, process.waitFor());
        assertTrue("Process not active", !process.isActive());
        Utils.waitForCondition(1000, new Condition() {
            @Override
            public boolean isTrue() {
                return lines.size() > 1;
            }
        });
        assertEquals(2, lines.size());
        assertEquals(testLine, lines.get(1));
        tailer.stop();
    } finally {
        if (tailer != null) {
            tailer.stop();
        }
        process.destroy();
    }
}
From source file:com.l2jfree.gameserver.model.entity.events.CTF.java
public static void CheckRestoreFlags() {
    CopyOnWriteArrayList<Integer> teamsTakenFlag = new CopyOnWriteArrayList<Integer>();
    try {
        // if there's a player with a flag,
        // add the index of the team whose FLAG WAS TAKEN to the list
        for (L2Player player : _players) {
            if (player != null) {
                final CTFPlayerInfo info = player.as(CTFPlayerInfo.class);
                if (player.isOnline() == 0 && info._haveFlagCTF) { // logged off with a flag in his hands
                    AnnounceToPlayers(false,
                            _eventName + "(CTF): " + player.getName() + " logged off with a CTF flag!");
                    info._haveFlagCTF = false;
                    if (_teams.indexOf(info._teamNameHaveFlagCTF) >= 0) {
                        if (_flagsTaken.get(_teams.indexOf(info._teamNameHaveFlagCTF))) {
                            _flagsTaken.set(_teams.indexOf(info._teamNameHaveFlagCTF), false);
                            spawnFlag(info._teamNameHaveFlagCTF);
                            AnnounceToPlayers(false, _eventName + "(CTF): " + info._teamNameHaveFlagCTF
                                    + " flag now returned to place.");
                        }
                    }
                    removeFlagFromPlayer(player);
                    info._teamNameHaveFlagCTF = null;
                    return;
                } else if (info._haveFlagCTF)
                    teamsTakenFlag.add(_teams.indexOf(info._teamNameHaveFlagCTF));
            }
        }
        // Go over the list of ALL teams
        for (String team : _teams) {
            if (team == null)
                continue;
            int index = _teams.indexOf(team);
            if (!teamsTakenFlag.contains(index)) {
                if (_flagsTaken.get(index)) {
                    _flagsTaken.set(index, false);
                    spawnFlag(team);
                    AnnounceToPlayers(false,
                            _eventName + "(CTF): " + team + " flag returned due to player error.");
                }
            }
        }
        // Check if a player ran away from the event holding a flag:
        for (L2Player player : _players) {
            if (player == null)
                continue;
            final CTFPlayerInfo info = player.as(CTFPlayerInfo.class);
            if (info._haveFlagCTF) {
                if (isOutsideCTFArea(player)) {
                    AnnounceToPlayers(false, _eventName + "(CTF): " + player.getName()
                            + " escaped from the event holding a flag!");
                    info._haveFlagCTF = false;
                    if (_teams.indexOf(info._teamNameHaveFlagCTF) >= 0) {
                        if (_flagsTaken.get(_teams.indexOf(info._teamNameHaveFlagCTF))) {
                            _flagsTaken.set(_teams.indexOf(info._teamNameHaveFlagCTF), false);
                            spawnFlag(info._teamNameHaveFlagCTF);
                            AnnounceToPlayers(false, _eventName + "(CTF): " + info._teamNameHaveFlagCTF
                                    + " flag now returned to place.");
                        }
                    }
                    removeFlagFromPlayer(player);
                    info._teamNameHaveFlagCTF = null;
                    player.teleToLocation(_teamsX.get(_teams.indexOf(info._teamNameCTF)),
                            _teamsY.get(_teams.indexOf(info._teamNameCTF)),
                            _teamsZ.get(_teams.indexOf(info._teamNameCTF)));
                    player.sendMessage("You have been returned to your team spawn");
                    return;
                }
            }
        }
    } catch (Exception e) {
        _log.info("CTF.restoreFlags() Error:", e);
    }
}
From source file:eu.qualityontime.commons.QPropertyUtilsBean.java
/** Base constructor */
public QPropertyUtilsBean() {
    descriptorsCache = new WeakFastHashMap<Class<?>, BeanIntrospectionData>();
    descriptorsCache.setFast(true);
    mappedDescriptorsCache = new WeakFastHashMap<Class<?>, FastHashMap>();
    mappedDescriptorsCache.setFast(true);
    introspectors = new CopyOnWriteArrayList<BeanIntrospector>();
    resetBeanIntrospectors();
}