List of usage examples for java.util.concurrent ConcurrentHashMap size
public int size()
From source file:uk.co.tfd.symplectic.harvester.SymplecticFetch.java
/**
 * Drains completed fetch tasks from the worklist, logging each task's result,
 * and emits a throttled (at most every 5 s) backlog status line.
 *
 * @param worklist map of key -> in-flight fetch task; completed entries are removed here
 * @param tracker  progress tracker, used only for the backlog status log
 */
private void consumeTasks(ConcurrentHashMap<String, FutureTask<String>> worklist, ProgressTracker tracker) {
    for (Entry<String, FutureTask<String>> e : worklist.entrySet()) {
        if (e.getValue().isDone()) {
            try {
                // Fix: typo "Recieved" -> "Received" in the log message.
                LOGGER.info("Received " + e.getValue().get());
            } catch (Exception e1) {
                // SLF4J treats the trailing throwable argument as the stack trace.
                LOGGER.info("Failed {} ", e.getKey(), e1);
            }
            // ConcurrentHashMap allows removal while iterating its entry set.
            worklist.remove(e.getKey());
        }
    }
    // Throttle the status log to one line per 5 seconds.
    if (System.currentTimeMillis() > lastLog + 5000) {
        LOGGER.info("Current Worklist Backlog {} In Pending or Loading state {} ", worklist.size(),
                tracker.pending());
        lastLog = System.currentTimeMillis();
    }
}
From source file:spade.utility.BitcoinTools.java
public void writeBlocksToCSV(int startIndex, int endIndex) { // Block block, int lastBlockId int lastBlockId = -1; final BitcoinTools bitcoinTools = new BitcoinTools(); String pattern = "#.##"; DecimalFormat decimalFormat = new DecimalFormat(pattern); final ConcurrentHashMap<Integer, Block> blockMap = new ConcurrentHashMap<Integer, Block>(); final AtomicInteger currentBlock = new AtomicInteger(startIndex); final int stopIndex = endIndex; final int totalThreads = Runtime.getRuntime().availableProcessors(); class BlockFetcher implements Runnable { public void run() { while (true) { if (blockMap.size() > totalThreads * 5) { // max objects to hold in memory max 1 MB * totalThreads * factor try { Thread.sleep(100); continue; } catch (Exception exception) { }/*from w w w .j av a 2s. com*/ } int blockToFetch = currentBlock.getAndIncrement(); try { blockMap.put(blockToFetch, bitcoinTools.getBlock(blockToFetch)); } catch (JSONException exception) { Bitcoin.log(Level.SEVERE, "Block " + blockToFetch + " has invalid json. 
Redownloading.", exception); try { blockMap.put(blockToFetch, bitcoinTools.getBlock(blockToFetch)); } catch (JSONException ex) { Bitcoin.log(Level.SEVERE, "Block " + blockToFetch + " couldn't be included in CSV.", ex); } } if (blockToFetch >= stopIndex) { break; } } } } ArrayList<Thread> workers = new ArrayList<Thread>(); for (int i = 0; i < totalThreads; i++) { Thread th = new Thread(new BlockFetcher()); workers.add(th); th.start(); } int percentageCompleted = 0; for (int i = startIndex; i < endIndex; i++) { try { Block block; while (!blockMap.containsKey(i)) { } block = blockMap.get(i); blockMap.remove(i); lastBlockId = writeBlockToCSV(block, lastBlockId); if ((((i - startIndex + 1) * 100) / (endIndex - startIndex)) > percentageCompleted) { Runtime rt = Runtime.getRuntime(); long totalMemory = rt.totalMemory() / 1024 / 1024; long freeMemory = rt.freeMemory() / 1024 / 1024; long usedMemory = totalMemory - freeMemory; System.out.print("| Cores: " + rt.availableProcessors() + " | Threads: " + totalThreads + " | Heap (MB) - total: " + totalMemory + ", %age free: " + (freeMemory * 100) / totalMemory + " | At Block: " + (i - startIndex + 1) + " / " + (endIndex - startIndex) + " | Percentage Completed: " + percentageCompleted // + " |\r"); + " |\n"); } percentageCompleted = ((i - startIndex + 1) * 100) / (endIndex - startIndex); } catch (IOException ex) { Bitcoin.log(Level.SEVERE, "Unexpected IOException. Stopping CSV creation.", ex); break; } } for (int i = 0; i < totalThreads; i++) { try { workers.get(i).interrupt(); workers.get(i).join(); } catch (InterruptedException exception) { } } System.out.println("\n\ndone with creating CSVes!"); }
From source file:com.taobao.diamond.client.impl.DefaultDiamondSubscriber.java
/**
 * Unsubscribes the given dataId/group pair. A null group falls back to the
 * default group; when the last group for a dataId is removed, the whole
 * dataId entry is evicted from the cache.
 */
public synchronized void removeDataId(String dataId, String group) {
    if (group == null) {
        group = Constants.DEFAULT_GROUP;
    }
    ConcurrentHashMap<String, CacheData> groupCache = this.cache.get(dataId);
    if (groupCache == null) {
        return;
    }
    groupCache.remove(group);
    log.warn("DataID[" + dataId + "]Group: " + group);
    // Last group gone: retire the dataId entry entirely.
    if (groupCache.isEmpty()) {
        this.cache.remove(dataId);
        log.warn("DataID[" + dataId + "]");
    }
}
From source file:cn.leancloud.diamond.client.impl.DefaultDiamondSubscriber.java
/**
 * Stops watching the (dataId, group) subscription. Null group means the
 * default group. Evicts the dataId from the top-level cache once no group
 * subscriptions remain under it.
 */
public synchronized void removeDataId(String dataId, String group) {
    String effectiveGroup = (null == group) ? Constants.DEFAULT_GROUP : group;
    ConcurrentHashMap<String, CacheData> byGroup = this.cache.get(dataId);
    if (null == byGroup) {
        // Nothing registered under this dataId: nothing to do.
        return;
    }
    byGroup.remove(effectiveGroup);
    log.warn("DataID[" + dataId + "]Group: " + effectiveGroup);
    if (byGroup.size() == 0) {
        this.cache.remove(dataId);
        log.warn("DataID[" + dataId + "]");
    }
}
From source file:hu.sztaki.lpds.pgportal.portlets.workflow.RealWorkflowPortlet.java
/**
 * Displays the workflow configuration interface: resolves the acting user,
 * synchronizes session/graph state, loads the workflow's job configuration
 * from the WFS service, precomputes connector line coordinates for the UI,
 * and selects the type-specific configure.jsp view.
 *
 * NOTE(review): this method mutates portlet-session state heavily and the
 * statement order matters (e.g. "workflow" must be stored in the session
 * before the WFS lookup); treat reordering as behavior-changing.
 */
public void doConfigure(ActionRequest request, ActionResponse response) throws PortletException {
    // Querying logged user; "adminuser" lets an admin act on another user's behalf.
    String userID;
    if (request.getParameter("adminuser") == null)
        userID = request.getRemoteUser();
    else
        userID = request.getParameter("adminuser");
    if (request.getParameter("pcwkf") != null) {
        String workflowName = request.getParameter("workflow");
        // Selected workflow changed since last view: re-seed from the workflow.
        if (!workflowName.equals(request.getPortletSession().getAttribute("cworkflow")))
            NewWorkflowUtil.fromWorkflow(request, response);
        String graphName = PortalCacheService.getInstance().getUser(userID).getWorkflow(workflowName).getGraf();
        // Graph selection changed: apply the graph change before configuring.
        if (!graphName.equals(request.getParameter("pgraf")))
            RealWorkflowUtil.changeGraph(request, response);
    }
    // Over the quota: bail out with a message instead of opening the editor.
    if (UserQuotaUtils.getInstance().userQuotaIsFull(userID)) {
        setRequestAttribute(request.getPortletSession(), "msg", "portal.RealWorkflowPortlet.quotaisoverfull");
        return;
    }
    // Session query
    PortletSession ps = request.getPortletSession();
    // Available resource configuration query; cached once per application scope.
    try {
        if (ps.getAttribute("resources", ps.APPLICATION_SCOPE) == null) {
            ResourceConfigurationFace rc = (ResourceConfigurationFace) InformationBase.getI()
                    .getServiceClient("resourceconfigure", "portal");
            List<Middleware> tmp_r = rc.get();
            ps.setAttribute("resources", tmp_r, ps.APPLICATION_SCOPE);
            ps.setAttribute("pub_resources", tmp_r, ps.APPLICATION_SCOPE);
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    if (request.getParameter("workflow") != null) {
        ps.setAttribute("cworkflow1", request.getParameter("workflow"), ps.APPLICATION_SCOPE);
        request.getPortletSession().setAttribute("cworkflow", request.getParameter("workflow"));
    }
    workflow = request.getParameter("workflow");
    setRequestAttribute(request.getPortletSession(), "graphs",
            PortalCacheService.getInstance().getUser(userID).getAbstactWorkflows());
    setRequestAttribute(request.getPortletSession(), "templates",
            PortalCacheService.getInstance().getUser(userID).getTemplateWorkflows());
    // Locate the WFS service that owns this workflow and fetch its configuration.
    Hashtable hsh = new Hashtable();
    hsh.put("url", PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getWfsID());
    ServiceType st = InformationBase.getI().getService("wfs", "portal", hsh, new Vector());
    try {
        PortalWfsClient pc = (PortalWfsClient) Class.forName(st.getClientObject()).newInstance();
        pc.setServiceURL(st.getServiceUrl());
        pc.setServiceID(st.getServiceID());
        ComDataBean tmp = new ComDataBean();
        tmp.setPortalID(PropertyLoader.getInstance().getProperty("service.url"));
        tmp.setUserID(userID);
        tmp.setWorkflowID(workflow);
        // multinode/singlenode workflows additionally carry workflow properties.
        if (PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getWorkflowType()
                .equals("multinode")
                || PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getWorkflowType()
                        .equals("singlenode")) {
            PortalCacheService.getInstance().getUser(userID)
                    .setConfiguringWorkflowWFProp(pc.getWorkflowConfigData(tmp), pc.getWorkflowProps(tmp));
        } else {
            PortalCacheService.getInstance().getUser(userID)
                    .setConfiguringWorkflow(pc.getWorkflowConfigData(tmp));
        }
        setRequestAttribute(request.getPortletSession(), "jobs",
                PortalCacheService.getInstance().getUser(userID).getConfiguringWorkflow());
        // ltmp accumulates LineCoord connectors between job output and input ports.
        Vector ltmp = new Vector();
        for (int i = 0; i < PortalCacheService.getInstance().getUser(userID).getConfiguringWorkflow()
                .size(); i++) {
            // replace special characters...
            String jobtxt = new String(((JobPropertyBean) PortalCacheService.getInstance().getUser(userID)
                    .getConfiguringWorkflow().get(i)).getTxt());
            ((JobPropertyBean) PortalCacheService.getInstance().getUser(userID).getConfiguringWorkflow().get(i))
                    .setTxt(replaceTextS(jobtxt));
            // inputs
            for (int j = 0; j < ((JobPropertyBean) PortalCacheService.getInstance().getUser(userID)
                    .getConfiguringWorkflow().get(i)).getInputs().size(); j++) {
                PortDataBean ptmp = (PortDataBean) ((JobPropertyBean) PortalCacheService.getInstance()
                        .getUser(userID).getConfiguringWorkflow().get(i)).getInputs().get(j);
                // replace special characters...
                ptmp.setTxt(replaceTextS(ptmp.getTxt()));
                // Input port wired to a predecessor job: find the matching output port
                // (by job name, then by output sequence number) and record the line.
                if (!ptmp.getPrejob().equals("")) {
                    for (int k = 0; k < PortalCacheService.getInstance().getUser(userID)
                            .getConfiguringWorkflow().size(); k++) {
                        if (((JobPropertyBean) PortalCacheService.getInstance().getUser(userID)
                                .getConfiguringWorkflow().get(k)).getName().equals(ptmp.getPrejob())) {
                            for (int z = 0; z < ((JobPropertyBean) PortalCacheService.getInstance()
                                    .getUser(userID).getConfiguringWorkflow().get(k)).getOutputs()
                                    .size(); z++) {
                                if (ptmp.getPreoutput()
                                        .equals("" + ((PortDataBean) ((JobPropertyBean) PortalCacheService
                                                .getInstance().getUser(userID).getConfiguringWorkflow().get(k))
                                                .getOutputs().get(z)).getSeq())) {
                                    long x = ((PortDataBean) ((JobPropertyBean) PortalCacheService.getInstance()
                                            .getUser(userID).getConfiguringWorkflow().get(k)).getOutputs()
                                            .get(z)).getX();
                                    long y = ((PortDataBean) ((JobPropertyBean) PortalCacheService.getInstance()
                                            .getUser(userID).getConfiguringWorkflow().get(k)).getOutputs()
                                            .get(z)).getY();
                                    ltmp.add(new LineCoord("" + ptmp.getX(), "" + ptmp.getY(), "" + x, "" + y));
                                }
                            }
                        }
                    }
                }
            }
            // outputs
            for (int jo = 0; jo < ((JobPropertyBean) PortalCacheService.getInstance().getUser(userID)
                    .getConfiguringWorkflow().get(i)).getOutputs().size(); jo++) {
                PortDataBean ptmpo = (PortDataBean) ((JobPropertyBean) PortalCacheService.getInstance()
                        .getUser(userID).getConfiguringWorkflow().get(i)).getOutputs().get(jo);
                // replace special characters...
                ptmpo.setTxt(replaceTextS(ptmpo.getTxt()));
            }
        }
        setRequestAttribute(request.getPortletSession(), "lineList", ltmp);
    } catch (Exception e) {
        e.printStackTrace();
    }
    // Storage service: use the workflow's own storage, or fall back to the portal default.
    String storageURL = PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getStorageID();
    if (storageURL == null) {
        st = InformationBase.getI().getService("storage", "portal", new Hashtable(), new Vector());
        storageURL = st.getServiceUrl();
    }
    setRequestAttribute(request.getPortletSession(), "storageID", storageURL);
    setRequestAttribute(request.getPortletSession(), "userID", userID);
    setRequestAttribute(request.getPortletSession(), "portalID",
            PropertyLoader.getInstance().getProperty("service.url"));
    setRequestAttribute(request.getPortletSession(), "wrkdata",
            PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow));
    setRequestAttribute(request.getPortletSession(), "grafs", Sorter.getInstance()
            .sortFromValues(PortalCacheService.getInstance().getUser(userID).getAbstactWorkflows()));
    setRequestAttribute(request.getPortletSession(), "awkfs", Sorter.getInstance()
            .sortFromValues(PortalCacheService.getInstance().getUser(userID).getTemplateWorkflows()));
    // If workflow instance exists, the graph is not exchangeable.
    String enablecgraf = "";
    if (!PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getAllRuntimeInstance()
            .isEmpty()) { // AllWorkflow
        ConcurrentHashMap h = PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow)
                .getAllRuntimeInstance();
        // A single entry is tolerated only if it is the "AllWorkflow" placeholder.
        if (h.size() == 1 && !(h.containsKey("AllWorkflow") || h.containsKey("allworkflow"))) {
            // "allworkflow".equalsIgnoreCase(runtimeID)
            enablecgraf = "disabled";
        } else if (h.size() > 1) {
            enablecgraf = "disabled";
        }
        // System.out.println("Modify graf LOCK:" + enablecgraf + " wfs:" + PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getAllRuntimeInstance());
    }
    setRequestAttribute(request.getPortletSession(), "enablecgraf", enablecgraf);
    //
    // set configure ID
    String confID = userID + String.valueOf(System.currentTimeMillis());
    setRequestAttribute(request.getPortletSession(), "confID", confID);
    //
    doList(request, response);
    // Route to the workflow-type-specific configuration view.
    request.setAttribute("jsp", "/jsp/workflow/"
            + PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getWorkflowType()
            + "/configure.jsp");
    setRequestAttribute(request.getPortletSession(), "navigatepage", "/jsp/workflow/"
            + PortalCacheService.getInstance().getUser(userID).getWorkflow(workflow).getWorkflowType()
            + "/configure.jsp");
}
From source file:com.pari.nm.modules.jobs.PcbImportJob.java
/**
 * Parses a "VoipPhoneList" XML document, attaches each phone to the managed
 * switch it hangs off (resolved through the customer's IP -> device-id map),
 * and persists the grouped result. All errors are logged, never propagated.
 *
 * @param voipPhoneList XML text whose root element must be "VoipPhoneList"
 */
private void populateVoIPPhones(String voipPhoneList) {
    try {
        InputStream fin = new ByteArrayInputStream(voipPhoneList.getBytes());
        DocumentBuilder docBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
        Document doc = docBuilder.parse(fin);
        Element docRoot = doc.getDocumentElement();
        if (!docRoot.getTagName().equals("VoipPhoneList")) {
            throw new Exception("Invalid format. Expecting: VoipPhoneList, found " + docRoot.getTagName());
        }
        // switch device id -> phones attached to that switch
        HashMap<Integer, ArrayList<VoipPhone>> phonesMap = new HashMap<Integer, ArrayList<VoipPhone>>();
        ConcurrentHashMap<String, Integer> custIpMap = CustomerManager.getInstance()
                .getCustomerIpMap(customerId);
        List<Element> phoneElementList = XMLUtil.getFirstLevelChildElementsByTagName(docRoot, "VoipPhone");
        // Only worth iterating if there are phones AND known switch IPs to map them to.
        if ((phoneElementList != null) && (phoneElementList.size() > 0) && (custIpMap != null)
                && (custIpMap.size() > 0)) {
            for (Element voipPhoneElem : phoneElementList) {
                VoipPhone phone = new VoipPhone();
                // Each child element is optional; set only what is present, trimmed.
                String ipAddress = XMLUtil.getChildText(voipPhoneElem, "IpAddress");
                if (ipAddress != null) {
                    phone.setIpAddress(ipAddress.trim());
                }
                String phoneModel = XMLUtil.getChildText(voipPhoneElem, "PhoneModel");
                if (phoneModel != null) {
                    phone.setModel(phoneModel.trim());
                }
                String vendorName = XMLUtil.getChildText(voipPhoneElem, "VendorName");
                if (vendorName != null) {
                    phone.setVendorName(vendorName.trim());
                }
                String macAddress = XMLUtil.getChildText(voipPhoneElem, "MACAddress");
                if (macAddress != null) {
                    phone.setMacAddress(macAddress.trim());
                }
                String switchInterface = XMLUtil.getChildText(voipPhoneElem, "SwitchInterface");
                if (switchInterface != null) {
                    phone.setSwitchInterface(switchInterface.trim());
                }
                String serialNumber = XMLUtil.getChildText(voipPhoneElem, "SerialNumber");
                if (serialNumber != null) {
                    phone.setSerialNumber(serialNumber.trim());
                }
                String userName = XMLUtil.getChildText(voipPhoneElem, "UserName");
                if (userName != null) {
                    phone.setUserName(userName.trim());
                }
                // NOTE(review): this reads the "UserName" element a second time but stores
                // it as the phone number — looks like a copy-paste bug; the tag was
                // probably meant to be the extension/phone-number element. Confirm against
                // the expected XML schema before changing.
                String extension = XMLUtil.getChildText(voipPhoneElem, "UserName");
                if (extension != null) {
                    phone.setPhoneNumber(extension.trim());
                }
                String switchIpAddress = XMLUtil.getChildText(voipPhoneElem, "SwitchIpAddress");
                Integer deviceId = custIpMap.get(switchIpAddress);
                // Keep only phones whose switch resolves to a known managed device.
                if ((deviceId != null) && (deviceId.intValue() != -1)) {
                    phone.setSwitchNodeId(deviceId);
                    ArrayList<VoipPhone> phonesList = phonesMap.get(deviceId);
                    if (phonesList == null) {
                        phonesList = new ArrayList<VoipPhone>();
                        phonesMap.put(deviceId, phonesList);
                    }
                    phonesList.add(phone);
                }
            }
        }
        if (phonesMap.size() > 0) {
            try {
                VOIPDBHelper.saveVOIPPhones(customerId, instanceId, phonesMap);
            } catch (Exception ex) {
                logger.warn("Error while saving VOIP Phones for the customer: " + customerId + " instance="
                        + instanceId, ex);
            }
        }
    } catch (Exception ex) {
        logger.warn(
                "Error while saving VOIP Phones for the customer: " + customerId + " instance=" + instanceId,
                ex);
    }
}
From source file:org.apache.falcon.entity.store.ConfigurationStore.java
private void loadEntity(final EntityType type) throws FalconException { try {//from ww w .j a va 2 s .c om final ConcurrentHashMap<String, Entity> entityMap = dictionary.get(type); FileStatus[] files = fs.globStatus(new Path(storePath, type.name() + Path.SEPARATOR + "*")); if (files != null) { final ExecutorService service = Executors.newFixedThreadPool(100); for (final FileStatus file : files) { service.execute(new Runnable() { @Override public void run() { try { String fileName = file.getPath().getName(); String encodedEntityName = fileName.substring(0, fileName.length() - 4); // drop // ".xml" String entityName = URLDecoder.decode(encodedEntityName, UTF_8); Entity entity = restore(type, entityName); entityMap.put(entityName, entity); } catch (IOException | FalconException e) { LOG.error("Unable to restore entity of", file); } } }); } service.shutdown(); if (service.awaitTermination(10, TimeUnit.MINUTES)) { LOG.info("Restored Configurations for entity type: {} ", type.name()); } else { LOG.warn("Time out happened while waiting for all threads to finish while restoring entities " + "for type: {}", type.name()); } // Checking if all entities were loaded if (entityMap.size() != files.length) { throw new FalconException("Unable to restore configurations for entity type " + type.name()); } for (Entity entity : entityMap.values()) { onReload(entity); } } } catch (IOException e) { throw new FalconException("Unable to restore configurations", e); } catch (InterruptedException e) { throw new FalconException( "Failed to restore configurations in 10 minutes for entity type " + type.name()); } }
From source file:org.dataconservancy.dcs.integration.main.UpdateDepositIT.java
/**
 * Deposits one DU, then races NUM_CONCURRENT_DEPOSITS threads all updating
 * that SAME lineage; exactly one update must win.
 *
 * NOTE(review): the asserts inside the worker Runnable throw AssertionError,
 * which is an Error and is NOT caught by the {@code catch (Exception e)}
 * below — a failed in-thread assertion kills that thread silently and is only
 * detected indirectly by the final success-count assertion.
 */
@Test
public void testConcurrentUpdatesToSameLineage() throws Exception {
    SIP sip1 = new SIP();
    sip1.addVersionWithNewFile(null, "v11");
    Dcp result1 = deposit(sip1);
    assertNotNull(result1);
    assertEquals(1, result1.getDeliverableUnits().size());
    final DcsDeliverableUnit du1 = result1.getDeliverableUnits().iterator().next();
    assertNotNull(du1.getLineageId());
    // Attempt to update the du from sip1 simultaneously from a number of
    // threads. Only one update should succeed.
    Thread[] threads = new Thread[NUM_CONCURRENT_DEPOSITS];
    // Winners record themselves here: DU id -> lineage id.
    final ConcurrentHashMap<String, String> success = new ConcurrentHashMap<String, String>();
    for (int i = 0; i < threads.length; i++) {
        final String thread_name = "" + i;
        threads[i] = new Thread(new Runnable() {
            public void run() {
                SIP sip2 = new SIP();
                try {
                    sip2.addVersion(du1.getId(), "v2 " + thread_name);
                    Dcp result2 = deposit(sip2);
                    if (result2 == null) {
                        // Another update is in progress or succeeded
                    } else {
                        assertEquals(1, result2.getDeliverableUnits().size());
                        DcsDeliverableUnit du2 = result2.getDeliverableUnits().iterator().next();
                        assertNotNull(du2.getLineageId());
                        // The winning update must stay on the original lineage.
                        assertEquals(du1.getLineageId(), du2.getLineageId());
                        success.put(du2.getId(), du2.getLineageId());
                    }
                } catch (Exception e) {
                    StringWriter sw = new StringWriter();
                    e.printStackTrace(new PrintWriter(sw));
                    log.info("Deposit failed: " + sw.toString());
                }
            }
        }, thread_name);
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].join();
    }
    // Exactly one concurrent update to the same lineage may succeed.
    assertEquals(1, success.size());
}
From source file:org.dataconservancy.dcs.integration.main.UpdateDepositIT.java
/**
 * Deposits NUM_CONCURRENT_DEPOSITS independent DUs, then updates each one
 * from its own thread simultaneously; since the lineages are DIFFERENT,
 * every update must succeed.
 *
 * NOTE(review): fail()/assert calls inside the worker Runnable throw
 * AssertionError (an Error), which the {@code catch (Exception e)} does not
 * intercept — an in-thread failure kills that thread silently and surfaces
 * only through the final success-count assertion.
 */
@Test
public void testConcurrentUpdatesToDifferentLineages() throws Exception {
    // Attempt to update different lineages simultaneously from a number of
    // threads. All updates should succeed.
    DcsDeliverableUnit[] predecessors = new DcsDeliverableUnit[NUM_CONCURRENT_DEPOSITS];
    for (int i = 0; i < predecessors.length; i++) {
        SIP sip = new SIP();
        sip.addVersionWithNewFile(null, "v11");
        Dcp result = deposit(sip);
        assertNotNull(result);
        assertEquals(1, result.getDeliverableUnits().size());
        final DcsDeliverableUnit du = result.getDeliverableUnits().iterator().next();
        assertNotNull(du.getLineageId());
        predecessors[i] = du;
    }
    Thread[] threads = new Thread[NUM_CONCURRENT_DEPOSITS];
    // Successful updates record themselves here: DU id -> lineage id.
    final ConcurrentHashMap<String, String> success = new ConcurrentHashMap<String, String>();
    for (int i = 0; i < threads.length; i++) {
        final String thread_name = "" + i;
        final DcsDeliverableUnit pred = predecessors[i];
        threads[i] = new Thread(new Runnable() {
            public void run() {
                SIP sip2 = new SIP();
                final String title = "v2 " + thread_name;
                try {
                    sip2.addVersion(pred.getId(), title);
                    Dcp result2 = deposit(sip2);
                    assertNotNull(result2);
                    assertEquals(1, result2.getDeliverableUnits().size());
                    DcsDeliverableUnit du2 = result2.getDeliverableUnits().iterator().next();
                    assertNotNull(du2.getLineageId());
                    // The update must remain on its predecessor's lineage.
                    assertEquals(pred.getLineageId(), du2.getLineageId());
                    success.put(du2.getId(), du2.getLineageId());
                } catch (Exception e) {
                    StringWriter sw = new StringWriter();
                    e.printStackTrace(new PrintWriter(sw));
                    fail("SIP deposit (with predecessor [" + pred.getId() + "] and title [" + title
                            + "]) failed: " + sw.toString());
                }
            }
        }, thread_name);
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].start();
    }
    for (int i = 0; i < threads.length; i++) {
        threads[i].join();
    }
    // Every thread updated a distinct lineage, so every update must succeed.
    assertEquals(NUM_CONCURRENT_DEPOSITS, success.size());
}
From source file:be.solidx.hot.test.TestScriptExecutors.java
@SuppressWarnings("rawtypes") private Collection<Long> multiThreadedTest(final Script script, final int max, final ScriptExecutor scriptExecutor) throws InterruptedException { final int iterations = 100; ExecutorService executor = Executors.newFixedThreadPool(8); final ConcurrentHashMap<String, Long> results = new ConcurrentHashMap<String, Long>(); final ConcurrentHashMap<String, Long> avgs = new ConcurrentHashMap<String, Long>(); long benchStart = System.currentTimeMillis(); for (int i = 0; i < iterations; i++) { Runnable runnable = new Runnable() { @SuppressWarnings("unchecked") @Override//from w ww . j a v a2 s. com public void run() { try { long res = 0; Map<String, Object> parameters = new HashMap<String, Object>(); parameters.put("i", new Integer(max)); parameters.put("n", new Integer(0)); //long starting = System.currentTimeMillis(); Object object = scriptExecutor.execute(script, parameters); if (object instanceof Bindings) { Bindings bindings = (Bindings) object; res = (Integer) bindings.get("result"); bindings.clear(); } else if (object instanceof Double) { res = Math.round((Double) object); } else if (object instanceof Long) { res = (long) object; } else res = new Long((Integer) object); long end = System.currentTimeMillis() - avgs.get(this.toString()); results.put(UUID.randomUUID().getLeastSignificantBits() + "", res); avgs.put(this.toString(), end); } catch (Exception e) { e.printStackTrace(); } } }; avgs.put(runnable.toString(), System.currentTimeMillis()); executor.submit(runnable); } while (results.size() < iterations) { Thread.sleep(50); } //Thread.sleep(20000); double sum = 0; for (Long value : avgs.values()) { sum += value; } System.out.println((sum / (double) iterations) + ""); System.out.println("==== Time needed for all requests: " + (System.currentTimeMillis() - benchStart)); results.remove("avg"); executor = null; return results.values(); }