List of usage examples for java.util.concurrent ConcurrentHashMap get
public V get(Object key)
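ConcurrentHashMap.get returns the value currently mapped to the key, or null if there is no mapping, and it does so without locking the whole table. Before the project-sized examples below, here is a minimal, self-contained sketch of the call itself (class and variable names are illustrative, not taken from the examples):

import java.util.concurrent.ConcurrentHashMap;

public class GetDemo {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> hits = new ConcurrentHashMap<>();
        hits.put("home", 42);

        Integer home = hits.get("home");    // 42
        Integer about = hits.get("about");  // null: no mapping for this key

        System.out.println("home=" + home + ", about=" + about);

        // get never returns a partially written value, but a null result only means
        // "absent at the moment of the call" -- another thread may insert right after.
        Integer safe = hits.getOrDefault("about", 0);
        System.out.println("about with default: " + safe);
    }
}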
From source file:com.starit.diamond.client.impl.DefaultDiamondSubscriber.java
public void addDataId(String dataId, String group) {
    SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd HH:mm:ss");
    log.info("diamond client start:" + formatter.format(new Date(System.currentTimeMillis())));
    if (null == group) {
        group = Constants.DEFAULT_GROUP;
    }
    ConcurrentHashMap<String, CacheData> cacheDatas = this.cache.get(dataId);
    if (null == cacheDatas) {
        ConcurrentHashMap<String, CacheData> newCacheDatas = new ConcurrentHashMap<String, CacheData>();
        ConcurrentHashMap<String, CacheData> oldCacheDatas = this.cache.putIfAbsent(dataId, newCacheDatas);
        if (null != oldCacheDatas) {
            cacheDatas = oldCacheDatas;
        } else {
            cacheDatas = newCacheDatas;
        }
    }
    CacheData cacheData = cacheDatas.get(group);
    if (null == cacheData) {
        cacheDatas.putIfAbsent(group, new CacheData(dataId, group));
        if (log.isInfoEnabled()) {
            log.info("DataID[" + dataId + "]Group" + group);
        }
        this.start();
        DiamondClientUtil.addDataId(this.clusterType, dataId + "-" + group);
    }
}
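The pattern above -- get(), then putIfAbsent(), then keep whichever instance won the race -- is the classic pre-Java-8 way to lazily create a nested map entry without locking. A minimal hedged sketch of the same idiom, with illustrative names rather than the ones from the example:

import java.util.concurrent.ConcurrentHashMap;

public class NestedMapDemo {
    // Outer map: dataId -> (group -> value); both levels are illustrative.
    private final ConcurrentHashMap<String, ConcurrentHashMap<String, String>> cache =
            new ConcurrentHashMap<>();

    public ConcurrentHashMap<String, String> groupMapFor(String dataId) {
        ConcurrentHashMap<String, String> existing = cache.get(dataId);
        if (existing != null) {
            return existing;                   // fast path: already present
        }
        ConcurrentHashMap<String, String> fresh = new ConcurrentHashMap<>();
        ConcurrentHashMap<String, String> raced = cache.putIfAbsent(dataId, fresh);
        return raced != null ? raced : fresh;  // keep whichever instance won the race
    }
}

On Java 8 and later, cache.computeIfAbsent(dataId, k -> new ConcurrentHashMap<>()) collapses the same get/putIfAbsent dance into a single atomic call.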
From source file:hu.sztaki.lpds.pgportal.portlets.workflow.EasyWorkflowPortlet.java
/**
 * Data transmission to visualize the Portlet UI.
 */
@Override
public void doView(RenderRequest request, RenderResponse response) throws PortletException, IOException {
    response.setContentType("text/html");
    if (!isInited()) {
        getPortletContext().getRequestDispatcher("/WEB-INF/jsp/error/init.jsp").include(request, response);
        return;
    }
    openRequestAttribute(request);
    // WSPgradeLogger.viewStart(request.getRemoteUser(), this.getClass().getName());
    try {
        //if("main".equals(request.getParameter("render"))) jsp=mainjsp;
        if (request.getAttribute("jsp") == null)
            request.setAttribute("jsp", mainjsp);
        if (request.getAttribute("jsp").equals(mainjsp)) {
            ConcurrentHashMap wfmainhsh = new ConcurrentHashMap();
            ConcurrentHashMap wfhsh = PortalCacheService.getInstance().getUser(request.getRemoteUser())
                    .getWorkflows();
            Enumeration keys = wfhsh.keys();
            while (keys.hasMoreElements()) {
                String wf = keys.nextElement().toString();
                WorkflowData wData = PortalCacheService.getInstance().getUser(request.getRemoteUser())
                        .getWorkflow(wf);
                if (wData.isAppMain()) {
                    //System.out.println("main found :" + wf);
                    wfmainhsh.put(wf, wfhsh.get(wf));
                }
            }
            request.setAttribute("appWorkflowList", Sorter.getInstance().sortFromValues(wfmainhsh)); //wfmainhsh
        }
        request.setAttribute("userID", request.getRemoteUser());
        request.setAttribute("portalID", PropertyLoader.getInstance().getProperty("service.url"));
        PortletRequestDispatcher dispatcher = null;
        dispatcher = pContext.getRequestDispatcher((String) request.getAttribute("jsp"));
        dispatcher.include(request, response);
    } catch (IOException e) {
        throw new PortletException("JSPPortlet.doView exception", e);
    }
    cleanRequestAttribute(request.getPortletSession());
    // action = "";
    // WSPgradeLogger.viewStop(request.getRemoteUser(), this.getClass().getName());
}
From source file:com.github.jackygurui.vertxredissonrepository.repository.SaveAndSearchAndGetCallInConcurrentTest.java
@Test
public void test2SaveAndSearchAndGetCallIn(TestContext context) throws Exception {
    Async async = context.async();
    JsonNode source = JsonLoader.fromResource("/CallIn.json");
    int records = 1000;
    AtomicLong total = new AtomicLong(0);
    ConcurrentHashMap<JsonObject, String> m = new ConcurrentHashMap<>();
    Stream<JsonObject> stream = IntStream.rangeClosed(0, records).mapToObj(e -> {
        JsonObject clone = new JsonObject(Json.encode(source));
        Long number = Long.parseLong(clone.getString("phoneNumber")) + e;
        clone.put("phoneNumber", number + "");
        Long callTime = clone.getLong("callTime") + e;
        clone.put("callTime", callTime);
        return clone;
    });
    StopWatch sw = new StopWatch();
    sw.start();
    stream.parallel().forEach(e -> {
        org.simondean.vertx.async.Async.waterfall().<String>task(t -> {
            callInRepository.create(Json.encode(e), t);
        }).<List<CallIn>>task((id, t) -> {
            m.put(e, id);
            AtomicLong idc = new AtomicLong(0);
            org.simondean.vertx.async.Async.retry().<List<CallIn>>task(tt -> {
                callInRepository.searchIndexByScoreAndGet("callTime", e.getDouble("callTime"),
                        e.getDouble("callTime"), 0, 1, ttt -> {
                            logger.info("id = " + id + " | retry count: " + idc.incrementAndGet());
                            tt.handle(ttt.succeeded() && ttt.result() != null && !ttt.result().isEmpty()
                                    ? Future.succeededFuture(ttt.result())
                                    : Future.failedFuture(ttt.cause()));
                        });
            }).times(100000).run(t);
        }).run(r -> {
            context.assertTrue(r.succeeded());
            if (r.succeeded()) {
                context.assertFalse(r.result().isEmpty());
                context.assertEquals(1, r.result().size());
                CallIn ci = r.result().iterator().next();
                context.assertNotNull(ci);
                logger.info(Json.encode(ci));
                CallIn cii = Json.decodeValue(e.put("id", m.get(e)).encode(), CallIn.class);
                context.assertEquals(Json.encode(cii), Json.encode(ci));
            }
            long t;
            if ((t = total.incrementAndGet()) == records) {
                sw.stop();
                logger.info("time to concurrently save and search and get " + records
                        + " call in records: " + sw.getTime());
                async.complete();
            } else {
                logger.info("t = " + t);
            }
        });
    });
}
From source file:com.web.server.WarDeployer.java
/**
 * This method is the implementation of the war deployer which frequently scans the deploy
 * directory and if there is a change in war redeploys and configures the map.
 */
public void run() {
    File file;
    ConcurrentHashMap filePrevMap = new ConcurrentHashMap();
    ConcurrentHashMap fileCurrMap = new ConcurrentHashMap();
    FileInfo filePrevLastModified;
    FileInfo fileCurrLastModified;
    String filePath;
    FileInfo fileinfoTmp;
    URLClassLoader loader = (URLClassLoader) ClassLoader.getSystemClassLoader();
    URL[] urls = loader.getURLs();
    warsDeployed = new CopyOnWriteArrayList();
    //System.out.println("URLS="+urls[0]);
    WebClassLoader customClassLoader;
    while (true) {
        file = new File(scanDirectory);
        File[] files = file.listFiles();
        for (int i = 0; i < files.length; i++) {
            if (files[i].isDirectory())
                continue;
            //Long lastModified=(Long) fileMap.get(files[i].getName());
            if (files[i].getName().endsWith(".war")) {
                filePath = files[i].getAbsolutePath();
                //logger.info("filePath"+filePath);
                filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".war"));
                File warDirectory = new File(filePath);
                fileinfoTmp = new FileInfo();
                fileinfoTmp.setFile(files[i]);
                fileinfoTmp.setLastModified(files[i].lastModified());
                if (!warDirectory.exists() || fileCurrMap.get(files[i].getName()) == null
                        && filePrevMap.get(files[i].getName()) == null) {
                    if (warDirectory.exists()) {
                        deleteDir(warDirectory);
                    }
                    customClassLoader = new WebClassLoader(urls);
                    synchronized (urlClassLoaderMap) {
                        logger.info("WARDIRECTORY=" + warDirectory.getAbsolutePath());
                        urlClassLoaderMap.put(warDirectory.getAbsolutePath().replace("\\", "/"),
                                customClassLoader);
                    }
                    extractWar(files[i], customClassLoader);
                    //System.out.println("War Deployed Successfully in path: "+filePath);
                    AddUrlToClassLoader(warDirectory, customClassLoader);
                    numberOfWarDeployed++;
                    logger.info(files[i] + " Deployed");
                    warsDeployed.add(files[i].getName());
                    filePrevMap.put(files[i].getName(), fileinfoTmp);
                }
                fileCurrMap.put(files[i].getName(), fileinfoTmp);
            }
            /*if(lastModified==null||lastModified!=files[i].lastModified()){
                fileMap.put(files[i].getName(),files[i].lastModified());
            }*/
        }
        Set keyset = fileCurrMap.keySet();
        Iterator ite = keyset.iterator();
        String fileName;
        while (ite.hasNext()) {
            fileName = (String) ite.next();
            //logger.info("fileName"+fileName);
            filePrevLastModified = (FileInfo) filePrevMap.get(fileName);
            fileCurrLastModified = (FileInfo) fileCurrMap.get(fileName);
            if (filePrevLastModified != null)
                //logger.info("lastmodified="+filePrevLastModified.getLastModified());
                //System.out.println("prevmodified"+fileCurrLastModified.getLastModified()+""+filePrevLastModified.getLastModified());
                if (fileCurrLastModified != null) {
                    //System.out.println("prevmodified"+fileCurrLastModified.getLastModified());
                }
            if (filePrevLastModified == null
                    || filePrevLastModified.getLastModified() != fileCurrLastModified.getLastModified()) {
                filePath = fileCurrLastModified.getFile().getAbsolutePath();
                //logger.info("filePath"+filePath);
                filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".war"));
                File warDirectory = new File(filePath);
                //logger.info("WARDIRECTORY="+warDirectory.getAbsolutePath());
                if (warDirectory.exists()) {
                    WebClassLoader webClassLoader = (WebClassLoader) urlClassLoaderMap
                            .get(warDirectory.getAbsolutePath().replace("\\", "/"));
                    synchronized (executorServiceMap) {
                        try {
                            new ExecutorServicesConstruct().removeExecutorServices(executorServiceMap,
                                    new File(warDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                            + "executorservices.xml"),
                                    webClassLoader);
                        } catch (Exception e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                        //System.out.println("executorServiceMap"+executorServiceMap);
                    }
                    synchronized (messagingClassMap) {
                        try {
                            new MessagingClassConstruct().removeMessagingClass(messagedigester,
                                    new File(warDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                            + "messagingclass.xml"),
                                    messagingClassMap);
                        } catch (Exception e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                        System.out.println("executorServiceMap" + executorServiceMap);
                    }
                    ClassLoaderUtil.cleanupJarFileFactory(ClassLoaderUtil.closeClassLoader(webClassLoader));
                    try {
                        webClassLoader.close();
                    } catch (IOException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    logger.info("ServletMapping" + servletMapping);
                    logger.info("warDirectory=" + warDirectory.getAbsolutePath().replace("\\", "/"));
                    urlClassLoaderMap.remove(warDirectory.getAbsolutePath().replace("\\", "/"));
                    WebAppConfig webAppConfig = (WebAppConfig) servletMapping
                            .remove(warDirectory.getAbsolutePath().replace("\\", "/"));
                    System.gc();
                    deleteDir(warDirectory);
                    warsDeployed.remove(fileName);
                    removeServletFromSessionObject(webAppConfig,
                            warDirectory.getAbsolutePath().replace("\\", "/"));
                    numberOfWarDeployed--;
                }
                customClassLoader = new WebClassLoader(urls);
                logger.info(customClassLoader);
                urlClassLoaderMap.put(warDirectory.getAbsolutePath().replace("\\", "/"), customClassLoader);
                extractWar(fileCurrLastModified.getFile(), customClassLoader);
                //System.out.println("War Deployed Successfully in path: "+fileCurrLastModified.getFile().getAbsolutePath());
                AddUrlToClassLoader(warDirectory, customClassLoader);
                numberOfWarDeployed++;
                warsDeployed.add(fileName);
                logger.info(filePath + ".war Deployed");
            }
        }
        keyset = filePrevMap.keySet();
        ite = keyset.iterator();
        while (ite.hasNext()) {
            fileName = (String) ite.next();
            filePrevLastModified = (FileInfo) filePrevMap.get(fileName);
            fileCurrLastModified = (FileInfo) fileCurrMap.get(fileName);
            if (fileCurrLastModified == null) {
                filePath = filePrevLastModified.getFile().getAbsolutePath();
                filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".war"));
                logger.info("filePath" + filePath);
                File deleteDirectory = new File(filePath);
                logger.info("Delete Directory" + deleteDirectory.getAbsolutePath().replace("\\", "/"));
                WebClassLoader webClassLoader = (WebClassLoader) urlClassLoaderMap
                        .get(deleteDirectory.getAbsolutePath().replace("\\", "/"));
                synchronized (executorServiceMap) {
                    try {
                        new ExecutorServicesConstruct().removeExecutorServices(executorServiceMap,
                                new File(deleteDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                        + "executorservices.xml"),
                                webClassLoader);
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    //System.out.println("executorServiceMap"+executorServiceMap);
                }
                synchronized (messagingClassMap) {
                    try {
                        new MessagingClassConstruct().removeMessagingClass(messagedigester,
                                new File(deleteDirectory.getAbsolutePath().replace("\\", "/") + "/WEB-INF/"
                                        + "messagingclass.xml"),
                                messagingClassMap);
                    } catch (Exception e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    //System.out.println("executorServiceMap"+executorServiceMap);
                }
                WebAppConfig webAppConfig = (WebAppConfig) servletMapping
                        .remove(deleteDirectory.getAbsolutePath().replace("\\", "/"));
                ClassLoaderUtil.cleanupJarFileFactory(ClassLoaderUtil.closeClassLoader(webClassLoader));
                urlClassLoaderMap.remove(deleteDirectory.getAbsolutePath().replace("\\", "/"));
                logger.info("ServletMapping" + servletMapping);
                logger.info("warDirectory=" + deleteDirectory.getAbsolutePath().replace("\\", "/"));
                try {
                    logger.info(webClassLoader);
                    logger.info("CLASSLOADER IS CLOSED");
                    webClassLoader.close();
                } catch (Throwable e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
                System.gc();
                deleteDir(deleteDirectory);
                numberOfWarDeployed--;
                warsDeployed.remove(fileName);
                try {
                    removeServletFromSessionObject(webAppConfig,
                            deleteDirectory.getAbsolutePath().replace("\\", "/"));
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
                logger.info(filePath + ".war Undeployed");
            }
        }
        filePrevMap.keySet().removeAll(filePrevMap.keySet());
        filePrevMap.putAll(fileCurrMap);
        fileCurrMap.keySet().removeAll(fileCurrMap.keySet());
        //System.out.println("filePrevMap="+filePrevMap);
        //System.out.println("fileCurrMap="+fileCurrMap);
        try {
            Thread.sleep(3000);
        } catch (InterruptedException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
From source file:com.untangle.app.license.LicenseManagerImpl.java
/**
 * Update the app-to-License map.
 */
private synchronized void _mapLicenses() {
    /* Create a new map of all of the valid licenses */
    ConcurrentHashMap<String, License> newMap = new ConcurrentHashMap<String, License>();
    LinkedList<License> newList = new LinkedList<License>();
    License license = null;

    if (this.settings != null) {
        for (License lic : this.settings.getLicenses()) {
            try {
                /**
                 * Create a duplicate - we're about to fill in metadata
                 * But we don't want to mess with the original
                 */
                license = new License(lic);

                /**
                 * Complete Meta-data
                 */
                _setValidAndStatus(license);

                String identifier = license.getCurrentName();
                if (identifier == null) {
                    logger.warn("Ignoring license with no name: " + license);
                    continue;
                }

                License current = newMap.get(identifier);

                /* current license is newer and better */
                if ((current != null) && (current.getEnd() > license.getEnd()))
                    continue;

                logger.info("Adding License: " + license.getCurrentName() + " to Map. (valid: "
                        + license.getValid() + ")");

                newMap.put(identifier, license);
                newList.add(license);
            } catch (Exception e) {
                logger.warn("Failed to load license: " + license, e);
            }
        }
    }

    this.licenseMap = newMap;
    this.licenseList = newList;
}
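Here get() is only called on a private map that is still being built; the finished map is then published by swapping a field reference, so readers never observe a half-built state. A minimal hedged sketch of that rebuild-then-swap idea (LicenseIndex, License and rebuild are illustrative names, not part of the original source):

import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

public class LicenseIndex {
    /** Illustrative stand-in for the example's License type. */
    public static final class License {
        final String name;
        final long end;
        public License(String name, long end) { this.name = name; this.end = end; }
    }

    // volatile so readers always see the most recently published snapshot
    private volatile ConcurrentHashMap<String, License> licenseMap = new ConcurrentHashMap<>();

    public License lookup(String name) {
        return licenseMap.get(name);   // reads never observe a half-built map
    }

    public synchronized void rebuild(List<License> source) {
        ConcurrentHashMap<String, License> newMap = new ConcurrentHashMap<>();
        for (License lic : source) {
            License current = newMap.get(lic.name);
            if (current != null && current.end > lic.end) {
                continue;              // keep the entry that expires later
            }
            newMap.put(lic.name, lic);
        }
        licenseMap = newMap;           // publish the finished map in one reference write
    }
}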
From source file:com.web.server.SARDeployer.java
/**
 * This method is the implementation of the SAR deployer.
 */
public void run() {
    String filePath;
    FileInfo fileinfoTmp;
    ConcurrentHashMap filePrevMap = new ConcurrentHashMap();
    ConcurrentHashMap fileCurrMap = new ConcurrentHashMap();
    FileInfo filePrevLastModified;
    FileInfo fileCurrLastModified;
    DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
        protected void loadRules() {
            // TODO Auto-generated method stub
            try {
                loadXMLRules(new InputSource(new FileInputStream("./config/sar-config.xml")));
            } catch (FileNotFoundException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    });
    sardigester = serverdigesterLoader.newDigester();
    while (true) {
        try {
            File file = new File(deployDirectory);
            File[] files = file.listFiles();
            for (int i = 0; i < files.length; i++) {
                if (files[i].isDirectory())
                    continue;
                //Long lastModified=(Long) fileMap.get(files[i].getName());
                if (files[i].getName().toLowerCase().endsWith(".sar")) {
                    filePath = files[i].getAbsolutePath();
                    //logger.info("filePath"+filePath);
                    filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".sar"));
                    File sarDirectory = new File(filePath + "sar");
                    fileinfoTmp = new FileInfo();
                    fileinfoTmp.setFile(files[i]);
                    fileinfoTmp.setLastModified(files[i].lastModified());
                    if (!sarDirectory.exists() || fileCurrMap.get(files[i].getName()) == null
                            && filePrevMap.get(files[i].getName()) == null) {
                        if (sarDirectory.exists()) {
                            deleteDir(sarDirectory);
                        }
                        try {
                            extractSar(files[i], sarDirectory.getAbsolutePath());
                        } catch (IOException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                        //System.out.println("War Deployed Successfully in path: "+filePath);
                        numberOfSarDeployed++;
                        logger.info(files[i] + " Deployed");
                        sarsDeployed.add(files[i].getName());
                        filePrevMap.put(files[i].getName(), fileinfoTmp);
                    }
                    fileCurrMap.put(files[i].getName(), fileinfoTmp);
                }
                /*if(lastModified==null||lastModified!=files[i].lastModified()){
                    fileMap.put(files[i].getName(),files[i].lastModified());
                }*/
            }
            Set keyset = fileCurrMap.keySet();
            Iterator ite = keyset.iterator();
            String fileName;
            while (ite.hasNext()) {
                fileName = (String) ite.next();
                //logger.info("fileName"+fileName);
                filePrevLastModified = (FileInfo) filePrevMap.get(fileName);
                fileCurrLastModified = (FileInfo) fileCurrMap.get(fileName);
                if (filePrevLastModified != null)
                    //logger.info("lastmodified="+filePrevLastModified.getLastModified());
                    //System.out.println("prevmodified"+fileCurrLastModified.getLastModified()+""+filePrevLastModified.getLastModified());
                    if (fileCurrLastModified != null) {
                        //System.out.println("prevmodified"+fileCurrLastModified.getLastModified());
                    }
                if (filePrevLastModified == null
                        || filePrevLastModified.getLastModified() != fileCurrLastModified.getLastModified()) {
                    filePath = fileCurrLastModified.getFile().getAbsolutePath();
                    //logger.info("filePath"+filePath);
                    filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".sar"));
                    File sarDirectory = new File(filePath + "sar");
                    //logger.info("WARDIRECTORY="+warDirectory.getAbsolutePath());
                    if (sarDirectory.exists() && sarDirectory.isDirectory()) {
                        deleteDir(sarDirectory);
                        sarsDeployed.remove(fileName);
                        numberOfSarDeployed--;
                    }
                    try {
                        extractSar(fileCurrLastModified.getFile(), sarDirectory.getAbsolutePath());
                    } catch (IOException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    //System.out.println("War Deployed Successfully in path: "+fileCurrLastModified.getFile().getAbsolutePath());
                    numberOfSarDeployed++;
                    sarsDeployed.add(fileName);
                    logger.info(filePath + ".sar Deployed");
                }
            }
            keyset = filePrevMap.keySet();
            ite = keyset.iterator();
            while (ite.hasNext()) {
                fileName = (String) ite.next();
                filePrevLastModified = (FileInfo) filePrevMap.get(fileName);
                fileCurrLastModified = (FileInfo) fileCurrMap.get(fileName);
                if (fileCurrLastModified == null) {
                    filePath = filePrevLastModified.getFile().getAbsolutePath();
                    filePath = filePath.substring(0, filePath.toLowerCase().lastIndexOf(".sar"));
                    logger.info("filePath" + filePath);
                    File deleteDirectory = new File(filePath + "sar");
                    deleteDir(deleteDirectory);
                    numberOfSarDeployed--;
                    sarsDeployed.remove(fileName);
                    logger.info(filePath + ".sar Undeployed");
                }
            }
            filePrevMap.keySet().removeAll(filePrevMap.keySet());
            filePrevMap.putAll(fileCurrMap);
            fileCurrMap.keySet().removeAll(fileCurrMap.keySet());
            //System.out.println("filePrevMap="+filePrevMap);
            //System.out.println("fileCurrMap="+fileCurrMap);
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        } catch (Exception ex) {
            ex.printStackTrace();
            System.out.println("Sar Deployed");
        }
    }
}
From source file:org.apache.ambari.controller.Clusters.java
private synchronized void updateClusterNodesReservation(String clusterName, ClusterDefinition clsDef)
        throws Exception {
    ConcurrentHashMap<String, Node> all_nodes = nodes.getNodes();
    List<String> cluster_node_range = new ArrayList<String>();
    cluster_node_range.addAll(getHostnamesFromRangeExpressions(clsDef.getNodes()));

    /*
     * Reserve the nodes as specified in the node range expressions
     * -- throw exception, if any nodes are pre-associated with other cluster
     */
    List<String> nodes_currently_allocated_to_cluster = new ArrayList<String>();
    for (Node n : nodes.getNodes().values()) {
        if (n.getNodeState().getClusterName() != null
                && n.getNodeState().getClusterName().equals(clusterName)) {
            nodes_currently_allocated_to_cluster.add(n.getName());
        }
    }

    List<String> nodes_to_allocate = new ArrayList<String>(cluster_node_range);
    nodes_to_allocate.removeAll(nodes_currently_allocated_to_cluster);
    List<String> nodes_to_deallocate = new ArrayList<String>(nodes_currently_allocated_to_cluster);
    nodes_to_deallocate.removeAll(cluster_node_range);

    /*
     * Check for any nodes that are allocated to other cluster
     */
    List<String> preallocatedhosts = new ArrayList<String>();
    for (String n : nodes_to_allocate) {
        if (all_nodes.containsKey(n) && (all_nodes.get(n).getNodeState().getClusterName() != null
                || all_nodes.get(n).getNodeState().getAllocatedToCluster())) {
            preallocatedhosts.add(n);
        }
    }

    /*
     * Throw exception, if some of the hosts are already allocated to other cluster
     */
    if (!preallocatedhosts.isEmpty()) {
        /*
         * TODO: Return invalid request code and return list of preallocated nodes as a part of
         * response element
         */
        String msg = "Some of the nodes specified for the cluster roles are allocated to other cluster: ["
                + preallocatedhosts + "]";
        throw new WebApplicationException((new ExceptionResponse(msg, Response.Status.CONFLICT)).get());
    }

    /*
     * Allocate nodes to given cluster
     */
    for (String node_name : nodes_to_allocate) {
        if (all_nodes.containsKey(node_name)) {
            // Set the cluster name in the node
            synchronized (all_nodes.get(node_name)) {
                all_nodes.get(node_name).reserveNodeForCluster(clusterName, true);
            }
        } else {
            Date epoch = new Date(0);
            nodes.checkAndUpdateNode(node_name, epoch);
            Node node = nodes.getNode(node_name);
            /*
             * TODO: Set agentInstalled = true, unless controller uses SSH to setup the agent
             */
            node.reserveNodeForCluster(clusterName, true);
        }
    }

    /*
     * Deallocate nodes from a given cluster
     * TODO: Node agent would asynchronously clean up the node and notify it through heartbeat which
     * would reset the clusterID associated with node
     */
    for (String node_name : nodes_to_deallocate) {
        if (all_nodes.containsKey(node_name)) {
            synchronized (all_nodes.get(node_name)) {
                all_nodes.get(node_name).releaseNodeFromCluster();
            }
        }
    }
}
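Note how the example synchronizes on the value returned by get() (synchronized (all_nodes.get(node_name))), so updates to one node do not block work on other nodes. A minimal hedged sketch of that per-entry locking idea, assuming the same value instance stays in the map while it is locked (PerEntryLockDemo, Node and reserve are illustrative names):

import java.util.concurrent.ConcurrentHashMap;

public class PerEntryLockDemo {
    /** Illustrative value type; the cluster field is guarded by the instance's monitor. */
    static final class Node {
        String cluster;
    }

    private final ConcurrentHashMap<String, Node> nodes = new ConcurrentHashMap<>();

    public void reserve(String nodeName, String clusterName) {
        Node node = nodes.get(nodeName);  // lock-free lookup
        if (node == null) {
            return;                       // unknown node; the real code registers it instead
        }
        synchronized (node) {             // serialize updates to this one entry only
            if (node.cluster == null) {
                node.cluster = clusterName;
            }
        }
    }
}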
From source file:org.wrml.runtime.format.text.html.WrmldocFormatter.java
protected ArrayNode buildReferencesArrayNode(final ObjectMapper objectMapper,
        final Map<URI, ObjectNode> schemaNodes, final Map<URI, LinkRelation> linkRelationCache,
        final Resource resource, final Prototype defaultPrototype) {

    final Context context = getContext();
    final SchemaLoader schemaLoader = context.getSchemaLoader();
    final SyntaxLoader syntaxLoader = context.getSyntaxLoader();

    final URI defaultSchemaUri = (defaultPrototype != null) ? defaultPrototype.getSchemaUri() : null;
    final String defaultSchemaName = (defaultPrototype != null)
            ? defaultPrototype.getUniqueName().getLocalName() : null;

    final ArrayNode referencesNode = objectMapper.createArrayNode();
    final ConcurrentHashMap<URI, LinkTemplate> referenceTemplates = resource.getReferenceTemplates();
    final Set<URI> referenceRelationUris = referenceTemplates.keySet();

    if (referenceTemplates != null && !referenceTemplates.isEmpty()) {
        String selfResponseSchemaName = null;

        List<String> resourceParameterList = null;
        final UriTemplate uriTemplate = resource.getUriTemplate();
        final String[] parameterNames = uriTemplate.getParameterNames();
        if (parameterNames != null && parameterNames.length > 0) {
            resourceParameterList = new ArrayList<>();
            for (int i = 0; i < parameterNames.length; i++) {
                final String parameterName = parameterNames[i];
                URI keyedSchemaUri = null;

                if (defaultPrototype != null) {
                    final Set<String> allKeySlotNames = defaultPrototype.getAllKeySlotNames();
                    if (allKeySlotNames != null && allKeySlotNames.contains(parameterName)) {
                        keyedSchemaUri = defaultSchemaUri;
                    }
                }

                if (keyedSchemaUri == null) {
                    final Set<URI> referenceLinkRelationUris = resource.getReferenceLinkRelationUris(Method.Get);
                    if (referenceLinkRelationUris != null && !referenceLinkRelationUris.isEmpty()) {
                        for (URI linkRelationUri : referenceLinkRelationUris) {
                            final LinkTemplate referenceTemplate = referenceTemplates.get(linkRelationUri);
                            final URI responseSchemaUri = referenceTemplate.getResponseSchemaUri();
                            final Prototype responseSchemaPrototype = schemaLoader.getPrototype(responseSchemaUri);
                            if (responseSchemaPrototype != null) {
                                final Set<String> allKeySlotNames = responseSchemaPrototype.getAllKeySlotNames();
                                if (allKeySlotNames != null && allKeySlotNames.contains(parameterName)) {
                                    keyedSchemaUri = responseSchemaUri;
                                    break;
                                }
                            }
                        }
                    }
                }

                String parameterTypeString = "?";

                if (keyedSchemaUri != null) {
                    final Prototype keyedPrototype = schemaLoader.getPrototype(keyedSchemaUri);
                    final ProtoSlot keyProtoSlot = keyedPrototype.getProtoSlot(parameterName);
                    if (keyProtoSlot instanceof PropertyProtoSlot) {
                        final PropertyProtoSlot keyPropertyProtoSlot = (PropertyProtoSlot) keyProtoSlot;
                        final ValueType parameterValueType = keyPropertyProtoSlot.getValueType();
                        final Type parameterHeapType = keyPropertyProtoSlot.getHeapValueType();
                        switch (parameterValueType) {
                        case Text: {
                            if (!String.class.equals(parameterHeapType)) {
                                final Class<?> syntaxClass = (Class<?>) parameterHeapType;
                                parameterTypeString = syntaxClass.getSimpleName();
                            } else {
                                parameterTypeString = parameterValueType.name();
                            }
                            break;
                        }
                        case SingleSelect: {
                            final Class<?> choicesEnumClass = (Class<?>) parameterHeapType;
                            if (choicesEnumClass.isEnum()) {
                                parameterTypeString = choicesEnumClass.getSimpleName();
                            } else {
                                // ?
                                parameterTypeString = parameterValueType.name();
                            }
                            break;
                        }
                        default: {
                            parameterTypeString = parameterValueType.name();
                            break;
                        }
                        }
                    }
                }

                resourceParameterList.add(parameterTypeString + " " + parameterName);
            }
        }

        for (final Method method : Method.values()) {
            for (final URI linkRelationUri : referenceRelationUris) {
                final LinkTemplate referenceTemplate = referenceTemplates.get(linkRelationUri);
                final LinkRelation linkRelation = getLinkRelation(linkRelationCache, linkRelationUri);
                if (method != linkRelation.getMethod()) {
                    continue;
                }

                final ObjectNode referenceNode = objectMapper.createObjectNode();
                referencesNode.add(referenceNode);

                referenceNode.put(PropertyName.method.name(), method.getProtocolGivenName());
                referenceNode.put(PropertyName.rel.name(), syntaxLoader.formatSyntaxValue(linkRelationUri));

                final String relationTitle = linkRelation.getTitle();
                referenceNode.put(PropertyName.relationTitle.name(), relationTitle);

                final URI responseSchemaUri = referenceTemplate.getResponseSchemaUri();
                String responseSchemaName = null;
                if (responseSchemaUri != null) {
                    final ObjectNode responseSchemaNode = getSchemaNode(objectMapper, schemaNodes,
                            responseSchemaUri, schemaLoader);
                    referenceNode.put(PropertyName.responseSchema.name(), responseSchemaNode);
                    responseSchemaName = responseSchemaNode
                            .get(SchemaDesignFormatter.PropertyName.localName.name()).asText();
                }

                final URI requestSchemaUri = referenceTemplate.getRequestSchemaUri();
                String requestSchemaName = null;
                if (requestSchemaUri != null) {
                    final ObjectNode requestSchemaNode = getSchemaNode(objectMapper, schemaNodes,
                            requestSchemaUri, schemaLoader);
                    referenceNode.put(PropertyName.requestSchema.name(), requestSchemaNode);
                    requestSchemaName = requestSchemaNode
                            .get(SchemaDesignFormatter.PropertyName.localName.name()).asText();
                }

                final StringBuilder signatureBuilder = new StringBuilder();
                if (responseSchemaName != null) {
                    signatureBuilder.append(responseSchemaName);
                } else {
                    signatureBuilder.append("void");
                }
                signatureBuilder.append(" ");

                String functionName = relationTitle;
                if (SystemLinkRelation.self.getUri().equals(linkRelationUri)) {
                    functionName = "get" + responseSchemaName;
                    selfResponseSchemaName = responseSchemaName;
                } else if (SystemLinkRelation.save.getUri().equals(linkRelationUri)) {
                    functionName = "save" + responseSchemaName;
                } else if (SystemLinkRelation.delete.getUri().equals(linkRelationUri)) {
                    functionName = "delete";
                    if (defaultSchemaName != null) {
                        functionName += defaultSchemaName;
                    } else if (selfResponseSchemaName != null) {
                        functionName += selfResponseSchemaName;
                    }
                }

                signatureBuilder.append(functionName).append(" ( ");

                String parameterString = null;
                if (resourceParameterList != null) {
                    final StringBuilder parameterStringBuilder = new StringBuilder();
                    final int parameterCount = resourceParameterList.size();
                    for (int i = 0; i < parameterCount; i++) {
                        final String parameter = resourceParameterList.get(i);
                        parameterStringBuilder.append(parameter);
                        if (i < parameterCount - 1) {
                            parameterStringBuilder.append(" , ");
                        }
                    }
                    parameterString = parameterStringBuilder.toString();
                    signatureBuilder.append(parameterString);
                }

                if (requestSchemaName != null) {
                    if (StringUtils.isNotBlank(parameterString)) {
                        signatureBuilder.append(" , ");
                    }
                    signatureBuilder.append(requestSchemaName);
                    signatureBuilder.append(" ");
                    final String parameterName = Character.toLowerCase(requestSchemaName.charAt(0))
                            + requestSchemaName.substring(1);
                    signatureBuilder.append(parameterName);
                }

                signatureBuilder.append(" ) ");

                final String signature = signatureBuilder.toString();
                referenceNode.put(PropertyName.signature.name(), signature);
            }
        }
    }

    return referencesNode;
}
From source file:hu.sztaki.lpds.pgportal.services.asm.ASMService.java
private JobStatisticsBean getWorkflowStatistics(String userID, String workflowID) {
    JobStatisticsBean statBean = new JobStatisticsBean();
    int finishedjobs = 0;
    int errorjobs = 0;
    // getting jobs statuses
    String runtimeID = getRuntimeID(userID, workflowID);
    if (runtimeID != null) {
        // setting number of finished/error jobs!!!!
        ConcurrentHashMap<String, WorkflowData> workflows = PortalCacheService.getInstance().getUser(userID)
                .getWorkflows();
        WorkflowData wrk_data = workflows.get(workflowID);
        long finishedJobNumber = wrk_data.getFinishedStatus();
        long submittedJobNumber = wrk_data.getSubmittedStatus();
        long errorJobNumber = wrk_data.getErrorStatus(); // errorstatus fails!!!!!
        long runningJobNumber = wrk_data.getRunningStatus();
        // long estimatedJobNumber =
        //     wrk_data.getErrorStatus()+wrk_data.getFinishedStatus()+wrk_data.getRunningStatus()+wrk_data.getSubmittedStatus()+wrk_data.getSuspendStatus();
        statBean.setErrorJobs(errorJobNumber);
        statBean.setSubmittedJobs(submittedJobNumber);
        statBean.setRunningJobs(runningJobNumber);
        statBean.setFinishedJobs(finishedJobNumber);
    }
    return statBean;
}
From source file:com.opengamma.util.test.DbTool.java
/**
 * @return db_name => version_number => (create_script, migrate_script)
 */
public Map<String, Map<Integer, Pair<File, File>>> getScriptDirs() {

    Map<String, ConcurrentHashMap<Integer, File>> createScripts = new ConcurrentHashMap<String, ConcurrentHashMap<Integer, File>>() {
        private static final long serialVersionUID = 1L;

        @Override
        public ConcurrentHashMap<Integer, File> get(Object key) {
            super.putIfAbsent((String) key, new ConcurrentHashMap<Integer, File>());
            return super.get(key);
        }
    };

    Map<String, ConcurrentHashMap<Integer, File>> migrateScripts = new ConcurrentHashMap<String, ConcurrentHashMap<Integer, File>>() {
        private static final long serialVersionUID = 1L;

        @Override
        public ConcurrentHashMap<Integer, File> get(Object key) {
            super.putIfAbsent((String) key, new ConcurrentHashMap<Integer, File>());
            return super.get(key);
        }
    };

    for (String scriptDir : _dbScriptDirs) {

        Map<File, Map<Integer, File>> scripts1 = getScripts(
                new File(scriptDir, DATABASE_CREATE_FOLDER + File.separatorChar + _dialect.getDatabaseName()),
                CREATE_SCRIPT_PATTERN);

        for (Map.Entry<File, Map<Integer, File>> dbFolder2versionedScripts : scripts1.entrySet()) {
            File dbFolder = dbFolder2versionedScripts.getKey();
            createScripts.get(dbFolder.getName()); // creates empty slot for dbFolder.getName()
            Map<Integer, File> versionedScripts = dbFolder2versionedScripts.getValue();
            for (Map.Entry<Integer, File> version2script : versionedScripts.entrySet()) {
                Integer version = version2script.getKey();
                File script = version2script.getValue();

                ConcurrentHashMap<Integer, File> createDbScripts = createScripts.get(dbFolder.getName());

                File prev = createDbScripts.putIfAbsent(version, script);
                if (prev != null) {
                    throw new OpenGammaRuntimeException("Can't add " + script.getAbsolutePath()
                            + " script. Version " + version + " already added by " + prev.getAbsolutePath()
                            + " script.");
                }
            }
        }

        Map<File, Map<Integer, File>> scripts2 = getScripts(
                new File(scriptDir, DATABASE_MIGRATE_FOLDER + File.separatorChar + _dialect.getDatabaseName()),
                MIGRATE_SCRIPT_PATTERN);

        for (Map.Entry<File, Map<Integer, File>> dbFolder2versionedScripts : scripts2.entrySet()) {
            File dbFolder = dbFolder2versionedScripts.getKey();
            migrateScripts.get(dbFolder.getName()); // creates empty slot for dbFolder.getName()
            Map<Integer, File> versionedScripts = dbFolder2versionedScripts.getValue();
            for (Map.Entry<Integer, File> version2script : versionedScripts.entrySet()) {
                Integer version = version2script.getKey();
                File script = version2script.getValue();

                ConcurrentHashMap<Integer, File> migrateDbScripts = migrateScripts.get(dbFolder.getName());
                File prev = migrateDbScripts.putIfAbsent(version, script);
                if (prev != null) {
                    throw new OpenGammaRuntimeException("Can't add " + script.getAbsolutePath()
                            + " script. Version " + version + " already added by " + prev.getAbsolutePath()
                            + " script.");
                }
            }
        }
    }

    Set<String> migrateDbDirs = migrateScripts.keySet();
    Set<String> createDbDirs = createScripts.keySet();

    Set<String> unmatchedCreateDbDirs = difference(migrateDbDirs, createDbDirs);
    if (unmatchedCreateDbDirs.size() > 0) {
        StringBuilder errorMessage = new StringBuilder();
        for (String unmatchedCreateDbDir : unmatchedCreateDbDirs) {
            errorMessage.append("There is no corresponding create db directory for migrate one: "
                    + unmatchedCreateDbDir + "\n");
        }
        throw new OpenGammaRuntimeException(errorMessage.toString());
    }

    Set<String> unmatchedMigrateDbDirs = difference(createDbDirs, migrateDbDirs);
    if (unmatchedMigrateDbDirs.size() > 0) {
        StringBuilder errorMessage = new StringBuilder();
        for (String unmatchedMigrateDbDir : unmatchedMigrateDbDirs) {
            errorMessage.append("There is no corresponding migrate db directory for create one: "
                    + unmatchedMigrateDbDir + "\n");
        }
        throw new OpenGammaRuntimeException(errorMessage.toString());
    }

    final Map<String, Map<Integer, Pair<File, File>>> scripts = new ConcurrentHashMap<String, Map<Integer, Pair<File, File>>>() {
        private static final long serialVersionUID = 1L;

        @Override
        public Map<Integer, Pair<File, File>> get(Object key) {
            super.putIfAbsent((String) key, new ConcurrentHashMap<Integer, Pair<File, File>>());
            return super.get(key);
        }
    };

    for (String dir : migrateDbDirs) {
        ConcurrentHashMap<Integer, File> versionedCreateScripts = createScripts.get(dir);
        ConcurrentHashMap<Integer, File> versionedMigrateScripts = migrateScripts.get(dir);

        Set<Integer> migrateVersions = versionedCreateScripts.keySet();
        // Set<Integer> createVersions = versionedMigrateScripts.keySet();
        //
        // Set<Integer> unmatchedCreateVersions = difference(migrateVersions, createVersions);
        // if (unmatchedCreateVersions.size() > 0) {
        //     StringBuilder errorMessage = new StringBuilder();
        //     for (Integer unmatchedCreateVersion : unmatchedCreateVersions) {
        //         errorMessage.append("There is no corresponding version of create script for the migrate one: " + DATABASE_CRAETE_SCRIPT_PREFIX + unmatchedCreateVersion + "\n");
        //     }
        //     throw new OpenGammaRuntimeException(errorMessage.toString());
        // }
        //
        // Set<Integer> unmatchedMigrateVersions = difference(createVersions, migrateVersions);
        // if (unmatchedMigrateVersions.size() > 0) {
        //     StringBuilder errorMessage = new StringBuilder();
        //     for (Integer unmatchedMigrateVersion : unmatchedMigrateVersions) {
        //         errorMessage.append("There is no corresponding version of migrate script for the create one: " + DATABASE_MIGRATE_SCRIPT_PREFIX + unmatchedMigrateVersion + "\n");
        //     }
        //     throw new OpenGammaRuntimeException(errorMessage.toString());
        // }

        for (Integer version : migrateVersions) {
            File createScript = versionedCreateScripts.get(version);
            File migrateScript = versionedMigrateScripts.get(version);
            scripts.get(dir).put(version, Pair.of(createScript, migrateScript));
        }
    }

    if (scripts.isEmpty()) {
        throw new OpenGammaRuntimeException("No script directories found: " + _dbScriptDirs);
    }
    return scripts;
}
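The anonymous ConcurrentHashMap subclasses above override get() so that asking for a missing database name silently creates an empty inner map. On Java 8+ the same effect is usually written with computeIfAbsent, which avoids subclassing and performs the check-and-insert atomically. A hedged sketch of that alternative (ScriptIndexSketch and its methods are illustrative and not part of the original source; IllegalStateException stands in for the project's own exception type):

import java.io.File;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class ScriptIndexSketch {
    private final Map<String, ConcurrentHashMap<Integer, File>> createScripts = new ConcurrentHashMap<>();

    /** Returns the per-database version map, creating an empty one on first use. */
    public ConcurrentHashMap<Integer, File> scriptsFor(String dbName) {
        return createScripts.computeIfAbsent(dbName, name -> new ConcurrentHashMap<>());
    }

    public void addCreateScript(String dbName, Integer version, File script) {
        File prev = scriptsFor(dbName).putIfAbsent(version, script);
        if (prev != null) {
            throw new IllegalStateException("Version " + version + " already registered by " + prev);
        }
    }
}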