List of usage examples for java.util.concurrent ConcurrentHashMap keySet
Method signature: KeySetView keySet()
To view the source code for java.util.concurrent ConcurrentHashMap keySet, click the Source Link below each example.
From source file:com.opengamma.util.test.DbTool.java
/** * * @return db_name => version_number => (create_script, migrate_script) *//*from www .j av a 2 s. c om*/ public Map<String, Map<Integer, Pair<File, File>>> getScriptDirs() { Map<String, ConcurrentHashMap<Integer, File>> createScripts = new ConcurrentHashMap<String, ConcurrentHashMap<Integer, File>>() { private static final long serialVersionUID = 1L; @Override public ConcurrentHashMap<Integer, File> get(Object key) { super.putIfAbsent((String) key, new ConcurrentHashMap<Integer, File>()); return super.get(key); } }; Map<String, ConcurrentHashMap<Integer, File>> migrateScripts = new ConcurrentHashMap<String, ConcurrentHashMap<Integer, File>>() { private static final long serialVersionUID = 1L; @Override public ConcurrentHashMap<Integer, File> get(Object key) { super.putIfAbsent((String) key, new ConcurrentHashMap<Integer, File>()); return super.get(key); } }; for (String scriptDir : _dbScriptDirs) { Map<File, Map<Integer, File>> scripts1 = getScripts( new File(scriptDir, DATABASE_CREATE_FOLDER + File.separatorChar + _dialect.getDatabaseName()), CREATE_SCRIPT_PATTERN); for (Map.Entry<File, Map<Integer, File>> dbFolder2versionedScripts : scripts1.entrySet()) { File dbFolder = dbFolder2versionedScripts.getKey(); createScripts.get(dbFolder.getName()); // creates empty slot for dbFolder.getName() Map<Integer, File> versionedScripts = dbFolder2versionedScripts.getValue(); for (Map.Entry<Integer, File> version2script : versionedScripts.entrySet()) { Integer version = version2script.getKey(); File script = version2script.getValue(); ConcurrentHashMap<Integer, File> createDbScripts = createScripts.get(dbFolder.getName()); File prev = createDbScripts.putIfAbsent(version, script); if (prev != null) { throw new OpenGammaRuntimeException( "Can't add " + script.getAbsolutePath() + " script. 
Version " + version + " already added by " + prev.getAbsolutePath() + " script."); } } } Map<File, Map<Integer, File>> scripts2 = getScripts( new File(scriptDir, DATABASE_MIGRATE_FOLDER + File.separatorChar + _dialect.getDatabaseName()), MIGRATE_SCRIPT_PATTERN); for (Map.Entry<File, Map<Integer, File>> dbFolder2versionedScripts : scripts2.entrySet()) { File dbFolder = dbFolder2versionedScripts.getKey(); migrateScripts.get(dbFolder.getName()); // creates empty slot for dbFolder.getName() Map<Integer, File> versionedScripts = dbFolder2versionedScripts.getValue(); for (Map.Entry<Integer, File> version2script : versionedScripts.entrySet()) { Integer version = version2script.getKey(); File script = version2script.getValue(); ConcurrentHashMap<Integer, File> migrateDbScripts = migrateScripts.get(dbFolder.getName()); File prev = migrateDbScripts.putIfAbsent(version, script); if (prev != null) { throw new OpenGammaRuntimeException( "Can't add " + script.getAbsolutePath() + " script. Version " + version + " already added by " + prev.getAbsolutePath() + " script."); } } } } Set<String> migrateDbDirs = migrateScripts.keySet(); Set<String> createDbDirs = createScripts.keySet(); Set<String> unmatchedCreateDbDirs = difference(migrateDbDirs, createDbDirs); if (unmatchedCreateDbDirs.size() > 0) { StringBuilder errorMessage = new StringBuilder(); for (String unmatchedCreateDbDir : unmatchedCreateDbDirs) { errorMessage.append("There is no corresponding create db directory for migrate one: " + unmatchedCreateDbDir + "\n"); } throw new OpenGammaRuntimeException(errorMessage.toString()); } Set<String> unmatchedMigrateDbDirs = difference(createDbDirs, migrateDbDirs); if (unmatchedMigrateDbDirs.size() > 0) { StringBuilder errorMessage = new StringBuilder(); for (String unmatchedMigrateDbDir : unmatchedMigrateDbDirs) { errorMessage.append("There is no corresponding migrate db directory for create one: " + unmatchedMigrateDbDir + "\n"); } throw new 
OpenGammaRuntimeException(errorMessage.toString()); } final Map<String, Map<Integer, Pair<File, File>>> scripts = new ConcurrentHashMap<String, Map<Integer, Pair<File, File>>>() { private static final long serialVersionUID = 1L; @Override public Map<Integer, Pair<File, File>> get(Object key) { super.putIfAbsent((String) key, new ConcurrentHashMap<Integer, Pair<File, File>>()); return super.get(key); } }; for (String dir : migrateDbDirs) { ConcurrentHashMap<Integer, File> versionedCreateScripts = createScripts.get(dir); ConcurrentHashMap<Integer, File> versionedMigrateScripts = migrateScripts.get(dir); Set<Integer> migrateVersions = versionedCreateScripts.keySet(); // Set<Integer> createVersions = versionedMigrateScripts.keySet(); // // Set<Integer> unmatchedCreateVersions = difference(migrateVersions, createVersions); // if (unmatchedCreateVersions.size() > 0) { // StringBuilder errorMessage = new StringBuilder(); // for (Integer unmatchedCreateVersion : unmatchedCreateVersions) { // errorMessage.append("There is no corresponding version of create script for the migrate one: " + DATABASE_CRAETE_SCRIPT_PREFIX + unmatchedCreateVersion + "\n"); // } // throw new OpenGammaRuntimeException(errorMessage.toString()); // } // // Set<Integer> unmatchedMigrateVersions = difference(createVersions, migrateVersions); // if (unmatchedMigrateVersions.size() > 0) { // StringBuilder errorMessage = new StringBuilder(); // for (Integer unmatchedMigrateVersion : unmatchedMigrateVersions) { // errorMessage.append("There is no corresponding version of migrate script for the create one: " + DATABASE_MIGRATE_SCRIPT_PREFIX + unmatchedMigrateVersion + "\n"); // } // throw new OpenGammaRuntimeException(errorMessage.toString()); // } for (Integer version : migrateVersions) { File createScript = versionedCreateScripts.get(version); File migrateScript = versionedMigrateScripts.get(version); scripts.get(dir).put(version, Pair.of(createScript, migrateScript)); } } if (scripts.isEmpty()) { throw 
new OpenGammaRuntimeException("No script directories found: " + _dbScriptDirs); } return scripts; }
From source file:cn.leancloud.diamond.client.impl.DefaultDiamondSubscriber.java
/**
 * Builds the set of dataId/group pairs for which the mock server reports an
 * updated configuration.
 *
 * @return set of {@code dataId + WORD_SEPARATOR + group} strings, one per
 *         cached (dataId, group) combination that has a pending update
 */
private Set<String> testData() {
    Set<String> dataIdList = new HashSet<String>();
    // entrySet() does one traversal; the original keySet()+get(dataId) did a
    // second hash lookup per key and could NPE if another thread removed the
    // entry between the two calls on this concurrent map.
    for (java.util.Map.Entry<String, ConcurrentHashMap<String, CacheData>> dataEntry : this.cache.entrySet()) {
        String dataId = dataEntry.getKey();
        for (String group : dataEntry.getValue().keySet()) {
            if (null != MockServer.getUpdateConfigInfo(dataId, group)) {
                dataIdList.add(dataId + Constants.WORD_SEPARATOR + group);
            }
        }
    }
    return dataIdList;
}
From source file:com.web.server.EARDeployer.java
/**
 * Deploys an EAR archive: extracts contained WAR files via the war deployer,
 * builds one VFS class loader per contained JAR, exports classes annotated with
 * {@code RemoteCall} to the RMI registry, and registers methods annotated with
 * {@code ExecutorServiceAnnot} in the executor-service map.
 *
 * @param earFileName name of the EAR file being deployed
 * @param earFile     VFS file object for the EAR archive
 * @param fsManager   VFS file system manager used to resolve archive entries
 * @throws FileSystemException declared for VFS access errors; NOTE(review): the
 *                             whole body is wrapped in a catch-all that only
 *                             prints, so failures are effectively swallowed
 */
public void deployExecutorServicesEar(String earFileName, FileObject earFile,
        StandardFileSystemManager fsManager) throws FileSystemException {
    try {
        System.out.println("EARFILE NAMEs=" + earFileName);
        CopyOnWriteArrayList<FileObject> fileObjects = new CopyOnWriteArrayList<FileObject>();
        CopyOnWriteArrayList<FileObject> warObjects = new CopyOnWriteArrayList<FileObject>();
        ConcurrentHashMap jarClassListMap = new ConcurrentHashMap();
        CopyOnWriteArrayList<String> classList;
        // Walk the EAR once: fills fileObjects (library entries), jarClassListMap
        // (jar -> contained class entry names) and warObjects (contained wars).
        obtainUrls(earFile, earFile, fileObjects, jarClassListMap, warObjects, fsManager);
        // Shared parent loader holding the EAR's library jars.
        VFSClassLoader customClassLoaderBaseLib = new VFSClassLoader(
                fileObjects.toArray(new FileObject[fileObjects.size()]), fsManager,
                Thread.currentThread().getContextClassLoader());
        VFSClassLoader customClassLoader = null;
        Set keys = jarClassListMap.keySet();
        Iterator key = keys.iterator();
        FileObject jarFileObject;
        // Per-EAR registry: archive entry name -> deployed path, stored in filesMap.
        ConcurrentHashMap classLoaderPath = new ConcurrentHashMap();
        filesMap.put(earFileName, classLoaderPath);
        for (FileObject warFileObj : warObjects) {
            if (warFileObj.getName().getBaseName().endsWith(".war")) {
                //logger.info("filePath"+filePath);
                String filePath = scanDirectory + "/" + warFileObj.getName().getBaseName();
                log.info(filePath);
                String fileName = warFileObj.getName().getBaseName();
                WebClassLoader classLoader = new WebClassLoader(new URL[] {});
                log.info(classLoader);
                // Wipe and recreate the exploded deployment directory for this war.
                warDeployer.deleteDir(new File(deployDirectory + "/"
                        + fileName.substring(0, fileName.lastIndexOf(".war"))));
                new File(deployDirectory + "/" + fileName.substring(0, fileName.lastIndexOf(".war"))).mkdirs();
                log.info(deployDirectory + "/" + fileName.substring(0, fileName.lastIndexOf(".war")));
                urlClassLoaderMap.put(
                        deployDirectory + "/" + fileName.substring(0, fileName.lastIndexOf(".war")),
                        classLoader);
                classLoaderPath.put(warFileObj.getName().getBaseName(),
                        deployDirectory + "/" + fileName.substring(0, fileName.lastIndexOf(".war")));
                warDeployer.extractWar(new File(filePath), classLoader);
                // Restart the single-threaded rescan task after each war extraction.
                if (exec != null) {
                    exec.shutdown();
                }
                new File(scanDirectory + "/" + warFileObj.getName().getBaseName()).delete();
                exec = Executors.newSingleThreadScheduledExecutor();
                exec.scheduleAtFixedRate(task, 0, 1000, TimeUnit.MILLISECONDS);
            }
        }
        // One VFS class loader per jar in the EAR, parented by the shared lib loader.
        for (int keyCount = 0; keyCount < keys.size(); keyCount++) {
            jarFileObject = (FileObject) key.next();
            {
                classList = (CopyOnWriteArrayList<String>) jarClassListMap.get(jarFileObject);
                customClassLoader = new VFSClassLoader(jarFileObject, fsManager, customClassLoaderBaseLib);
                this.urlClassLoaderMap.put(
                        scanDirectory + "/" + earFileName + "/" + jarFileObject.getName().getBaseName(),
                        customClassLoader);
                classLoaderPath.put(jarFileObject.getName().getBaseName(),
                        scanDirectory + "/" + earFileName + "/" + jarFileObject.getName().getBaseName());
                for (int classCount = 0; classCount < classList.size(); classCount++) {
                    // "com/example/Foo.class" -> "com.example.Foo"
                    String classwithpackage = classList.get(classCount).substring(0,
                            classList.get(classCount).indexOf(".class"));
                    classwithpackage = classwithpackage.replace("/", ".");
                    // System.out.println("classList:"+classwithpackage.replace("/","."));
                    try {
                        // Nested/anonymous classes (name contains '$') are skipped.
                        if (!classwithpackage.contains("$")) {
                            /*System.out.println("EARFILE NAME="+fileName);
                              System.out.println(scanDirectory + "/" + fileName + "/" + jarFileObject.getName().getBaseName());
                              System.out.println(urlClassLoaderMap);*/
                            Class executorServiceClass = customClassLoader.loadClass(classwithpackage);
                            Annotation[] classServicesAnnot = executorServiceClass.getDeclaredAnnotations();
                            if (classServicesAnnot != null) {
                                for (int annotcount = 0; annotcount < classServicesAnnot.length; annotcount++) {
                                    if (classServicesAnnot[annotcount] instanceof RemoteCall) {
                                        // Export the class as an RMI remote object under its
                                        // declared service name.
                                        RemoteCall remoteCall = (RemoteCall) classServicesAnnot[annotcount];
                                        //registry.unbind(remoteCall.servicename());
                                        System.out.println(remoteCall.servicename().trim());
                                        try {
                                            // NOTE(review): exports a fresh instance and rebinds
                                            // TWICE; only the second binding survives — the reason
                                            // is not evident from this code, confirm intent.
                                            for (int count = 0; count < 2; count++) {
                                                RemoteInterface reminterface = (RemoteInterface) UnicastRemoteObject
                                                        .exportObject((Remote) executorServiceClass.newInstance(), 0);
                                                registry.rebind(remoteCall.servicename().trim(), reminterface);
                                            }
                                        } catch (Exception ex) {
                                            ex.printStackTrace();
                                        }
                                    }
                                }
                            }
                            // Register every @ExecutorServiceAnnot method in the
                            // executor-service map, keyed by its service name.
                            Method[] methods = executorServiceClass.getDeclaredMethods();
                            for (Method method : methods) {
                                Annotation[] annotations = method.getDeclaredAnnotations();
                                for (Annotation annotation : annotations) {
                                    if (annotation instanceof ExecutorServiceAnnot) {
                                        ExecutorServiceAnnot executorServiceAnnot = (ExecutorServiceAnnot) annotation;
                                        ExecutorServiceInfo executorServiceInfo = new ExecutorServiceInfo();
                                        executorServiceInfo.setExecutorServicesClass(executorServiceClass);
                                        executorServiceInfo.setMethod(method);
                                        executorServiceInfo.setMethodParams(method.getParameterTypes());
                                        // NOTE(review): a later class silently overwrites an
                                        // earlier registration under the same service name.
                                        executorServiceMap.put(executorServiceAnnot.servicename(), executorServiceInfo);
                                    }
                                }
                            }
                        }
                    } catch (Exception e) {
                        // Per-class failures are printed and the class skipped.
                        e.printStackTrace();
                    }
                }
            }
            jarFileObject.close();
        }
        for (FileObject fobject : fileObjects) {
            fobject.close();
        }
        System.out.println("Channel unlocked");
        earFile.close();
        fsManager.closeFileSystem(earFile.getFileSystem());
        // ClassLoaderUtil.closeClassLoader(customClassLoader);
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:com.app.server.WarDeployer.java
/**
 * Initializes the war deployer: wires the server configuration, builds the
 * Digester instances used to parse the executor-service, messaging-class and
 * web.xml rule files, and pre-creates the messaging registry entries (random
 * queues, round-robin queues and topics) declared in ./config/messaging.xml.
 *
 * @param serviceList  services registered with the server (raw Vector, stored as-is)
 * @param serverConfig server configuration; supplies the deploy directory
 * @param mbeanServer  JMX MBean server, stored for later use
 */
public void init(Vector serviceList, ServerConfig serverConfig, MBeanServer mbeanServer) {
    try {
        this.serviceList = serviceList;
        this.serverConfig = serverConfig;
        this.mbeanServer = mbeanServer;
        this.scanDirectory = serverConfig.getDeploydirectory();
        // Digester for executor-service deployment descriptors.
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/executorservices-config.xml")));
                } catch (Exception e) {
                    log.error("Could not able to load config xml rules ./config/executorservices-config.xml", e);
                    //e.printStackTrace();
                }
            }
        });
        serverdigester = serverdigesterLoader.newDigester();
    } catch (Exception e1) {
        log.error("Could not create digester executorservices-config.xml", e1);
        //e1.printStackTrace();
    }
    try {
        // Digester for messaging class rules.
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/messagingclass-rules.xml")));
                } catch (FileNotFoundException e) {
                    log.error("Could not able to load config xml rules ./config/messagingclass-rules.xml", e);
                    //e.printStackTrace();
                }
            }
        });
        messagedigester = serverdigesterLoader.newDigester();
        // Digester for the messaging configuration (messaging.xml) itself.
        DigesterLoader messagingdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/messagingconfig-rules.xml")));
                } catch (Exception e) {
                    log.error("Could not able to load xml config file ./config/messagingclass-rules.xml", e);
                    e.printStackTrace();
                }
            }
        });
        Digester messagingdigester = messagingdigesterLoader.newDigester();
        messagingElem = (MessagingElem) messagingdigester
                .parse(new InputSource(new FileInputStream("./config/messaging.xml")));
        // Pre-create an empty subscriber list in messagingClassMap for every
        // declared destination so later lookups never see a missing entry.
        // NOTE(review): the three loops below are near-identical and could share
        // a helper; kept verbatim here.
        synchronized (messagingElem) {
            ConcurrentHashMap randomQueue = messagingElem.randomQueue;
            Set<String> randomQueueSet = randomQueue.keySet();
            Iterator<String> ite = randomQueueSet.iterator();
            while (ite.hasNext()) {
                Queue queue = (Queue) randomQueue.get(ite.next());
                ConcurrentHashMap randomqueuemap = (ConcurrentHashMap) messagingClassMap.get("RandomQueue");
                if (randomqueuemap == null) {
                    randomqueuemap = new ConcurrentHashMap();
                    messagingClassMap.put("RandomQueue", randomqueuemap);
                }
                CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) randomqueuemap
                        .get(queue.getQueuename());
                if (randomqueuelist == null)
                    randomqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList());
            }
            ConcurrentHashMap roundrobinQueue = messagingElem.roundrobinQueue;
            Set<String> roundrobinQueueSet = roundrobinQueue.keySet();
            ite = roundrobinQueueSet.iterator();
            while (ite.hasNext()) {
                Queue queue = (Queue) roundrobinQueue.get(ite.next());
                ConcurrentHashMap roundrobinqueuemap = (ConcurrentHashMap) messagingClassMap
                        .get("RoundRobinQueue");
                if (roundrobinqueuemap == null) {
                    roundrobinqueuemap = new ConcurrentHashMap();
                    messagingClassMap.put("RoundRobinQueue", roundrobinqueuemap);
                }
                CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) roundrobinqueuemap
                        .get(queue.getQueuename());
                if (randomqueuelist == null)
                    roundrobinqueuemap.put(queue.getQueuename(), new CopyOnWriteArrayList());
            }
            ConcurrentHashMap topicMap = messagingElem.topicMap;
            Set<String> topicSet = topicMap.keySet();
            Iterator<String> iter = topicSet.iterator();
            while (iter.hasNext()) {
                Topic topic = (Topic) topicMap.get(iter.next());
                ConcurrentHashMap topicmap = (ConcurrentHashMap) messagingClassMap.get("Topic");
                if (topicmap == null) {
                    topicmap = new ConcurrentHashMap();
                    messagingClassMap.put("Topic", topicmap);
                }
                CopyOnWriteArrayList randomqueuelist = (CopyOnWriteArrayList) topicmap
                        .get(topic.getTopicname());
                if (randomqueuelist == null)
                    topicmap.put(topic.getTopicname(), new CopyOnWriteArrayList());
            }
            //log.info(messagingClassMap);
        }
    } catch (Exception e1) {
        log.error("", e1);
        //e1.printStackTrace();
    }
    try {
        // Digester for web.xml parsing rules.
        DigesterLoader serverdigesterLoader = DigesterLoader.newLoader(new FromXmlRulesModule() {
            protected void loadRules() {
                try {
                    loadXMLRules(new InputSource(new FileInputStream("./config/webxml-rules.xml")));
                } catch (FileNotFoundException e) {
                    log.error("could not able to load xml config rules ./config/webxml-rules.xml", e);
                    //e.printStackTrace();
                }
            }
        });
        webxmldigester = serverdigesterLoader.newDigester();
    } catch (Exception ex) {
        log.error("could not able to create web.xml digester", ex);
        // ex.printStackTrace();
    }
    log.info("initialized");
}
From source file:com.thoughtworks.go.server.service.PipelineConfigServicePerformanceTest.java
private void run(Runnable runnable, int numberOfRequests, final ConcurrentHashMap<String, Boolean> results) throws InterruptedException { Boolean finalResult = true;/*from ww w. j a v a 2s .c o m*/ LOGGER.info("Tests start now!"); final ArrayList<Thread> threads = new ArrayList<>(); for (int i = 0; i < numberOfRequests; i++) { Thread t = new Thread(runnable, "pipeline" + i); threads.add(t); } for (Thread t : threads) { Thread.sleep(1000 * (new Random().nextInt(3) + 1)); t.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { public void uncaughtException(Thread t, Throwable e) { LOGGER.error("Exception " + e + " from thread " + t); results.put(t.getName(), false); } }); t.start(); } for (Thread t : threads) { int i = threads.indexOf(t); if (i == (numberOfRequests - 1)) { // takeHeapDump(dumpDir, i); } t.join(); } for (String threadId : results.keySet()) { finalResult = results.get(threadId) && finalResult; } assertThat(finalResult, is(true)); }
From source file:com.ibm.crail.tools.CrailBenchmark.java
void locationMap() throws Exception { ConcurrentHashMap<String, String> locationMap = new ConcurrentHashMap<String, String>(); CrailUtils.parseMap(CrailConstants.LOCATION_MAP, locationMap); System.out.println("Parsing locationMap " + CrailConstants.LOCATION_MAP); for (String key : locationMap.keySet()) { System.out.println("key " + key + ", value " + locationMap.get(key)); }// w ww. j a v a 2s .c o m }
From source file:com.starit.diamond.client.impl.DefaultDiamondSubscriber.java
/**
 * Builds the set of dataId/group pairs for which the mock server reports an
 * updated configuration.
 *
 * @return set of {@code dataId + WORD_SEPARATOR + group} strings, one per
 *         cached (dataId, group) combination that has a pending update
 */
private Set<String> testData() {
    Set<String> dataIdList = new HashSet<String>();
    // entrySet() does one traversal; the original keySet()+get(dataId) did a
    // second hash lookup per key and could NPE if another thread removed the
    // entry between the two calls on this concurrent map.
    for (java.util.Map.Entry<String, ConcurrentHashMap<String, CacheData>> dataEntry : this.cache.entrySet()) {
        String dataId = dataEntry.getKey();
        for (String group : dataEntry.getValue().keySet()) {
            if (null != MockServer.getUpdateConfigInfo(dataId, group)) {
                dataIdList.add(dataId + WORD_SEPARATOR + group);
            }
        }
    }
    return dataIdList;
}
From source file:com.web.server.WebServer.java
/**
 * Handles one HTTP request on the accepted socket and writes the response.
 * <p>
 * Reads the request byte-by-byte until the CRLFCRLF header terminator, then the
 * body up to Content-Length; resolves or creates the SERVERSESSIONID session;
 * parses multipart or url-encoded bodies into request parameters; serves the
 * resource from the in-memory cache, from the servlet/executor pipeline, or
 * from disk, falling back to a 404 page; finally writes the response and closes
 * the socket. On IOException it signals the server's shutdown port.
 */
public void run() {
    byte[] response;
    byte[] content;
    byte[] uploadData = null;
    HttpHeaderClient httpHeaderClient = null;
    InputStream istream = null;
    OutputStream ostream = null;
    HttpHeaderServer serverParam = new HttpHeaderServer();
    StringBuffer buffer = new StringBuffer();
    String value;
    char c;
    String endvalue = "\r\n\r\n"; // HTTP header/body separator
    String urlFormEncoded;
    int responseCode;
    try {
        ////System.out.println("value=");
        istream = socket.getInputStream();
        BufferedInputStream bistr = new BufferedInputStream(istream);
        //socket.setReceiveBufferSize(10000);
        //System.out.println("value1=");
        int availbleStream;
        int totalBytRead = 0;
        int ch;
        ByteArrayOutputStream bytout = new ByteArrayOutputStream();    // raw header bytes
        ByteArrayOutputStream contentout = new ByteArrayOutputStream(); // body bytes
        //System.out.println(istream.available());
        int bytesRead;
        int endbytIndex = 0;     // how much of "\r\n\r\n" has been matched so far
        int contentbytIndex = 0; // body bytes consumed so far
        boolean httpHeaderEndFound = false;
        byte[] byt;
        // Single-pass read: match the CRLFCRLF terminator to split headers from
        // body, then keep reading until Content-Length bytes of body arrive.
        while ((ch = bistr.read()) != -1) {
            bytout.write(ch);
            if (!httpHeaderEndFound && (char) ch == endvalue.charAt(endbytIndex)) {
                endbytIndex++;
                if (endbytIndex == endvalue.length()) {
                    byt = bytout.toByteArray();
                    // Strip the 4 terminator bytes before parsing the headers.
                    value = new String(ObtainBytes(byt, 0, byt.length - 4));
                    //System.out.println(value);
                    httpHeaderClient = parseHttpHeaders(value);
                    httpHeaderEndFound = true;
                    bytout.close();
                    endbytIndex = 0;
                    // No Content-Length -> no body to read.
                    if (httpHeaderClient.getContentLength() == null)
                        break;
                }
            } else {
                endbytIndex = 0;
            }
            if (httpHeaderClient != null && httpHeaderEndFound) {
                contentout.write(ch);
                contentbytIndex++;
                if (httpHeaderClient.getContentLength() != null
                        && contentbytIndex >= Integer.parseInt(httpHeaderClient.getContentLength())) {
                    break;
                }
            }
            totalBytRead++;
        }
        /*while(totalBytRead==0){ while((ch = bistr.read())!=-1){ System.out.println((char)ch);
          ////System.out.println("availableStream="+availbleStream);
          bytarrayOutput.write(ch); totalBytRead++; } }*/
        // Empty request: close the socket and bail out.
        if (totalBytRead == 0) {
            System.out.println("Since byte is 0 sock and istream os closed");
            //istream.close();
            socket.close();
            return;
        }
        //istream.read(bt,0,9999999);
        System.out.println("bytes read");
        byte[] contentByte = contentout.toByteArray();
        contentout.close();
        //System.out.println("String="+new String(bt));
        /*int index=containbytes(bt,endvalue.getBytes()); if(index==-1)index=totalBytRead;
          value=new String(ObtainBytes(bt,0,index));*/
        System.out.println("value2=");
        // Session lookup: find an existing SERVERSESSIONID cookie, else create one.
        ConcurrentHashMap<String, HttpCookie> httpCookies = httpHeaderClient.getCookies();
        HttpSessionServer session = null;
        if (httpCookies != null) {
            Iterator<String> cookieNames = httpCookies.keySet().iterator();
            for (; cookieNames.hasNext();) {
                String cookieName = cookieNames.next();
                //System.out.println(cookieName+" "+httpCookies.get(cookieName).getValue());
                if (cookieName.equals("SERVERSESSIONID")) {
                    session = (HttpSessionServer) sessionObjects.get(httpCookies.get(cookieName).getValue());
                    httpHeaderClient.setSession(session);
                    //break;
                }
            }
        }
        //System.out.println("Session="+session);
        if (session == null) {
            HttpCookie cookie = new HttpCookie();
            cookie.setKey("SERVERSESSIONID");
            cookie.setValue(UUID.randomUUID().toString());
            // NOTE(review): httpCookies may be null here if getCookies() returned
            // null above — this put would then NPE; confirm getCookies() contract.
            httpCookies.put("SERVERSESSIONID", cookie);
            session = new HttpSessionServer();
            sessionObjects.put(cookie.getValue(), session);
            httpHeaderClient.setSession(session);
        }
        // Parse the request body into parameters according to its content type.
        if (httpHeaderClient.getContentType() != null
                && httpHeaderClient.getContentType().equals(HttpHeaderParamNames.MULTIPARTFORMDATAVALUE)) {
            ////System.out.println(new String(uploadData));
            ConcurrentHashMap paramMap = new MultipartFormData().parseContent(contentByte, httpHeaderClient);
            httpHeaderClient.setParameters(paramMap);
            ////logger.info(uploadData);
        } else if (httpHeaderClient.getContentType() != null
                && httpHeaderClient.getContentType().equals(HttpHeaderParamNames.URLENCODED)) {
            urlFormEncoded = new String(contentByte);
            ConcurrentHashMap paramMap = parseUrlEncoded(urlFormEncoded);
            httpHeaderClient.setParameters(paramMap);
        }
        ////logger.info(serverconfig.getDeploydirectory()+httpHeaderClient.getResourceToObtain());
        ////System.out.println("value3=");
        ////logger.info(new String(bt));
        serverParam.setContentType("text/html");
        URLDecoder decoder = new URLDecoder();
        System.out.println("content Length= " + socket);
        responseCode = 200;
        File file = new File(deployDirectory + decoder.decode(httpHeaderClient.getResourceToObtain()));
        // Resolution order: cache (if unmodified) -> servlet/executor pipeline ->
        // static file on disk (cached on success) -> 404.
        FileContent fileContent = (FileContent) cache.get(httpHeaderClient.getResourceToObtain());
        if (fileContent != null && file.lastModified() == fileContent.getLastModified()) {
            System.out.println("In cache");
            content = (byte[]) fileContent.getContent();
        } else {
            content = ObtainContentExecutor(deployDirectory, httpHeaderClient.getResourceToObtain(),
                    httpHeaderClient, serverdigester, urlClassLoaderMap, servletMapping, session);
            System.out.println("content Length2= " + content);
            if (content == null) {
                //System.out.println("In caching content");
                content = obtainContent(
                        deployDirectory + decoder.decode(httpHeaderClient.getResourceToObtain()));
                if (content != null) {
                    fileContent = new FileContent();
                    fileContent.setContent(content);
                    fileContent.setFileName(httpHeaderClient.getResourceToObtain());
                    fileContent.setLastModified(file.lastModified());
                    cache.put(httpHeaderClient.getResourceToObtain(), fileContent);
                }
            }
            ////System.out.println("value4=");
        }
        if (content == null) {
            responseCode = 404;
            content = ("<html><body><H1>The Request resource " + httpHeaderClient.resourceToObtain
                    + " Not Found</H1><body></html>").getBytes();
        }
        ////System.out.println("content Length3= ");
        serverParam.setContentLength("" + (content.length + 4));
        if (httpHeaderClient.getResourceToObtain().endsWith(".ico")) {
            serverParam.setContentType("image/png");
        }
        ////System.out.println("value5=");
        ////System.out.println("content Length4= ");
        response = formHttpResponseHeader(responseCode, serverParam, content, httpHeaderClient.getCookies());
        ////System.out.println("value6=");
        ostream = socket.getOutputStream();
        //logger.info("Response="+new String(response));
        //System.out.println("value6=");
        //logger.info("Response="+new String(response));
        ////System.out.println("content "+"Response="+new String(response));
        ostream.write(response);
        ostream.flush();
        ostream.close();
        socket.close();
    } catch (IOException e) {
        Socket socket;
        e.printStackTrace();
        //logger.error(e);
        // I/O failure: signal the server's shutdown port.
        try {
            socket = new Socket("localhost", shutdownPort);
            OutputStream outputStream = socket.getOutputStream();
            outputStream.write("shutdown WebServer\r\n\r\n".getBytes());
            outputStream.close();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        e.printStackTrace();
    } catch (NumberFormatException e) {
        // Bad Content-Length header.
        e.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
From source file:hu.sztaki.lpds.pgportal.services.asm.ASMService.java
/**
 * Returns the identifier of an arbitrary runtime instance of the given user's
 * workflow, or {@code null} when the workflow has no runtime instances.
 *
 * @param userID     portal user identifier
 * @param workflowID workflow identifier within that user's cache
 * @return a runtime instance id, or null if none exist
 */
public String getRuntimeID(String userID, String workflowID) {
    ConcurrentHashMap runtimes = ((ConcurrentHashMap) PortalCacheService.getInstance().getUser(userID)
            .getWorkflow(workflowID).getAllRuntimeInstance());
    // hasNext()/next() on the weakly consistent iterator replaces the original
    // size() > 0 pre-check, which could race with a concurrent removal and make
    // keySet().iterator().next() throw NoSuchElementException.
    java.util.Iterator firstKey = runtimes.keySet().iterator();
    if (firstKey.hasNext()) {
        return firstKey.next().toString();
    }
    return null;
}
From source file:eu.itesla_project.online.db.OnlineDbMVStore.java
private void serializeStoredWorkflowsStates() { LOGGER.info("Serializing stored workflows states"); for (String workflowId : workflowsStates.keySet()) { if (workflowStatesFolderExists(workflowId)) { LOGGER.info("Serializing network data of workflow {}", workflowId); ConcurrentHashMap<Integer, Map<HistoDbAttributeId, Object>> workflowStates = workflowsStates .get(workflowId);//from w w w. j av a2s.c o m Path workflowStatesFolder = getWorkflowStatesFolder(workflowId); Path csvFile = Paths.get(workflowStatesFolder.toString(), SERIALIZED_STATES_FILENAME); try (FileWriter fileWriter = new FileWriter(csvFile.toFile()); CsvListWriter csvWriter = new CsvListWriter(fileWriter, new CsvPreference.Builder('"', ';', "\r\n").build())) { boolean printHeaders = true; for (Integer stateId : workflowStates.keySet()) { Map<HistoDbAttributeId, Object> networkValues = workflowStates.get(stateId); if (printHeaders) { List<String> headers = new ArrayList<>(networkValues.size()); for (HistoDbAttributeId attrId : networkValues.keySet()) { headers.add(attrId.toString()); } ArrayList<String> headersList = new ArrayList<>(); headersList.add("workflow"); headersList.add("state"); headersList.addAll(Arrays.asList(headers.toArray(new String[] {}))); csvWriter.writeHeader(headersList.toArray(new String[] {})); printHeaders = false; } ArrayList<Object> valuesList = new ArrayList<>(); valuesList.add(workflowId); valuesList.add(stateId); valuesList.addAll(Arrays.asList(networkValues.values().toArray())); csvWriter.write(valuesList.toArray()); } } catch (IOException e) { LOGGER.error("Error serializing network data for workflow {}", workflowId); } } } }