List of usage examples for org.json.simple JSONObject keySet
Set<K> keySet();
From source file:Report.CReport.java
public static void genReport(JSONArray pObjAry) throws IOException, COSVisitorException { String imagePath = "C:\\Users\\Bryden\\Desktop\\pie-sample.png"; List<List<String>> lstContents = new ArrayList<>(); List<String> aryLst = new ArrayList<>(); aryLst.add("Incident Type"); aryLst.add(""); lstContents.add(aryLst);/* w ww . j av a 2 s.co m*/ for (Object obj : pObjAry) { JSONObject objJson = (JSONObject) obj; Iterator<?> keys = objJson.keySet().iterator(); while (keys.hasNext()) { String key = (String) keys.next(); // loop to get the dynamic key String value = (String) objJson.get(key); List<String> aryValues = new ArrayList<>(); aryValues.add(key); aryValues.add(value); lstContents.add(aryValues); } } try (// Create a document and add a page to it PDDocument document = new PDDocument()) { PDPage page = new PDPage(PDPage.PAGE_SIZE_A4); document.addPage(page); // Create a new font object selecting one of the PDF base fonts PDFont font = PDType1Font.HELVETICA_BOLD; InputStream in = Files.newInputStream(Paths.get(imagePath)); PDJpeg img = new PDJpeg(document, in); // Define a text content stream using the selected font, moving the cursor and drawing the text "Hello World" try (// Start a new content stream which will "hold" the to be created content PDPageContentStream contentStream = new PDPageContentStream(document, page)) { // Define a text content stream using the selected font, moving the cursor and drawing the text "Hello World" contentStream.beginText(); contentStream.setFont(font, 20); contentStream.moveTextPositionByAmount(70, 720); contentStream.drawString("Incident Summary " + new Date()); contentStream.endText(); contentStream.beginText(); contentStream.setFont(font, 20); contentStream.moveTextPositionByAmount(100, 670); contentStream.drawString("Statistics"); contentStream.endText(); contentStream.drawImage(img, 10, 10); drawTable(page, contentStream, 650, 100, lstContents); // Make sure that the content stream is closed: } img.clear(); // Save the 
results and ensure that the document is properly closed: document.save("Hello World.pdf"); } }
From source file:rsreflection.HookImporter.java
/**
 * Reads a hook-definition JSON file into a map from hook name to {@link FieldInfo}.
 * Each top-level entry must be an object carrying the keys "className", "fieldName"
 * and "multiplier" (a string parseable as an int).
 *
 * Best-effort: on any failure the error is logged and the (possibly partial)
 * map built so far is returned.
 *
 * @param path path of the JSON file to read
 * @return map of hook name to FieldInfo; may be empty or partial on error
 */
public static HashMap<String, FieldInfo> readJSON(String path) {
    HashMap<String, FieldInfo> hookMap = new HashMap<>(); // FIX: was a raw-type HashMap
    // FIX: the FileReader was never closed in the original — use try-with-resources.
    try (FileReader reader = new FileReader(path)) {
        JSONParser parser = new JSONParser();
        Object obj = parser.parse(reader);
        JSONObject jsonObject = (JSONObject) obj;
        for (Object s : jsonObject.keySet()) {
            JSONObject cur = (JSONObject) jsonObject.get(s);
            FieldInfo f = new FieldInfo(cur.get("className").toString(), cur.get("fieldName").toString(),
                    Integer.parseInt(cur.get("multiplier").toString()));
            hookMap.put(s.toString(), f);
        }
    } catch (IOException e) {
        // Covers FileNotFoundException too (it is a subclass); same handling as before.
        e.printStackTrace();
    } catch (Exception ex) {
        // Parse errors, bad casts, malformed numbers — logged, partial map returned.
        Logger.getLogger(HookImporter.class.getName()).log(Level.SEVERE, null, ex);
    }
    return hookMap;
}
From source file:sce.ElasticJob.java
/**
 * Quartz job that evaluates a set of "elastic" scaling constraints.
 *
 * The JSON in the "#elasticJobConstraints" job-data entry is a two-level map:
 * outer keys group constraint blocks, inner keys order the individual constraints.
 * Each constraint is turned into a query via getQuery(), and a textual boolean
 * expression (AND/OR connectives taken from each constraint's "match" field) is
 * built over the query indices. If the expression evaluates to true, the URL in
 * "#url" is invoked, using basic-auth credentials when they are embedded in it.
 *
 * @param context Quartz execution context carrying the job data map
 * @throws JobExecutionException wrapping any parse/evaluation/network failure
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        // build the list of queries
        ArrayList<String> queries = new ArrayList<>();
        JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
        String url = jobDataMap.getString("#url");
        String elasticJob = jobDataMap.getString("#elasticJobConstraints");
        int counter = 0; // index of the current query, referenced from the expression
        String expression = ""; // textual boolean expression over query indices
        String j_tmp = ""; // inner key seen on the previous iteration (drives closing parentheses)
        JSONParser parser = new JSONParser();
        JSONObject jsonobject = (JSONObject) parser.parse(elasticJob);
        Iterator<?> keys = jsonobject.keySet().iterator();
        while (keys.hasNext()) {
            String i = (String) keys.next();
            JSONObject jsonobject2 = (JSONObject) jsonobject.get(i);
            Iterator<?> keys2 = jsonobject2.keySet().iterator();
            while (keys2.hasNext()) {
                String j = (String) keys2.next();
                JSONObject jsonobject3 = (JSONObject) jsonobject2.get(j);
                // Exactly one of the *configuration fields is expected to be present;
                // the first non-null one (in this fixed order) wins.
                String configuration = "";
                if (jsonobject3.get("slaconfiguration") != null) {
                    configuration = (String) jsonobject3.get("slaconfiguration");
                } else if (jsonobject3.get("bcconfiguration") != null) {
                    configuration = (String) jsonobject3.get("bcconfiguration");
                } else if (jsonobject3.get("vmconfiguration") != null) {
                    configuration = (String) jsonobject3.get("vmconfiguration");
                } else if (jsonobject3.get("anyconfiguration") != null) {
                    configuration = (String) jsonobject3.get("anyconfiguration");
                }
                // add the query to the queries list
                queries.add(getQuery((String) jsonobject3.get("metric"), (String) jsonobject3.get("cfg"),
                        configuration, (String) jsonobject3.get("relation"),
                        String.valueOf(jsonobject3.get("threshold")), String.valueOf(jsonobject3.get("time")),
                        (String) jsonobject3.get("timeselect")));
                // "match" selects the boolean connective that opens before this operand:
                // "any" -> OR(, "all" -> AND(, absent/"" -> no connective.
                String op = jsonobject3.get("match") != null ? (String) jsonobject3.get("match") : "";
                switch (op) {
                case "":
                    break;
                case "any":
                    op = "OR(";
                    break;
                case "all":
                    op = "AND(";
                    break;
                }
                // Emit (previousKey - currentKey) closing parentheses when the numeric inner
                // key decreases, i.e. when a nesting level is left.
                // NOTE(review): assumes inner keys are numeric strings — parseInt throws
                // otherwise; confirm with the producer of this JSON.
                String closed_parenthesis = " ";
                int num_closed_parenthesis = !j_tmp.equals("") ? Integer.parseInt(j_tmp) - Integer.parseInt(j)
                        : 0;
                for (int parenthesis = 0; parenthesis < num_closed_parenthesis; parenthesis++) {
                    closed_parenthesis += " )";
                }
                expression += op + " " + counter + closed_parenthesis;
                j_tmp = j;
                counter++;
            }
        }
        // Evaluate the assembled boolean expression over the collected queries.
        ExpressionTree calc = new ExpressionTree(new Scanner(expression), queries);
        if (calc.evaluate()) {
            URL u = new URL(url);
            //get user credentials from URL, if present
            final String usernamePassword = u.getUserInfo();
            //set the basic authentication credentials for the connection
            if (usernamePassword != null) {
                Authenticator.setDefault(new Authenticator() {
                    @Override
                    protected PasswordAuthentication getPasswordAuthentication() {
                        // credentials have the form "user:password"
                        return new PasswordAuthentication(usernamePassword.split(":")[0],
                                usernamePassword.split(":")[1].toCharArray());
                    }
                });
            }
            //call the callUrl
            URLConnection connection = u.openConnection();
            getUrlContents(connection);
        }
    } catch (Exception e) {
        e.printStackTrace();
        // Re-throw so Quartz records the execution as failed.
        throw new JobExecutionException(e);
    }
}
From source file:sce.Main.java
/**
 * Builds a Quartz {@link TriggerBuilder} from a JSON trigger description.
 *
 * Each top-level key of the JSON object selects one TriggerBuilder (or, for the
 * "with*"/"repeat*" keys, SimpleScheduleBuilder) setter; the associated value
 * supplies its arguments (a String, or a JSONArray for name/group pairs).
 * The trigger is NOT built here (no .build()) — the caller finishes it.
 *
 * @param jsonObject JSON map of builder-option name to value
 * @return the configured builder, or null when a referenced job cannot be found
 */
public TriggerBuilder buildTrigger(JSONObject jsonObject) {
    try {
        TriggerBuilder tmp = newTrigger();
        SimpleScheduleBuilder sbt = SimpleScheduleBuilder.simpleSchedule();
        Object t = tmp;
        boolean withSchedule = false; //set to true if a schedule is used
        Iterator it = jsonObject.keySet().iterator();
        JSONArray array;
        while (it.hasNext()) {
            String key = (String) it.next();
            Object value = jsonObject.get(key);
            switch (key) {
            //Set the time at which the Trigger will no longer fire - even if its schedule has remaining repeats
            case "endAt":
                t = ((TriggerBuilder<Trigger>) t).endAt(new Date(Long.parseLong((String) value))); //number of milliseconds from January 1st 1970
                break;
            //Set the identity of the Job which should be fired by the produced Trigger, by extracting the JobKey from the given job
            case "forJobDetail":
                array = (JSONArray) value;
                JobKey jobKey = JobKey.jobKey((String) array.get(0), (String) array.get(1)); //value[0]=jobName, value[1]=jobGroup
                if (jobKey != null) {
                    JobDetail jobDetail = sched.getJobDetail(jobKey);
                    if (jobDetail != null) {
                        t = ((TriggerBuilder<Trigger>) t).forJob(jobDetail);
                    } else {
                        // referenced job does not exist — caught below, method returns null
                        throw new SchedulerException();
                    }
                }
                break;
            //Set the identity of the Job which should be fired by the produced Trigger
            case "forJobKey":
                array = (JSONArray) value;
                t = ((TriggerBuilder<Trigger>) t)
                        .forJob(JobKey.jobKey((String) array.get(0), (String) array.get(1))); //value[0]=jobName, value[1]=jobGroup
                break;
            //Set the identity of the Job which should be fired by the produced Trigger - a JobKey will be produced with the given name and default group
            case "forJobName":
                t = ((TriggerBuilder<Trigger>) t).forJob((String) value);
                break;
            //Set the identity of the Job which should be fired by the produced Trigger - a JobKey will be produced with the given name and group
            //NOTE: "forJobNameGroup" deliberately falls through to "withJobIdentityNameGroup" below.
            case "forJobNameGroup":
                //This case (withJobIdentityNameGroup) could be called when invoking this method (through scheduleJob of index.jsp), when adding a new trigger to an existing job.
                //In that case (in the method scheduleJob of this class), the newly built job is trashed and not added to the scheduler. The trigger must then contain the job name and group in order to successfully add it
                //to the scheduler
            case "withJobIdentityNameGroup":
                array = (JSONArray) value;
                t = ((TriggerBuilder<Trigger>) t).forJob((String) array.get(0), (String) array.get(1)); //value[0]=jobName, value[1]=jobGroup
                break;
            //Set the name of the Calendar that should be applied to this Trigger's schedule
            case "modifiedByCalendar":
                t = ((TriggerBuilder<Trigger>) t).modifiedByCalendar((String) value);
                break;
            //Set the time the Trigger should start at - the trigger may or may not fire at this time - depending upon the schedule configured for the Trigger
            case "startAt":
                t = ((TriggerBuilder<Trigger>) t).startAt(new Date(Long.parseLong((String) value))); //number of milliseconds from January 1st 1970
                break;
            //Set the time the Trigger should start at to the current moment - the trigger may or may not fire at this time - depending upon the schedule configured for the Trigger
            case "startNow":
                t = ((TriggerBuilder<Trigger>) t).startNow();
                break;
            //case "usingJobData":
            //    //break;
            //Set the given (human-meaningful) description of the Trigger
            case "withDescription":
                t = ((TriggerBuilder<Trigger>) t).withDescription((String) value);
                break;
            //Use a TriggerKey with the given name and default group to identify the Trigger
            case "withIdentityName":
                t = ((TriggerBuilder<Trigger>) t).withIdentity((String) value);
                break;
            //Use a TriggerKey with the given name and group to identify the Trigger
            case "withIdentityNameGroup":
                array = (JSONArray) value;
                t = ((TriggerBuilder<Trigger>) t).withIdentity((String) array.get(0), (String) array.get(1)); //value[0]=triggerName, value[1]=triggerGroup
                break;
            //Use the given TriggerKey to identify the Trigger
            case "withIdentityTriggerKey":
                t = ((TriggerBuilder<Trigger>) t).withIdentity(TriggerKey.triggerKey((String) value));
                break;
            //Set the Trigger's priority
            case "withPriority":
                t = ((TriggerBuilder<Trigger>) t).withPriority(Integer.parseInt((String) value));
                break;
            //********withSchedule********
            //Specify that the trigger will repeat indefinitely
            case "repeatForever":
                sbt = value.equals("true") ? sbt.repeatForever() : sbt;
                withSchedule = true;
                break;
            //Specify a repeat interval in hours - which will then be multiplied by 60 * 60 * 1000 to produce milliseconds
            case "withIntervalInHours":
                sbt = sbt.withIntervalInHours(Integer.parseInt((String) value));
                withSchedule = true;
                break;
            //Specify a repeat interval in milliseconds
            //NOTE(review): Integer.parseInt caps the interval at ~24.8 days and rejects
            //values above Integer.MAX_VALUE; the builder takes a long — consider Long.parseLong.
            case "withIntervalInMilliseconds":
                sbt = sbt.withIntervalInMilliseconds(Integer.parseInt((String) value));
                withSchedule = true;
                break;
            //Specify a repeat interval in minutes - which will then be multiplied by 60 * 1000 to produce milliseconds
            case "withIntervalInMinutes":
                sbt = sbt.withIntervalInMinutes(Integer.parseInt((String) value));
                withSchedule = true;
                break;
            //Specify a repeat interval in seconds - which will then be multiplied by 1000 to produce milliseconds
            case "withIntervalInSeconds":
                sbt = sbt.withIntervalInSeconds(Integer.parseInt((String) value));
                withSchedule = true;
                break;
            //If the Trigger misfires, use the SimpleTrigger.MISFIRE_INSTRUCTION_FIRE_NOW instruction
            case "withMisfireHandlingInstructionFireNow":
                sbt = sbt.withMisfireHandlingInstructionFireNow();
                withSchedule = true;
                break;
            //If the Trigger misfires, use the Trigger.MISFIRE_INSTRUCTION_IGNORE_MISFIRE_POLICY instruction
            case "withMisfireHandlingInstructionIgnoreMisfires":
                sbt = sbt.withMisfireHandlingInstructionIgnoreMisfires();
                withSchedule = true;
                break;
            //If the Trigger misfires, use the SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_EXISTING_COUNT instruction
            case "withMisfireHandlingInstructionNextWithExistingCount":
                sbt = sbt.withMisfireHandlingInstructionNextWithExistingCount();
                withSchedule = true;
                break;
            //If the Trigger misfires, use the SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NEXT_WITH_REMAINING_COUNT instruction
            case "withMisfireHandlingInstructionNextWithRemainingCount":
                sbt = sbt.withMisfireHandlingInstructionNextWithRemainingCount();
                withSchedule = true;
                break;
            //If the Trigger misfires, use the SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_EXISTING_REPEAT_COUNT instruction
            case "withMisfireHandlingInstructionNowWithExistingCount":
                sbt = sbt.withMisfireHandlingInstructionNowWithExistingCount();
                withSchedule = true;
                break;
            //If the Trigger misfires, use the SimpleTrigger.MISFIRE_INSTRUCTION_RESCHEDULE_NOW_WITH_REMAINING_REPEAT_COUNT instruction
            case "withMisfireHandlingInstructionNowWithRemainingCount":
                sbt = sbt.withMisfireHandlingInstructionNowWithRemainingCount();
                withSchedule = true;
                break;
            //Specify the number of times the trigger will repeat - total number of firings will be this number + 1
            case "withRepeatCount":
                sbt = sbt.withRepeatCount(Integer.parseInt((String) value));
                withSchedule = true;
                break;
            }
            //it.remove(); // avoids a ConcurrentModificationException
        }
        //request.getParameterMap();
        // Attach the simple schedule only when at least one schedule option was seen.
        if (withSchedule) {
            t = ((TriggerBuilder<Trigger>) t).withSchedule(sbt);
        }
        return ((TriggerBuilder<Trigger>) t); //the build is not done here (.build())
    } catch (SchedulerException e) {
        // Referenced job missing (forJobDetail) — signal failure with null.
        return null;
    }
}
From source file:sce.Main.java
public JobBuilder buildJob(JSONObject jsonObject) { JobBuilder t;//from w w w.j a v a2 s. c o m //if the url parameter is not null, then istantiate the RESTJob class JSONObject jobDataMap = (JSONObject) jsonObject.get("jobDataMap"); //get the class type for this job (e.g., RESTJob, ProcessExecutorJob, RESTXMLJob) String jobClass = (String) jsonObject.get("jobClass"); //set the job class switch (jobClass) { case "DumbJob": //if isNonConcurrent = true, then istantiate the DumbJobStateful class if (jobDataMap != null && jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && ((String) jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(DumbJobStateful.class); } else { t = newJob(DumbJob.class); } break; case "ProcessExecutorJob": //if isNonConcurrent = true, then istantiate the ProcessExecutorStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && ((String) jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(ProcessExecutorStateful.class); } else { t = newJob(ProcessExecutor.class); } break; case "RESTJob": //if isNonConcurrent = true, then istantiate the RESTJobStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(RESTJobStateful.class); } else { t = newJob(RESTJob.class); } break; case "RESTXMLJob": //if isNonConcurrent = true, then istantiate the RESTXMLJobStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(RESTXMLJobStateful.class); } else { t = newJob(RESTXMLJob.class); } break; case "RESTKBJob": //if isNonConcurrent = true, then istantiate the RESTJobStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(RESTKBJobStateful.class); } else { t = newJob(RESTKBJob.class); } break; case "RESTCheckSLAJob": 
//if isNonConcurrent = true, then istantiate the RESTCheckSLAJobStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(RESTCheckSLAJobStateful.class); } else { t = newJob(RESTCheckSLAJob.class); } break; case "ElasticJob": //if isNonConcurrent = true, then istantiate the ElasticJobStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(ElasticJobStateful.class); } else { t = newJob(ElasticJob.class); } break; case "RESTAppMetricJob": //if isNonConcurrent = true, then istantiate the RESTAppMetricJobStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(RESTAppMetricJobStateful.class); } else { t = newJob(RESTAppMetricJob.class); } break; default: t = newJob(DumbJob.class); break; } //DUMB JOB /*if (jobDataMap == null || (jobDataMap.get("#url") == null && jobDataMap.get("#processParameters") == null)) { //if isNonConcurrent = true, then istantiate the DumbJobStateful class if (jobDataMap != null && jobDataMap.get("#isNonConcurrent") != null && ((String) jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(DumbJobStateful.class); } else { t = newJob(DumbJob.class); } //using dumb data //Add the given key-value pair to the JobDetail's JobDataMap //t = t.usingJobData("jobSays", "Hello World!"); //t = t.usingJobData("myFloatValue", 3.141f); } //PROCESS EXECUTOR JOB else if (jobDataMap.get("#processParameters") != null) { //if isNonConcurrent = true, then istantiate the ProcessExecutorStateful class if (jobDataMap.get("#isNonConcurrent") != null && ((String) jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(ProcessExecutorStateful.class); } else { t = newJob(ProcessExecutor.class); } } //REST JOB else if (jobDataMap.get("#url") != null && 
jobDataMap.get("#binding") == null) { //if isNonConcurrent = true, then istantiate the RESTJobStateful class if (jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(RESTJobStateful.class); } else { t = newJob(RESTJob.class); } } //REST XML JOB else if (jobDataMap.get("#url") != null && jobDataMap.get("#binding") != null) { //if isNonConcurrent = true, then istantiate the RESTJobStateful class if (jobDataMap.get("#isNonConcurrent") != null && (jobDataMap.get("#isNonConcurrent")).equals("true")) { t = newJob(RESTXMLJobStateful.class); } else { t = newJob(RESTXMLJob.class); } } //ELSE else { t = newJob(DumbJob.class); }*/ //set job data map values from a json job data map key => value, if defined if (jobDataMap != null) { Iterator dataMapIterator = jobDataMap.keySet().iterator(); while (dataMapIterator.hasNext()) { String key = (String) dataMapIterator.next(); String value = (String) jobDataMap.get(key); t = t.usingJobData(key, value); } } Iterator it = jsonObject.keySet().iterator(); JSONArray array; while (it.hasNext()) { String key = (String) it.next(); Object value = jsonObject.get(key); switch (key) { //Set whether or not the Job should remain stored after it is orphaned //Whether or not the Job should remain stored after it is orphaned (no Triggers point to it) //If a job is non-durable, it is automatically deleted from the scheduler once there are no longer //any active triggers associated with it. In other words, non-durable jobs have a life span bounded by the existence of its triggers case "storeDurably": t = t.storeDurably(((String) value).equals("true")); break; case "usingJobDataBoolean": for (String s : (String[]) value) { t = t.usingJobData("usingJobDataBoolean", s.equals("true") ? 
Boolean.TRUE : Boolean.FALSE); } break; case "usingJobDataDouble": for (String s : (String[]) value) { t = t.usingJobData("usingJobDataDouble", Double.parseDouble(s)); } break; case "usingJobDataFloat": for (String s : (String[]) value) { t = t.usingJobData("usingJobDataFloat", Float.parseFloat(s)); } break; case "usingJobDataInteger": for (String s : (String[]) value) { t = t.usingJobData("usingJobDataInteger", Integer.parseInt(s)); } break; case "usingJobDataLong": for (String s : (String[]) value) { t = t.usingJobData("usingJobDataLong", Long.parseLong(s)); } break; case "usingJobDataString": for (String s : (String[]) value) { t = t.usingJobData("usingJobDataString", s); } break; //Set the description given to the Job instance by its creator (if any) case "withJobDescription": t = t.withDescription((String) value); break; //Set the job name case "withJobIdentityName": t = t.withIdentity(JobKey.jobKey((String) value)); //value[0]=jobName break; //Set the job name and group case "withJobIdentityNameGroup": array = (JSONArray) value; t = t.withIdentity(JobKey.jobKey((String) array.get(0), (String) array.get(1))); //value[0]=jobName, value[1]=jobGroup break; //In clustering mode, this parameter must be set to true to ensure job fail-over //Instructs the Scheduler whether or not the Job should be re-executed if a 'recovery' or 'fail-over' situation is encountered //If a job "requests recovery", and it is executing during the time of a 'hard shutdown' of the scheduler (i.e. the process it is //running within crashes, or the machine is shut off), then it is re-executed when the scheduler is started again. 
In this case, the //JobExecutionContext.Recovering property will return true case "requestRecovery": t = t.requestRecovery(((String) value).equals("true")); break; //REST call /*case "url": t = t.usingJobData("url", (String) value); break; //store notificationEmail case "notificationEmail": t = t.usingJobData("notificationEmail", (String) value); break;*/ } //it.remove(); // avoids a ConcurrentModificationException } return t; //the build is not done here (.build()) }
From source file:sce.Main.java
public String updateJobDataMap(JSONObject jsonObject) { try {//from w w w.j a v a2 s .c o m JobKey jobKey = JobKey.jobKey((String) jsonObject.get("jobName"), (String) jsonObject.get("jobGroup")); if (sched.checkExists(jobKey)) { JobDetail jobDetail = sched.getJobDetail(jobKey); JobDataMap jobDataMap = jobDetail.getJobDataMap(); Iterator it = jsonObject.keySet().iterator(); while (it.hasNext()) { String key = (String) it.next(); String value = (String) jsonObject.get(key); jobDataMap.put(key, value); } sched.addJob(jobDetail, true); //replace the stored job with the new one return "true"; } else { return "false"; } } catch (SchedulerException e) { return e.getMessage(); } }
From source file:sce.ProcessExecutor.java
/**
 * Quartz job that runs an external process described by the job data map.
 *
 * "#processParameters" is a JSON array of single-entry objects (e.g.
 * 0->{"processPath": "/path/bin"}, 1->{"parameter1": "value1"}, ...) which is
 * flattened into the argv passed to executeProcess(). When the process path ends
 * in ".sh" the command is prefixed with "/bin/sh". "#jobTimeout" (seconds, 0 =
 * wait forever) and "#environment" tune the execution; the process output is
 * stored as the job result, optionally mailed to "#notificationEmail", and
 * chained jobs are triggered afterwards.
 *
 * @param context Quartz execution context carrying the job data map
 * @throws JobExecutionException wrapping number-format, JSON-parse or execution errors
 */
@Override
public void execute(JobExecutionContext context) throws JobExecutionException {
    try {
        JobKey key = context.getJobDetail().getKey();
        JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
        //set job execution timeout in seconds
        if (jobDataMap.containsKey("#jobTimeout")) {
            this.timeout = Long.parseLong(jobDataMap.getString("#jobTimeout"));
        } //default job execution timeout in seconds (0 means to wait forever)
        else {
            this.timeout = 0;
        }
        //set java environment variables
        if (jobDataMap.containsKey("#environment")) {
            this.environment = jobDataMap.getString("#environment");
        } else {
            this.environment = "";
        }
        //json has the form of an array, whose elements are associative arrays
        //(i.e., 0->("processPath"->"/var/www/html/processBinary"), 1->("parameter1"->"value1")...)
        if (jobDataMap.containsKey("#processParameters")) {
            //read json from request
            JSONParser parser = new JSONParser();
            JSONArray jsonarray = (JSONArray) parser.parse(jobDataMap.getString("#processParameters"));
            int size = jsonarray.size();
            String[] processParameters = null;
            boolean script = false; //set to true if the process is a sh script
            for (int i = 0; i < size; i++) {
                JSONObject jsonobject = (JSONObject) jsonarray.get(i);
                Iterator<?> keys = jsonobject.keySet().iterator();
                while (keys.hasNext()) {
                    String k = (String) keys.next();
                    //this condition is true only for the first element of the jsonobject
                    //(i.e., processParameters is istantiated once)
                    if (k.equals("processPath")) {
                        String processPath = (String) jsonobject.get("processPath");
                        if (processPath.endsWith(".sh")) {
                            // shell script: reserve one extra slot and put the interpreter first
                            script = true;
                            processParameters = new String[size + 1];
                            processParameters[i] = "/bin/sh";
                        } else {
                            processParameters = new String[size];
                        }
                    }
                    //NOTE(review): the [i + 1] offset only lines up when "processPath" is the
                    //FIRST element of the array (so "/bin/sh" lands at index 0) — an NPE or
                    //misaligned argv results otherwise; confirm the producer guarantees this.
                    if (script) {
                        processParameters[i + 1] = (String) jsonobject.get(k);
                    } else {
                        processParameters[i] = (String) jsonobject.get(k);
                    }
                }
            }
            String r = executeProcess(processParameters);
            //set the result to the job execution context, to be able to retrieve it later (e.g., with a job listener)
            context.setResult(r);
            //if notificationEmail is defined in the job data map, then send a notification email to it
            if (jobDataMap.containsKey("#notificationEmail")) {
                sendEmail(context, jobDataMap.getString("#notificationEmail"));
            }
            //trigger the linked jobs of the finished job, depending on the job result [true, false]
            jobChain(context);
            //System.out.println("Instance " + key + " of REST Job returns: " + truncateResult(r));
        } else {
            //no process parameters: nothing to execute
            //System.out.println("Instance " + key + " of ProcessExecutor Job returns: process not found");
        }
    } catch (NumberFormatException | ParseException | JobExecutionException e) {
        throw new JobExecutionException(e.getMessage(), e);
    }
}
From source file:screen.tools.sbs.actions.defaults.ActionAddFlagsTinyPack.java
public static void addFromField(ComponentPack pack, String field) throws FieldException { FieldJSONObject fieldJSONObject = new FieldJSONObject(); fieldJSONObject.set(field);//from ww w. j av a 2 s .c om JSONObject flagsObject = fieldJSONObject.getJSONObject(); Set<?> keySet = flagsObject.keySet(); Iterator<?> iterator = keySet.iterator(); while (iterator.hasNext()) { Object next = iterator.next(); String key = (String) next; ComponentFlag flag = new ComponentFlag(); flag.getKey().set(key); flag.getValue().setObject(flagsObject.get(key)); pack.getFlagList().allocate().merge(flag); } }
From source file:screen.tools.sbs.cmake.SBSCMakeLauncher.java
/**
 * Runs "cmake" in the given project directory to (re)generate the build files.
 * The generator, make program, compilers and any extra -D variable definitions
 * are taken from the SBS environment variables (TARGET_ENV, MAKE_PROGRAM,
 * C_COMPILER, CPP_COMPILER, RC_COMPILER, CMAKE_ADD_VAR_SET).
 *
 * @param sbsXmlPath directory to run cmake in ("/" is treated as the current directory)
 * @throws ContextException when the environment-variable context is unavailable
 * @throws FieldException when a field value cannot be read or parsed
 */
public void launch(String sbsXmlPath) throws ContextException, FieldException {
    EnvironmentVariables variables = contextHandler.<EnvironmentVariablesContext>get(ContextKeys.ENV_VARIABLES)
            .getEnvironmentVariables();
    // Configuration fields driving the cmake command line.
    FieldString fieldTargetEnv = variables.getFieldString("TARGET_ENV");
    FieldString fieldMakeProg = variables.getFieldString("MAKE_PROGRAM");
    FieldString fieldCCompiler = variables.getFieldString("C_COMPILER");
    FieldString fieldCppCompiler = variables.getFieldString("CPP_COMPILER");
    FieldString fieldRcCompiler = variables.getFieldString("RC_COMPILER");
    // CMAKE_ADD_VAR_SET holds a JSON object of extra cmake -D definitions.
    FieldString fieldAddVarSetString = variables.getFieldString("CMAKE_ADD_VAR_SET");
    FieldJSONObject fieldAddVarSet = new FieldJSONObject();
    fieldAddVarSet.set(fieldAddVarSetString.getOriginal());
    String targetEnv = fieldTargetEnv.get();
    if ("/".equals(sbsXmlPath))
        sbsXmlPath = ".";
    List<String> command = new ArrayList<String>();
    command.add("cmake");
    command.add(".");
    command.add("-G");
    command.add(targetEnv);
    // --no-warn-unused-cli is only supported from cmake 2.8.4 onwards.
    if (CMakeVersion.isUpperThan(CMakeVersion.getVersion(), new String[] { "2", "8", "4" }))
        command.add("--no-warn-unused-cli");
    // Optional tool overrides: only added when configured and non-empty.
    if (!fieldMakeProg.isEmpty()) {
        String makeProg = fieldMakeProg.get();
        if (!makeProg.equals(""))
            command.add("-DCMAKE_MAKE_PROGRAM=\"" + makeProg + "\"");
    }
    if (!fieldCCompiler.isEmpty()) {
        String cCompiler = fieldCCompiler.get();
        if (!cCompiler.equals(""))
            command.add("-DCMAKE_C_COMPILER=\"" + cCompiler + "\"");
    }
    if (!fieldCppCompiler.isEmpty()) {
        String cppCompiler = fieldCppCompiler.get();
        if (!cppCompiler.equals(""))
            command.add("-DCMAKE_CXX_COMPILER=\"" + cppCompiler + "\"");
    }
    if (!fieldRcCompiler.isEmpty()) {
        String rcCompiler = fieldRcCompiler.get();
        if (!rcCompiler.equals(""))
            command.add("-DCMAKE_RC_COMPILER=\"" + rcCompiler + "\"");
    }
    // Extra -D definitions: null value -> bare -Dkey, String -> quoted, Number -> as-is.
    if (!fieldAddVarSet.isEmpty()) {
        JSONObject jsonObject = fieldAddVarSet.getJSONObject();
        Set<?> keySet = jsonObject.keySet();
        Iterator<?> iterator = keySet.iterator();
        while (iterator.hasNext()) {
            Object next = iterator.next();
            if (next instanceof String) {
                String key = (String) next;
                Object object = jsonObject.get(key);
                if (object == null) {
                    command.add("-D" + key);
                }
                if (object instanceof String) {
                    String value = (String) object;
                    command.add("-D" + key + "=\"" + value + "\"");
                } else if (object instanceof Number) {
                    Number value = (Number) object;
                    command.add("-D" + key + "=" + value);
                }
            }
        }
    }
    Logger.info(ProcessLauncher.getCommand(command));
    // Forward process output to the logger; stderr lines are also collected as errors.
    ProcessHandler processHandler = new ProcessHandler(command) {
        @Override
        public void processOutLine(String line) {
            Logger.info(line);
        }

        @Override
        public void processErrLine(String line) {
            Logger.error(line);
            ErrorList.instance.addError(line);
        }
    };
    processHandler.getProcessBuilder().directory(new File(sbsXmlPath));
    processHandler.exec();
}
From source file:search.handler.BuildIndex.java
/**
 * Index all the metadata documents into the per-language Lucene index writers.
 * For each metadata id, the stored JSON document is fetched from the METADATA
 * database; every recognised key (member of metadataKeys) becomes a text field,
 * plus stored "docid" and "database" fields.
 *
 * @param conn the database connection
 * @param map map of languages to index-writers
 * @throws SearchException on any failure while fetching, parsing or indexing
 */
private void indexMetadata(Connection conn, HashMap<String, IndexWriter> map) throws SearchException {
    try {
        for (int i = 0; i < metadata.length; i++) {
            if (metadata[i] != null) {
                String bson = conn.getFromDb(Database.METADATA, metadata[i]);
                JSONObject jDoc = (JSONObject) JSONValue.parse(bson);
                // Pick the writer matching this document's language.
                String language = getLanguage(metadata[i]);
                IndexWriter writer = map.get(language);
                // Lazily created on the first recognised metadata key.
                Document doc = null;
                Set<String> keys = jDoc.keySet();
                for (String key : keys) {
                    if (metadataKeys.contains(key)) {
                        if (doc == null) {
                            doc = new Document();
                            doc.add(new StringField("docid", (String) jDoc.get(JSONKeys.DOCID),
                                    Field.Store.YES));
                        }
                        doc.add(new TextField(key, new StringReader((String) jDoc.get(key))));
                    }
                }
                if (doc != null) {
                    doc.add(new StringField("database", Database.METADATA, Field.Store.YES));
                    writer.addDocument(doc);
                }
            }
            incDone();
        }
    } catch (Exception e) {
        throw new SearchException(e);
    }
}