List of usage examples for com.amazonaws.services.elasticmapreduce.model JobFlowExecutionStatusDetail getState
public String getState()
The state of the job flow.
From source file:awswc.AwsConsoleApp.java
License:Open Source License
/**
 * Blocks the caller until the job flow started by {@code runJobFlowResult}
 * reaches a terminal state (COMPLETED, FAILED or TERMINATED), polling the
 * EMR service every two minutes.
 *
 * @param runJobFlowResult result of the RunJobFlow call whose job flow id is polled
 * @throws InterruptedException if the polling sleep is interrupted
 */
public static void wasFinished(RunJobFlowResult runJobFlowResult) throws InterruptedException {
    DescribeJobFlowsRequest describeJobFlowsRequest = new DescribeJobFlowsRequest()
            .withJobFlowIds(runJobFlowResult.getJobFlowId());
    int secondsBetweenPolling = 120;
    String state = null;
    do {
        // Sleep first: a freshly submitted job flow cannot already be finished.
        // 1000L keeps the multiplication in long arithmetic.
        Thread.sleep(secondsBetweenPolling * 1000L);
        DescribeJobFlowsResult jobFlowsResult = emr.describeJobFlows(describeJobFlowsRequest);
        JobFlowDetail detail = jobFlowsResult.getJobFlows().get(0);
        JobFlowExecutionStatusDetail executionStatusDetail = detail.getExecutionStatusDetail();
        state = executionStatusDetail.getState();
        // BUG FIX: EMR reports the terminal success state as "COMPLETED", not
        // "COMPLETE"; the original condition never matched, so the loop kept
        // polling forever on successfully finished job flows.
    } while (state != null && !state.equals("COMPLETED") && !state.equals("FAILED")
            && !state.equals("TERMINATED"));
}
From source file:fr.ens.biologie.genomique.eoulsan.util.cloud.AWSElasticMapReduceJob.java
License:LGPL
/** * Wait the end of the job/* w w w . j ava 2 s. c om*/ * @param secondBetweenChecking number of seconds to wait between 2 checks * @return the final state of the job */ public String waitForJob(final int secondBetweenChecking) { if (this.runFlowResult == null) { return null; } final DescribeJobFlowsRequest describeJobFlowsRequest = new DescribeJobFlowsRequest() .withJobFlowIds(this.runFlowResult.getJobFlowId()); String state = null; String lastState = null; int failCount = 0; try { do { Thread.sleep(secondBetweenChecking * 1000); try { final DescribeJobFlowsResult jobFlowsResult = this.elasticMapReduceClient .describeJobFlows(describeJobFlowsRequest); final JobFlowDetail detail = jobFlowsResult.getJobFlows().get(0); final JobFlowExecutionStatusDetail executionStatusDetail = detail.getExecutionStatusDetail(); failCount = 0; state = executionStatusDetail.getState(); } catch (AmazonClientException ace) { failCount++; getLogger().warning("Amazon client exception: " + ace.getMessage()); if (failCount >= MAX_FAIL_COUNT) { throw ace; } } if (lastState == null || !lastState.equals(state)) { getLogger().info("State of the job " + this.runFlowResult.getJobFlowId() + ": " + state); lastState = state; } } while (state != null && !state.equals("COMPLETED") && !state.equals("FAILED") && !state.equals("TERMINATED")); return state; } catch (InterruptedException e) { getLogger().warning("Error while waiting AWS Elastic MapReduce Job: " + e.getMessage()); } return null; }
From source file:org.huahinframework.emanager.util.JobUtils.java
License:Apache License
public static JSONArray listJobFlow(AmazonElasticMapReduce emr) { List<JSONObject> l = new ArrayList<JSONObject>(); DescribeJobFlowsResult describeJobFlowsResult = emr.describeJobFlows(new DescribeJobFlowsRequest()); for (JobFlowDetail jobFlowDetail : describeJobFlowsResult.getJobFlows()) { JobFlowExecutionStatusDetail executionStatusDetail = jobFlowDetail.getExecutionStatusDetail(); Map<String, String> m = new HashMap<String, String>(); m.put(Response.JOB_FLOW, jobFlowDetail.getJobFlowId()); m.put(Response.STATE, executionStatusDetail.getState()); m.put(Response.CREATION_DATE, executionStatusDetail.getCreationDateTime().toString()); m.put(Response.START_DATE, object2String(executionStatusDetail.getStartDateTime(), true)); m.put(Response.END_DATE, object2String(executionStatusDetail.getEndDateTime(), true)); l.add(new JSONObject(m)); }//from w w w. j av a2 s.c om return new JSONArray(l); }
From source file:org.huahinframework.emanager.util.JobUtils.java
License:Apache License
/**
 * Returns a JSON array describing only the job flows that are currently
 * active (STARTING, RUNNING or WAITING), including per-step details when
 * the flow has steps.
 *
 * @param emr the Elastic MapReduce client used to query the service
 * @return a JSONArray of per-job-flow JSONObjects with optional step lists
 */
public static JSONArray runningsJobFlow(AmazonElasticMapReduce emr) {
    List<JSONObject> entries = new ArrayList<JSONObject>();

    DescribeJobFlowsRequest request = new DescribeJobFlowsRequest()
            .withJobFlowStates(JOB_FLOW_STATUS_STARTING, JOB_FLOW_STATUS_RUNNING, JOB_FLOW_STATUS_WAITING);
    DescribeJobFlowsResult result = emr.describeJobFlows(request);

    for (JobFlowDetail flow : result.getJobFlows()) {
        JobFlowExecutionStatusDetail status = flow.getExecutionStatusDetail();

        Map<String, Object> entry = new HashMap<String, Object>();
        entry.put(Response.JOB_FLOW, flow.getJobFlowId());
        entry.put(Response.STATE, status.getState());
        entry.put(Response.CREATION_DATE, status.getCreationDateTime().toString());
        // Start/end dates may be absent for flows that have not run yet.
        entry.put(Response.START_DATE, object2String(status.getStartDateTime(), true));
        entry.put(Response.END_DATE, object2String(status.getEndDateTime(), true));

        if (!isEmpty(flow.getSteps())) {
            List<Object> steps = new ArrayList<Object>();
            for (StepDetail stepDetail : flow.getSteps()) {
                StepConfig stepConfig = stepDetail.getStepConfig();
                StepExecutionStatusDetail stepStatus = stepDetail.getExecutionStatusDetail();

                Map<String, Object> step = new HashMap<String, Object>();
                step.put(Response.NAME, stepConfig.getName());
                step.put(Response.ACTION_ON_FAILURE, stepConfig.getActionOnFailure());
                step.put(Response.STATE, object2String(stepStatus.getState(), false));
                step.put(Response.CREATION_DATE, object2String(stepStatus.getCreationDateTime(), true));
                step.put(Response.START_DATE, object2String(stepStatus.getStartDateTime(), true));
                step.put(Response.END_DATE, object2String(stepStatus.getEndDateTime(), true));

                HadoopJarStepConfig jarStep = stepConfig.getHadoopJarStep();
                step.put(Response.JAR, object2String(jarStep.getJar(), false));
                step.put(Response.MAIN_CLASS, object2String(jarStep.getMainClass(), false));
                // Omit the args key entirely when the step has no arguments.
                if (!isEmpty(jarStep.getArgs())) {
                    step.put(Response.ARGS, jarStep.getArgs());
                }

                steps.add(step);
            }
            entry.put(Response.STEPS, steps);
        }

        entries.add(new JSONObject(entry));
    }

    return new JSONArray(entries);
}
From source file:org.huahinframework.emanager.util.JobUtils.java
License:Apache License
/**
 * Returns a JSON object fully describing a single job flow: status and
 * timestamps, cluster/instance configuration, bootstrap actions and steps.
 * Returns an empty object when the id does not resolve to exactly one flow.
 *
 * @param jobFlow the job flow id to describe
 * @param emr     the Elastic MapReduce client used to query the service
 * @return a JSONObject with the flow's details, or an empty JSONObject
 */
public static JSONObject getJobFlow(String jobFlow, AmazonElasticMapReduce emr) {
    DescribeJobFlowsRequest request = new DescribeJobFlowsRequest().withJobFlowIds(jobFlow);
    DescribeJobFlowsResult result = emr.describeJobFlows(request);

    // Exactly one match is expected for a concrete id; anything else is
    // treated as "not found".
    if (result.getJobFlows().size() != 1) {
        return new JSONObject();
    }

    JobFlowDetail detail = result.getJobFlows().get(0);
    JobFlowExecutionStatusDetail status = detail.getExecutionStatusDetail();
    JobFlowInstancesDetail instances = detail.getInstances();

    Map<String, Object> job = new HashMap<String, Object>();
    job.put(Response.JOB_FLOW, detail.getJobFlowId());
    job.put(Response.STATE, status.getState());
    job.put(Response.CREATION_DATE, status.getCreationDateTime().toString());
    // Start/end dates may be absent; object2String handles null.
    job.put(Response.START_DATE, object2String(status.getStartDateTime(), true));
    job.put(Response.END_DATE, object2String(status.getEndDateTime(), true));
    job.put(Response.AMI_VERSION, object2String(detail.getAmiVersion(), false));
    job.put(Response.NAME, detail.getName());
    job.put(Response.LOG_URI, object2String(detail.getLogUri(), false));
    if (!isEmpty(detail.getSupportedProducts())) {
        job.put(Response.SUPPORTED_PRODUCTS, detail.getSupportedProducts());
    }

    // Instance/cluster configuration.
    job.put(Response.EC2_KEY_NAME, object2String(instances.getEc2KeyName(), false));
    job.put(Response.EC2_SUBNET_ID, object2String(instances.getEc2SubnetId(), false));
    job.put(Response.HADOOP_VERSION, object2String(instances.getHadoopVersion(), false));
    job.put(Response.INSTANCE_COUNT, integer2String(instances.getInstanceCount()));
    job.put(Response.KEEP_JOB_FLOW_ALIVE_WHEN_NO_STEPS,
            object2String(instances.getKeepJobFlowAliveWhenNoSteps(), true));
    job.put(Response.MASTER_INSTANCE_ID, object2String(instances.getMasterInstanceId(), false));
    job.put(Response.MASTER_INSTANCE_TYPE, object2String(instances.getMasterInstanceType(), false));
    job.put(Response.MASTER_PUBLIC_DNS_NAME, object2String(instances.getMasterPublicDnsName(), false));
    job.put(Response.AVAILABILITY_ZONE,
            object2String(instances.getPlacement().getAvailabilityZone(), false));
    job.put(Response.SLAVE_INSTANCE_TYPE, object2String(instances.getSlaveInstanceType(), false));

    // Bootstrap actions, when any are configured.
    if (!isEmpty(detail.getBootstrapActions())) {
        List<Object> actions = new ArrayList<Object>();
        for (BootstrapActionDetail actionDetail : detail.getBootstrapActions()) {
            BootstrapActionConfig config = actionDetail.getBootstrapActionConfig();
            ScriptBootstrapActionConfig script = config.getScriptBootstrapAction();

            Map<String, Object> action = new HashMap<String, Object>();
            action.put(Response.NAME, object2String(config.getName(), false));
            action.put(Response.PATH, object2String(script.getPath(), false));
            if (!isEmpty(script.getArgs())) {
                action.put(Response.ARGS, script.getArgs());
            }
            actions.add(action);
        }
        job.put(Response.BOOTSTRAP_ACTIONS, actions);
    }

    // Steps, when any are configured.
    if (!isEmpty(detail.getSteps())) {
        List<Object> steps = new ArrayList<Object>();
        for (StepDetail stepDetail : detail.getSteps()) {
            StepConfig stepConfig = stepDetail.getStepConfig();
            StepExecutionStatusDetail stepStatus = stepDetail.getExecutionStatusDetail();

            Map<String, Object> step = new HashMap<String, Object>();
            step.put(Response.NAME, stepConfig.getName());
            step.put(Response.ACTION_ON_FAILURE, stepConfig.getActionOnFailure());
            step.put(Response.STATE, object2String(stepStatus.getState(), false));
            step.put(Response.CREATION_DATE, object2String(stepStatus.getCreationDateTime(), true));
            step.put(Response.START_DATE, object2String(stepStatus.getStartDateTime(), true));
            step.put(Response.END_DATE, object2String(stepStatus.getEndDateTime(), true));

            HadoopJarStepConfig jarStep = stepConfig.getHadoopJarStep();
            step.put(Response.JAR, object2String(jarStep.getJar(), false));
            step.put(Response.MAIN_CLASS, object2String(jarStep.getMainClass(), false));
            if (!isEmpty(jarStep.getArgs())) {
                step.put(Response.ARGS, jarStep.getArgs());
            }
            steps.add(step);
        }
        job.put(Response.STEPS, steps);
    }

    return new JSONObject(job);
}