Usage examples for `com.amazonaws.services.elasticmapreduce.model.JobFlowDetail.getName()`
public String getName()
The name of the job flow.
From source file: datameer.awstasks.aws.emr.EmrCluster.java
License: Apache License
/**
 * Blocks until the EMR job flow identified by {@code jobFlowId} has left its
 * startup phase (STARTING or BOOTSTRAPPING), polling via {@code doWhileNot}
 * at {@code getRequestInterval()} spacing.
 *
 * <p>Once the startup phase is over, the master host name, instance count and
 * start time are captured into the corresponding instance fields. If the flow
 * ended up in a non-operational state, an {@link IllegalStateException} is
 * thrown instead.
 *
 * @param jobFlowId id of the job flow to wait for
 * @throws InterruptedException if the polling wait is interrupted
 */
private void waitUntilClusterStarted(final String jobFlowId) throws InterruptedException {
    doWhileNot(new Callable<Boolean>() {
        @Override
        public Boolean call() throws Exception {
            JobFlowDetail flowDetail = getJobFlowDetail(jobFlowId);
            JobFlowState currentState = JobFlowState.valueOf(flowDetail.getExecutionStatusDetail().getState());
            LOG.info("elastic cluster '" + flowDetail.getName() + "/" + jobFlowId + "' in state '" + currentState + "'");
            // startup is over once we are past both bootstrap states, whatever state follows
            boolean startupPhaseOver = currentState != JobFlowState.STARTING
                    && currentState != JobFlowState.BOOTSTRAPPING;
            if (startupPhaseOver) {
                // capture cluster coordinates before checking for failure
                _masterHost = flowDetail.getInstances().getMasterPublicDnsName();
                _instanceCount = flowDetail.getInstances().getInstanceCount();
                if (!currentState.isOperational()) {
                    throw new IllegalStateException(
                            "starting of job flow '" + jobFlowId + "' failed with state '" + currentState + "'");
                }
                _startTime = flowDetail.getExecutionStatusDetail().getStartDateTime().getTime();
            }
            return startupPhaseOver;
        }
    }, getRequestInterval());
}
From source file: datameer.awstasks.aws.emr.EmrCluster.java
License: Apache License
protected List<JobFlowDetail> getRunningJobFlowDetailsByName(String name) { DescribeJobFlowsResult describeJobFlows = _emrWebService.describeJobFlows(new DescribeJobFlowsRequest() .withJobFlowStates(JobFlowState.STARTING.name(), JobFlowState.BOOTSTRAPPING.name(), JobFlowState.WAITING.name(), JobFlowState.RUNNING.name())); List<JobFlowDetail> jobFlows = describeJobFlows.getJobFlows(); for (Iterator<JobFlowDetail> iterator = jobFlows.iterator(); iterator.hasNext();) { JobFlowDetail jobFlowDetail = iterator.next(); if (!name.equals(jobFlowDetail.getName())) { iterator.remove();//from w w w.ja v a 2 s. c o m } } return jobFlows; }
From source file: org.huahinframework.emanager.util.JobUtils.java
License: Apache License
/**
 * Describes a single EMR job flow and renders it as a {@link JSONObject}.
 *
 * <p>Builds a flat map of flow-level attributes (id, state, dates, AMI version,
 * name, log URI, supported products), instance details (key name, subnet,
 * Hadoop version, instance counts/types, placement), plus nested lists for
 * bootstrap actions and steps, then wraps the map in a JSONObject.
 *
 * <p>NOTE(review): {@code object2String}, {@code integer2String} and
 * {@code isEmpty} are helpers defined elsewhere in this class; presumably the
 * boolean flag on {@code object2String} selects null-vs-empty handling — verify.
 *
 * @param jobFlow the job flow id to describe
 * @param emr     the EMR client used for the describe call
 * @return JSON representation of the flow, or an empty JSONObject when the
 *         describe call does not return exactly one flow
 */
public static JSONObject getJobFlow(String jobFlow, AmazonElasticMapReduce emr) {
    DescribeJobFlowsRequest describeJobFlowsRequest = new DescribeJobFlowsRequest().withJobFlowIds(jobFlow);
    DescribeJobFlowsResult describeJobFlowsResult = emr.describeJobFlows(describeJobFlowsRequest);
    // exactly one flow is expected for the id; anything else yields an empty object
    if (describeJobFlowsResult.getJobFlows().size() != 1) {
        return new JSONObject();
    }
    JobFlowDetail jobFlowDetail = describeJobFlowsResult.getJobFlows().get(0);
    JobFlowExecutionStatusDetail executionStatusDetail = jobFlowDetail.getExecutionStatusDetail();
    JobFlowInstancesDetail instancesDetail = jobFlowDetail.getInstances();
    Map<String, Object> m = new HashMap<String, Object>();
    // flow-level attributes
    m.put(Response.JOB_FLOW, jobFlowDetail.getJobFlowId());
    m.put(Response.STATE, executionStatusDetail.getState());
    // creation date is dereferenced directly; start/end may be null and go through object2String
    m.put(Response.CREATION_DATE, executionStatusDetail.getCreationDateTime().toString());
    m.put(Response.START_DATE, object2String(executionStatusDetail.getStartDateTime(), true));
    m.put(Response.END_DATE, object2String(executionStatusDetail.getEndDateTime(), true));
    m.put(Response.AMI_VERSION, object2String(jobFlowDetail.getAmiVersion(), false));
    m.put(Response.NAME, jobFlowDetail.getName());
    m.put(Response.LOG_URI, object2String(jobFlowDetail.getLogUri(), false));
    if (!isEmpty(jobFlowDetail.getSupportedProducts())) {
        m.put(Response.SUPPORTED_PRODUCTS, jobFlowDetail.getSupportedProducts());
    }
    // instance-group details
    m.put(Response.EC2_KEY_NAME, object2String(instancesDetail.getEc2KeyName(), false));
    m.put(Response.EC2_SUBNET_ID, object2String(instancesDetail.getEc2SubnetId(), false));
    m.put(Response.HADOOP_VERSION, object2String(instancesDetail.getHadoopVersion(), false));
    m.put(Response.INSTANCE_COUNT, integer2String(instancesDetail.getInstanceCount()));
    m.put(Response.KEEP_JOB_FLOW_ALIVE_WHEN_NO_STEPS,
            object2String(instancesDetail.getKeepJobFlowAliveWhenNoSteps(), true));
    m.put(Response.MASTER_INSTANCE_ID, object2String(instancesDetail.getMasterInstanceId(), false));
    m.put(Response.MASTER_INSTANCE_TYPE, object2String(instancesDetail.getMasterInstanceType(), false));
    m.put(Response.MASTER_PUBLIC_DNS_NAME, object2String(instancesDetail.getMasterPublicDnsName(), false));
    // NOTE(review): getPlacement() is dereferenced unconditionally — confirm placement is always set
    m.put(Response.AVAILABILITY_ZONE,
            object2String(instancesDetail.getPlacement().getAvailabilityZone(), false));
    m.put(Response.SLAVE_INSTANCE_TYPE, object2String(instancesDetail.getSlaveInstanceType(), false));
    // bootstrap actions: name, script path and optional args per action
    if (!isEmpty(jobFlowDetail.getBootstrapActions())) {
        List<Object> l = new ArrayList<Object>();
        for (BootstrapActionDetail bad : jobFlowDetail.getBootstrapActions()) {
            Map<String, Object> mm = new HashMap<String, Object>();
            BootstrapActionConfig bac = bad.getBootstrapActionConfig();
            ScriptBootstrapActionConfig sbac = bac.getScriptBootstrapAction();
            mm.put(Response.NAME, object2String(bac.getName(), false));
            mm.put(Response.PATH, object2String(sbac.getPath(), false));
            if (!isEmpty(sbac.getArgs())) {
                mm.put(Response.ARGS, sbac.getArgs());
            }
            l.add(mm);
        }
        m.put(Response.BOOTSTRAP_ACTIONS, l);
    }
    // steps: config (name, failure action, jar/main class/args) plus execution status dates
    if (!isEmpty(jobFlowDetail.getSteps())) {
        List<Object> l = new ArrayList<Object>();
        for (StepDetail sd : jobFlowDetail.getSteps()) {
            Map<String, Object> mm = new HashMap<String, Object>();
            StepConfig sc = sd.getStepConfig();
            StepExecutionStatusDetail sesd = sd.getExecutionStatusDetail();
            mm.put(Response.NAME, sc.getName());
            mm.put(Response.ACTION_ON_FAILURE, sc.getActionOnFailure());
            mm.put(Response.STATE, object2String(sesd.getState(), false));
            mm.put(Response.CREATION_DATE, object2String(sesd.getCreationDateTime(), true));
            mm.put(Response.START_DATE, object2String(sesd.getStartDateTime(), true));
            mm.put(Response.END_DATE, object2String(sesd.getEndDateTime(), true));
            HadoopJarStepConfig hjsc = sc.getHadoopJarStep();
            mm.put(Response.JAR, object2String(hjsc.getJar(), false));
            mm.put(Response.MAIN_CLASS, object2String(hjsc.getMainClass(), false));
            if (!isEmpty(hjsc.getArgs())) {
                mm.put(Response.ARGS, hjsc.getArgs());
            }
            l.add(mm);
        }
        m.put(Response.STEPS, l);
    }
    return new JSONObject(m);
}