Example usage for com.amazonaws.services.elasticmapreduce.model JobFlowExecutionStatusDetail getEndDateTime

Introduction

On this page you can find example usage for com.amazonaws.services.elasticmapreduce.model JobFlowExecutionStatusDetail getEndDateTime.

Prototype


public java.util.Date getEndDateTime() 

Document

The completion date and time of the job flow.
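
The snippet below is a minimal, self-contained sketch and is not taken from the usage examples that follow. It shows one way the end date might be read after describing job flows; the client construction via AmazonElasticMapReduceClientBuilder and the use of default credentials and region are assumptions. getEndDateTime() returns null while the job flow has not yet finished.

import java.util.Date;

import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduceClientBuilder;
import com.amazonaws.services.elasticmapreduce.model.DescribeJobFlowsRequest;
import com.amazonaws.services.elasticmapreduce.model.JobFlowDetail;
import com.amazonaws.services.elasticmapreduce.model.JobFlowExecutionStatusDetail;

public class EndDateTimeExample {
    public static void main(String[] args) {
        // Assumes default credentials and region; adjust for your environment.
        AmazonElasticMapReduce emr = AmazonElasticMapReduceClientBuilder.defaultClient();

        for (JobFlowDetail jobFlow : emr.describeJobFlows(new DescribeJobFlowsRequest()).getJobFlows()) {
            JobFlowExecutionStatusDetail status = jobFlow.getExecutionStatusDetail();
            Date endDateTime = status.getEndDateTime();
            // getEndDateTime() is null until the job flow has completed.
            System.out.println(jobFlow.getJobFlowId() + " ended at: "
                    + (endDateTime == null ? "still running" : endDateTime));
        }
    }
}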

Usage

From source file: org.huahinframework.emanager.util.JobUtils.java

License: Apache License

public static JSONArray listJobFlow(AmazonElasticMapReduce emr) {
    List<JSONObject> l = new ArrayList<JSONObject>();

    // Describe job flows without any explicit filter.
    DescribeJobFlowsResult describeJobFlowsResult = emr.describeJobFlows(new DescribeJobFlowsRequest());
    for (JobFlowDetail jobFlowDetail : describeJobFlowsResult.getJobFlows()) {
        JobFlowExecutionStatusDetail executionStatusDetail = jobFlowDetail.getExecutionStatusDetail();
        Map<String, String> m = new HashMap<String, String>();
        m.put(Response.JOB_FLOW, jobFlowDetail.getJobFlowId());
        m.put(Response.STATE, executionStatusDetail.getState());
        m.put(Response.CREATION_DATE, executionStatusDetail.getCreationDateTime().toString());
        m.put(Response.START_DATE, object2String(executionStatusDetail.getStartDateTime(), true));
        // getEndDateTime() is null until the job flow has completed; object2String handles the conversion.
        m.put(Response.END_DATE, object2String(executionStatusDetail.getEndDateTime(), true));
        l.add(new JSONObject(m));
    }

    return new JSONArray(l);
}
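
A hypothetical caller for the helper above, assuming a default EMR client; the client setup is not part of the original JobUtils source.

AmazonElasticMapReduce emr = AmazonElasticMapReduceClientBuilder.defaultClient(); // assumed client setup
JSONArray jobFlows = JobUtils.listJobFlow(emr);
System.out.println(jobFlows.toString());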

From source file: org.huahinframework.emanager.util.JobUtils.java

License: Apache License

public static JSONArray runningsJobFlow(AmazonElasticMapReduce emr) {
    List<JSONObject> l = new ArrayList<JSONObject>();

    // Restrict the result to job flows that are currently starting, running, or waiting.
    DescribeJobFlowsRequest describeJobFlowsRequest = new DescribeJobFlowsRequest()
            .withJobFlowStates(JOB_FLOW_STATUS_STARTING, JOB_FLOW_STATUS_RUNNING, JOB_FLOW_STATUS_WAITING);
    DescribeJobFlowsResult describeJobFlowsResult = emr.describeJobFlows(describeJobFlowsRequest);
    for (JobFlowDetail jobFlowDetail : describeJobFlowsResult.getJobFlows()) {
        JobFlowExecutionStatusDetail executionStatusDetail = jobFlowDetail.getExecutionStatusDetail();
        Map<String, Object> m = new HashMap<String, Object>();
        m.put(Response.JOB_FLOW, jobFlowDetail.getJobFlowId());
        m.put(Response.STATE, executionStatusDetail.getState());
        m.put(Response.CREATION_DATE, executionStatusDetail.getCreationDateTime().toString());
        m.put(Response.START_DATE, object2String(executionStatusDetail.getStartDateTime(), true));
        m.put(Response.END_DATE, object2String(executionStatusDetail.getEndDateTime(), true));

        if (!isEmpty(jobFlowDetail.getSteps())) {
            List<Object> ll = new ArrayList<Object>();
            for (StepDetail sd : jobFlowDetail.getSteps()) {
                Map<String, Object> mm = new HashMap<String, Object>();
                StepConfig sc = sd.getStepConfig();
                StepExecutionStatusDetail sesd = sd.getExecutionStatusDetail();

                mm.put(Response.NAME, sc.getName());
                mm.put(Response.ACTION_ON_FAILURE, sc.getActionOnFailure());
                mm.put(Response.STATE, object2String(sesd.getState(), false));
                mm.put(Response.CREATION_DATE, object2String(sesd.getCreationDateTime(), true));
                mm.put(Response.START_DATE, object2String(sesd.getStartDateTime(), true));
                mm.put(Response.END_DATE, object2String(sesd.getEndDateTime(), true));

                HadoopJarStepConfig hjsc = sc.getHadoopJarStep();
                mm.put(Response.JAR, object2String(hjsc.getJar(), false));
                mm.put(Response.MAIN_CLASS, object2String(hjsc.getMainClass(), false));

                if (!isEmpty(hjsc.getArgs())) {
                    mm.put(Response.ARGS, hjsc.getArgs());
                }

                ll.add(mm);
            }
            m.put(Response.STEPS, ll);
        }

        l.add(new JSONObject(m));
    }

    return new JSONArray(l);
}

From source file: org.huahinframework.emanager.util.JobUtils.java

License: Apache License

public static JSONObject getJobFlow(String jobFlow, AmazonElasticMapReduce emr) {
    // Describe only the job flow with the given ID.
    DescribeJobFlowsRequest describeJobFlowsRequest = new DescribeJobFlowsRequest().withJobFlowIds(jobFlow);
    DescribeJobFlowsResult describeJobFlowsResult = emr.describeJobFlows(describeJobFlowsRequest);
    if (describeJobFlowsResult.getJobFlows().size() != 1) {
        return new JSONObject();
    }

    JobFlowDetail jobFlowDetail = describeJobFlowsResult.getJobFlows().get(0);
    JobFlowExecutionStatusDetail executionStatusDetail = jobFlowDetail.getExecutionStatusDetail();
    JobFlowInstancesDetail instancesDetail = jobFlowDetail.getInstances();

    Map<String, Object> m = new HashMap<String, Object>();
    m.put(Response.JOB_FLOW, jobFlowDetail.getJobFlowId());
    m.put(Response.STATE, executionStatusDetail.getState());
    m.put(Response.CREATION_DATE, executionStatusDetail.getCreationDateTime().toString());
    m.put(Response.START_DATE, object2String(executionStatusDetail.getStartDateTime(), true));
    m.put(Response.END_DATE, object2String(executionStatusDetail.getEndDateTime(), true));
    m.put(Response.AMI_VERSION, object2String(jobFlowDetail.getAmiVersion(), false));
    m.put(Response.NAME, jobFlowDetail.getName());
    m.put(Response.LOG_URI, object2String(jobFlowDetail.getLogUri(), false));

    if (!isEmpty(jobFlowDetail.getSupportedProducts())) {
        m.put(Response.SUPPORTED_PRODUCTS, jobFlowDetail.getSupportedProducts());
    }

    m.put(Response.EC2_KEY_NAME, object2String(instancesDetail.getEc2KeyName(), false));
    m.put(Response.EC2_SUBNET_ID, object2String(instancesDetail.getEc2SubnetId(), false));
    m.put(Response.HADOOP_VERSION, object2String(instancesDetail.getHadoopVersion(), false));
    m.put(Response.INSTANCE_COUNT, integer2String(instancesDetail.getInstanceCount()));
    m.put(Response.KEEP_JOB_FLOW_ALIVE_WHEN_NO_STEPS,
            object2String(instancesDetail.getKeepJobFlowAliveWhenNoSteps(), true));
    m.put(Response.MASTER_INSTANCE_ID, object2String(instancesDetail.getMasterInstanceId(), false));
    m.put(Response.MASTER_INSTANCE_TYPE, object2String(instancesDetail.getMasterInstanceType(), false));
    m.put(Response.MASTER_PUBLIC_DNS_NAME, object2String(instancesDetail.getMasterPublicDnsName(), false));
    m.put(Response.AVAILABILITY_ZONE,
            object2String(instancesDetail.getPlacement().getAvailabilityZone(), false));
    m.put(Response.SLAVE_INSTANCE_TYPE, object2String(instancesDetail.getSlaveInstanceType(), false));

    if (!isEmpty(jobFlowDetail.getBootstrapActions())) {
        List<Object> l = new ArrayList<Object>();
        for (BootstrapActionDetail bad : jobFlowDetail.getBootstrapActions()) {
            Map<String, Object> mm = new HashMap<String, Object>();

            BootstrapActionConfig bac = bad.getBootstrapActionConfig();
            ScriptBootstrapActionConfig sbac = bac.getScriptBootstrapAction();

            mm.put(Response.NAME, object2String(bac.getName(), false));
            mm.put(Response.PATH, object2String(sbac.getPath(), false));

            if (!isEmpty(sbac.getArgs())) {
                mm.put(Response.ARGS, sbac.getArgs());
            }
            l.add(mm);
        }
        m.put(Response.BOOTSTRAP_ACTIONS, l);
    }

    if (!isEmpty(jobFlowDetail.getSteps())) {
        List<Object> l = new ArrayList<Object>();
        for (StepDetail sd : jobFlowDetail.getSteps()) {
            Map<String, Object> mm = new HashMap<String, Object>();
            StepConfig sc = sd.getStepConfig();
            StepExecutionStatusDetail sesd = sd.getExecutionStatusDetail();

            mm.put(Response.NAME, sc.getName());
            mm.put(Response.ACTION_ON_FAILURE, sc.getActionOnFailure());
            mm.put(Response.STATE, object2String(sesd.getState(), false));
            mm.put(Response.CREATION_DATE, object2String(sesd.getCreationDateTime(), true));
            mm.put(Response.START_DATE, object2String(sesd.getStartDateTime(), true));
            mm.put(Response.END_DATE, object2String(sesd.getEndDateTime(), true));

            HadoopJarStepConfig hjsc = sc.getHadoopJarStep();
            mm.put(Response.JAR, object2String(hjsc.getJar(), false));
            mm.put(Response.MAIN_CLASS, object2String(hjsc.getMainClass(), false));

            if (!isEmpty(hjsc.getArgs())) {
                mm.put(Response.ARGS, hjsc.getArgs());
            }

            l.add(mm);
        }
        m.put(Response.STEPS, l);
    }

    return new JSONObject(m);
}