Example usage for com.amazonaws.services.elasticmapreduce.model StepConfig setActionOnFailure

List of usage examples for com.amazonaws.services.elasticmapreduce.model StepConfig setActionOnFailure

Introduction

In this page you can find the example usage for com.amazonaws.services.elasticmapreduce.model StepConfig setActionOnFailure.

Prototype


public void setActionOnFailure(ActionOnFailure actionOnFailure) 

Source Link

Document

The action to take when the cluster step fails.

Usage

From source file:datameer.awstasks.aws.emr.EmrCluster.java

License: Apache License

/**
 * Builds the standard EMR "Setup Hadoop Debugging" step: script-runner
 * executes the state-pusher fetch script, and the job flow is terminated
 * if this step fails.
 */
private static StepConfig createDebugStep() {
    // Configure the jar invocation first: script-runner with the
    // state-pusher script as its single argument.
    HadoopJarStepConfig scriptRunnerStep = new HadoopJarStepConfig();
    scriptRunnerStep.setJar("s3://us-east-1.elasticmapreduce/libs/script-runner/script-runner.jar");
    scriptRunnerStep.getArgs().add("s3://us-east-1.elasticmapreduce/libs/state-pusher/0.1/fetch");

    StepConfig step = new StepConfig();
    step.setName("Setup Hadoop Debugging");
    step.setActionOnFailure("TERMINATE_JOB_FLOW");
    step.setHadoopJarStep(scriptRunnerStep);
    return step;
}

From source file:datameer.awstasks.aws.emr.EmrCluster.java

License: Apache License

/**
 * Submits one Hadoop jar step to the current job flow and returns a future
 * for tracking it. The step is configured with action-on-failure "CONTINUE",
 * so a failing step does not terminate the flow.
 *
 * @param name         step name, also used to locate the step's index afterwards
 * @param jobJar       local jar file to upload, or {@code null} to skip setting a jar
 * @param s3JobJarName name to use for the jar when uploading to S3
 * @param mainClass    main class to run, or {@code null} to rely on the jar manifest
 * @param args         arguments passed to the Hadoop jar step
 * @return a future for the submitted step
 */
public StepFuture executeJobStep(String name, File jobJar, String s3JobJarName, Class<?> mainClass,
        String... args) {
    checkConnection(true);

    HadoopJarStepConfig hadoopJarStep = new HadoopJarStepConfig();
    if (jobJar != null) {
        hadoopJarStep.setJar(uploadingJobJar(jobJar, s3JobJarName));
    }
    if (mainClass != null) {
        hadoopJarStep.setMainClass(mainClass.getName());
    }
    hadoopJarStep.setArgs(Arrays.asList(args));

    StepConfig step = new StepConfig();
    step.setName(name);
    step.setActionOnFailure("CONTINUE");
    step.setHadoopJarStep(hadoopJarStep);

    _emrWebService
            .addJobFlowSteps(new AddJobFlowStepsRequest().withJobFlowId(_jobFlowId).withSteps(step));
    // Cached flow descriptions are now stale; drop them before computing the step index.
    _emrWebService.clearDescribeJobFlowCache();
    return new StepFuture(step.getName(), getStepIndex(getJobFlowDetail(_jobFlowId), name));
}

From source file:org.pentaho.amazon.hive.job.AmazonHiveJobExecutor.java

License: Apache License

/**
 * Configure the HadoopJarStep, which is one Hadoop step of an EMR job to be submitted to AWS.
 *
 * The step's action-on-failure depends on {@code isAlive()}: for a "keep alive"
 * job flow the flow waits after a failed step (CANCEL_AND_WAIT); otherwise the
 * whole flow is terminated (TERMINATE_JOB_FLOW).
 *
 * @param stepName
 *          name of step
 * @param stagingS3JarUrl
 *          URL for MapReduce jar file
 * @param args
 *          arguments for MapReduce jar (split via {@code ConfigArgs} on spaces)
 * @return configuration data object for the step, wrapped in a single-element list
 */
public List<StepConfig> ConfigHadoopJarStep(String stepName, String stagingS3JarUrl, String args) {

    // Fix: the original allocated a throwaway ArrayList and immediately
    // overwrote it with the ConfigArgs result; assign directly instead.
    List<String> jarStepArgs = ConfigArgs(args, " "); //$NON-NLS-1$

    HadoopJarStepConfig hadoopJarStep = new HadoopJarStepConfig();
    hadoopJarStep.setJar(stagingS3JarUrl);
    hadoopJarStep.setArgs(jarStepArgs);

    StepConfig stepConfig = new StepConfig();
    stepConfig.setName(stepName);
    stepConfig.setHadoopJarStep(hadoopJarStep);
    if (isAlive()) { // Job flow stays in "WAITING" state if this step fails.
        stepConfig.setActionOnFailure("CANCEL_AND_WAIT"); //$NON-NLS-1$
    } else { // Job flow is terminated if this step fails.
        stepConfig.setActionOnFailure("TERMINATE_JOB_FLOW"); //$NON-NLS-1$
    }

    List<StepConfig> steps = new ArrayList<StepConfig>();
    steps.add(stepConfig);

    return steps;
}