List of usage examples for com.amazonaws.services.elasticmapreduce.model ActionOnFailure CANCEL_AND_WAIT
ActionOnFailure CANCEL_AND_WAIT
To view the source code for com.amazonaws.services.elasticmapreduce.model ActionOnFailure CANCEL_AND_WAIT, click the Source Link below.
From source file: org.finra.dm.dao.helper.EmrHelper.java
License: Apache License
/** * Builds the StepConfig for the Hadoop jar step. * * @param stepName the step name./*from w w w.j a va 2 s .com*/ * @param jarLocation the location of jar. * @param mainClass the main class. * @param scriptArguments the arguments. * @param isContinueOnError indicate what to do on error. * * @return the stepConfig. */ public StepConfig getEmrHadoopJarStepConfig(String stepName, String jarLocation, String mainClass, List<String> scriptArguments, Boolean isContinueOnError) { // Default ActionOnFailure is to cancel the execution and wait ActionOnFailure actionOnFailure = ActionOnFailure.CANCEL_AND_WAIT; if (isContinueOnError != null && isContinueOnError) { // Override based on user input actionOnFailure = ActionOnFailure.CONTINUE; } // If there are no arguments if (CollectionUtils.isEmpty(scriptArguments)) { // Build the StepConfig object and return return new StepConfig().withName(stepName.trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep( new HadoopJarStepConfig().withJar(jarLocation.trim()).withMainClass(mainClass)); } else { // If there are arguments, include the arguments in the StepConfig object return new StepConfig().withName(stepName.trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep( new HadoopJarStepConfig().withJar(jarLocation.trim()).withMainClass(mainClass) .withArgs(scriptArguments.toArray(new String[scriptArguments.size()]))); } }
From source file: org.finra.dm.service.helper.EmrHiveStepHelper.java
License: Apache License
@Override public StepConfig getEmrStepConfig(Object step) { EmrHiveStep emrHiveStep = (EmrHiveStep) step; // Default ActionOnFailure is to cancel the execution and wait ActionOnFailure actionOnFailure = ActionOnFailure.CANCEL_AND_WAIT; if (emrHiveStep.isContinueOnError() != null && emrHiveStep.isContinueOnError()) { // Override based on user input actionOnFailure = ActionOnFailure.CONTINUE; }//from w w w . j ava2 s .com // If there are no arguments to hive script if (CollectionUtils.isEmpty(emrHiveStep.getScriptArguments())) { // Just build the StepConfig object and return return new StepConfig().withName(emrHiveStep.getStepName().trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep( new StepFactory().newRunHiveScriptStep(emrHiveStep.getScriptLocation().trim())); } // If there are arguments specified else { // For each argument, add "-d" option List<String> hiveArgs = new ArrayList<>(); for (String hiveArg : emrHiveStep.getScriptArguments()) { hiveArgs.add("-d"); hiveArgs.add(hiveArg); } // Return the StepConfig object return new StepConfig().withName(emrHiveStep.getStepName().trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep(new StepFactory().newRunHiveScriptStep( emrHiveStep.getScriptLocation().trim(), hiveArgs.toArray(new String[hiveArgs.size()]))); } }
From source file: org.finra.dm.service.helper.EmrOozieStepHelper.java
License: Apache License
@Override public StepConfig getEmrStepConfig(Object step) { EmrOozieStep oozieStep = (EmrOozieStep) step; // Hadoop Jar provided by Amazon to run shell script String hadoopJarForShellScript = configurationHelper.getProperty(ConfigurationValue.EMR_SHELL_SCRIPT_JAR); // Oozie SDK cannot be used at the moment, as the Oozie port 11000 needs to be opened for Oozie SDK usage // As a workaround, a custom shell script is used to run the Oozie client to add any oozie job // Once Oozie SDK implementation is in place, this custom shell script can be removed // Get the custom oozie shell script String oozieShellScript = emrHelper.getS3StagingLocation() + configurationHelper.getProperty(ConfigurationValue.S3_URL_PATH_DELIMITER) + configurationHelper.getProperty(ConfigurationValue.EMR_OOZIE_RUN_SCRIPT); // Default ActionOnFailure is to cancel the execution and wait ActionOnFailure actionOnFailure = ActionOnFailure.CANCEL_AND_WAIT; if (oozieStep.isContinueOnError() != null && oozieStep.isContinueOnError()) { // Override based on user input actionOnFailure = ActionOnFailure.CONTINUE; }//from w w w . j a v a2 s.c om // Add the arguments to the custom shell script List<String> argsList = new ArrayList<>(); // Get the oozie client run shell script argsList.add(oozieShellScript); // Specify the arguments argsList.add(oozieStep.getWorkflowXmlLocation().trim()); argsList.add(oozieStep.getOoziePropertiesFileLocation().trim()); // Build the StepConfig object and return HadoopJarStepConfig jarConfig = new HadoopJarStepConfig(hadoopJarForShellScript).withArgs(argsList); return new StepConfig().withName(oozieStep.getStepName().trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep(jarConfig); }
From source file: org.finra.dm.service.helper.EmrPigStepHelper.java
License: Apache License
@Override public StepConfig getEmrStepConfig(Object step) { EmrPigStep pigStep = (EmrPigStep) step; // Default ActionOnFailure is to cancel the execution and wait ActionOnFailure actionOnFailure = ActionOnFailure.CANCEL_AND_WAIT; if (pigStep.isContinueOnError() != null && pigStep.isContinueOnError()) { // Override based on user input actionOnFailure = ActionOnFailure.CONTINUE; }//from w w w . ja va 2s .c o m // If there are no arguments to hive script if (CollectionUtils.isEmpty(pigStep.getScriptArguments())) { // Just build the StepConfig object and return return new StepConfig().withName(pigStep.getStepName().trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep(new StepFactory().newRunPigScriptStep(pigStep.getScriptLocation().trim())); } // If there are arguments specified else { return new StepConfig().withName(pigStep.getStepName().trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep(new StepFactory().newRunPigScriptStep(pigStep.getScriptLocation().trim(), pigStep.getScriptArguments().toArray(new String[pigStep.getScriptArguments().size()]))); } }
From source file: org.finra.dm.service.helper.EmrShellStepHelper.java
License: Apache License
@Override public StepConfig getEmrStepConfig(Object step) { EmrShellStep emrShellStep = (EmrShellStep) step; // Hadoop Jar provided by Amazon for running Shell Scripts String hadoopJarForShellScript = configurationHelper.getProperty(ConfigurationValue.EMR_SHELL_SCRIPT_JAR); // Default ActionOnFailure is to cancel the execution and wait ActionOnFailure actionOnFailure = ActionOnFailure.CANCEL_AND_WAIT; if (emrShellStep.isContinueOnError() != null && emrShellStep.isContinueOnError()) { // Override based on user input actionOnFailure = ActionOnFailure.CONTINUE; }// w w w . ja v a 2s .com // Add the script location List<String> argsList = new ArrayList<>(); argsList.add(emrShellStep.getScriptLocation().trim()); // Add the script arguments if (!CollectionUtils.isEmpty(emrShellStep.getScriptArguments())) { for (String argument : emrShellStep.getScriptArguments()) { argsList.add(argument.trim()); } } // Return the StepConfig object HadoopJarStepConfig jarConfig = new HadoopJarStepConfig(hadoopJarForShellScript).withArgs(argsList); return new StepConfig().withName(emrShellStep.getStepName().trim()).withActionOnFailure(actionOnFailure) .withHadoopJarStep(jarConfig); }
From source file: org.pentaho.amazon.client.impl.EmrClientImpl.java
License: Apache License
/**
 * Creates the Hive step configuration for the EMR job flow.
 *
 * Command-line arguments are split on whitespace and each token is normalized via
 * {@code replaceDoubleS3} before being handed to the Hive script step.
 *
 * @param stagingS3qUrl the S3 staging URL of the Hive script
 * @param cmdLineArgs the whitespace-separated command-line arguments, may be null
 *
 * @return the Hive step configuration
 */
private StepConfig configureHiveStep(String stagingS3qUrl, String cmdLineArgs)
{
    String[] hiveArgs;
    if (cmdLineArgs == null) {
        // Preserve the original contract: a null argument string maps to a single empty argument.
        hiveArgs = new String[] { "" };
    } else {
        hiveArgs = Arrays.stream(cmdLineArgs.split("\\s+"))
            .map(arg -> replaceDoubleS3(arg))
            .toArray(String[]::new);
    }

    StepConfig hiveStepConfig =
        new StepConfig("Hive", new StepFactory().newRunHiveScriptStep(stagingS3qUrl, hiveArgs));

    // A long-lived ("alive") cluster waits for intervention on failure; otherwise terminate the job flow.
    hiveStepConfig.withActionOnFailure(alive ? ActionOnFailure.CANCEL_AND_WAIT : ActionOnFailure.TERMINATE_JOB_FLOW);

    return hiveStepConfig;
}
From source file: org.pentaho.amazon.client.impl.EmrClientImpl.java
License: Apache License
private StepConfig initHadoopStep(String jarUrl, String mainClass, List<String> jarStepArgs) { StepConfig stepConfig = new StepConfig(); stepConfig.setName("custom jar: " + jarUrl); stepConfig.setHadoopJarStep(configureHadoopStep(jarUrl, mainClass, jarStepArgs)); if (this.alive) { stepConfig.withActionOnFailure(ActionOnFailure.CANCEL_AND_WAIT); } else {//from w w w . jav a 2 s . c o m stepConfig.withActionOnFailure(ActionOnFailure.TERMINATE_JOB_FLOW); } return stepConfig; }