List of usage examples for com.amazonaws.services.elasticmapreduce.model BootstrapActionConfig getScriptBootstrapAction
public ScriptBootstrapActionConfig getScriptBootstrapAction()
The script run by the bootstrap action.
From source file:org.finra.dm.dao.impl.EmrDaoImpl.java
License:Apache License
private void addDaemonBootstrapActionConfig(EmrClusterDefinition emrClusterDefinition, ArrayList<BootstrapActionConfig> bootstrapActions) { // Add daemon Configuration support if needed if (!CollectionUtils.isEmpty(emrClusterDefinition.getDaemonConfigurations())) { BootstrapActionConfig daemonBootstrapActionConfig = getBootstrapActionConfig( ConfigurationValue.EMR_CONFIGURE_DAEMON.getKey(), configurationHelper.getProperty(ConfigurationValue.EMR_CONFIGURE_DAEMON)); // Add arguments to the bootstrap script ArrayList<String> argList = new ArrayList<>(); for (Parameter daemonConfig : emrClusterDefinition.getDaemonConfigurations()) { argList.add(daemonConfig.getName() + "=" + daemonConfig.getValue()); }/*w w w.j a v a 2 s.c o m*/ // Add the bootstrap action with arguments daemonBootstrapActionConfig.getScriptBootstrapAction().setArgs(argList); bootstrapActions.add(daemonBootstrapActionConfig); } }
From source file:org.finra.dm.dao.impl.EmrDaoImpl.java
License:Apache License
private void addHadoopBootstrapActionConfig(EmrClusterDefinition emrClusterDefinition, ArrayList<BootstrapActionConfig> bootstrapActions) { // Add hadoop Configuration support if needed if (!CollectionUtils.isEmpty(emrClusterDefinition.getHadoopConfigurations())) { ArrayList<String> argList = new ArrayList<>(); BootstrapActionConfig hadoopBootstrapActionConfig = getBootstrapActionConfig( ConfigurationValue.EMR_CONFIGURE_HADOOP.getKey(), configurationHelper.getProperty(ConfigurationValue.EMR_CONFIGURE_HADOOP)); // If config files are available, add them as arguments for (Object hadoopConfigObject : emrClusterDefinition.getHadoopConfigurations()) { // If the Config Files are available, add them as arguments if (hadoopConfigObject instanceof ConfigurationFiles) { for (ConfigurationFile configurationFile : ((ConfigurationFiles) hadoopConfigObject) .getConfigurationFiles()) { argList.add(configurationFile.getFileNameShortcut()); argList.add(configurationFile.getConfigFileLocation()); }// www . j a va2 s .c o m } // If the key value pairs are available, add them as arguments if (hadoopConfigObject instanceof KeyValuePairConfigurations) { for (KeyValuePairConfiguration keyValuePairConfiguration : ((KeyValuePairConfigurations) hadoopConfigObject) .getKeyValuePairConfigurations()) { argList.add(keyValuePairConfiguration.getKeyValueShortcut()); argList.add(keyValuePairConfiguration.getAttribKey() + "=" + keyValuePairConfiguration.getAttribVal()); } } } if (!CollectionUtils.isEmpty(argList)) { // Add the bootstrap action with arguments hadoopBootstrapActionConfig.getScriptBootstrapAction().setArgs(argList); bootstrapActions.add(hadoopBootstrapActionConfig); } } }
From source file:org.finra.dm.dao.impl.EmrDaoImpl.java
License:Apache License
private void addCustomBootstrapActionConfig(EmrClusterDefinition emrClusterDefinition, ArrayList<BootstrapActionConfig> bootstrapActions) { // Add Custom bootstrap script support if needed if (!CollectionUtils.isEmpty(emrClusterDefinition.getCustomBootstrapActionAll())) { for (ScriptDefinition scriptDefinition : emrClusterDefinition.getCustomBootstrapActionAll()) { BootstrapActionConfig customActionConfigAll = getBootstrapActionConfig( scriptDefinition.getScriptName(), scriptDefinition.getScriptLocation()); ArrayList<String> argList = new ArrayList<>(); if (!CollectionUtils.isEmpty(scriptDefinition.getScriptArguments())) { for (String argument : scriptDefinition.getScriptArguments()) { // Trim the argument argList.add(argument.trim()); }/*from ww w . ja va 2 s.c o m*/ } // Set arguments to bootstrap action customActionConfigAll.getScriptBootstrapAction().setArgs(argList); bootstrapActions.add(customActionConfigAll); } } }
From source file:org.finra.dm.dao.impl.EmrDaoImpl.java
License:Apache License
private void addCustomMasterBootstrapActionConfig(EmrClusterDefinition emrClusterDefinition, ArrayList<BootstrapActionConfig> bootstrapActions) { // Add Master custom bootstrap script support if needed if (!CollectionUtils.isEmpty(emrClusterDefinition.getCustomBootstrapActionMaster())) { for (ScriptDefinition scriptDefinition : emrClusterDefinition.getCustomBootstrapActionMaster()) { BootstrapActionConfig bootstrapActionConfig = getBootstrapActionConfig( scriptDefinition.getScriptName(), configurationHelper.getProperty(ConfigurationValue.EMR_CONDITIONAL_SCRIPT)); // Add arguments to the bootstrap script ArrayList<String> argList = new ArrayList<>(); // Execute this script only on the master node. argList.add(configurationHelper.getProperty(ConfigurationValue.EMR_NODE_CONDITION)); argList.add(scriptDefinition.getScriptLocation()); if (!CollectionUtils.isEmpty(scriptDefinition.getScriptArguments())) { for (String argument : scriptDefinition.getScriptArguments()) { // Trim the argument argList.add(argument.trim()); }//ww w.j av a2 s. c o m } bootstrapActionConfig.getScriptBootstrapAction().setArgs(argList); bootstrapActions.add(bootstrapActionConfig); } } }
From source file:org.finra.herd.dao.impl.EmrDaoImpl.java
License:Apache License
private void addHadoopBootstrapActionConfig(EmrClusterDefinition emrClusterDefinition, ArrayList<BootstrapActionConfig> bootstrapActions) { // Add hadoop Configuration support if needed if (!CollectionUtils.isEmpty(emrClusterDefinition.getHadoopConfigurations())) { ArrayList<String> argList = new ArrayList<>(); BootstrapActionConfig hadoopBootstrapActionConfig = getBootstrapActionConfig( ConfigurationValue.EMR_CONFIGURE_HADOOP.getKey(), configurationHelper.getProperty(ConfigurationValue.EMR_CONFIGURE_HADOOP)); // If config files are available, add them as arguments for (Object hadoopConfigObject : emrClusterDefinition.getHadoopConfigurations()) { // If the Config Files are available, add them as arguments if (hadoopConfigObject instanceof ConfigurationFiles) { for (ConfigurationFile configurationFile : ((ConfigurationFiles) hadoopConfigObject) .getConfigurationFiles()) { argList.add(configurationFile.getFileNameShortcut()); argList.add(configurationFile.getConfigFileLocation()); }//from w ww . j a v a2 s.c o m } // If the key value pairs are available, add them as arguments if (hadoopConfigObject instanceof KeyValuePairConfigurations) { for (KeyValuePairConfiguration keyValuePairConfiguration : ((KeyValuePairConfigurations) hadoopConfigObject) .getKeyValuePairConfigurations()) { argList.add(keyValuePairConfiguration.getKeyValueShortcut()); argList.add(keyValuePairConfiguration.getAttribKey() + "=" + keyValuePairConfiguration.getAttribVal()); } } } // Add the bootstrap action with arguments hadoopBootstrapActionConfig.getScriptBootstrapAction().setArgs(argList); bootstrapActions.add(hadoopBootstrapActionConfig); } }
From source file:org.huahinframework.emanager.util.JobUtils.java
License:Apache License
/**
 * Describes a single EMR job flow and renders its details as a {@link JSONObject}.
 * Returns an empty JSON object when the describe call does not yield exactly one job flow.
 *
 * @param jobFlow the job flow ID to describe
 * @param emr the EMR client used to issue the describe request
 * @return a JSON object of job flow details, or an empty JSON object if not found
 */
public static JSONObject getJobFlow(String jobFlow, AmazonElasticMapReduce emr) {
    DescribeJobFlowsRequest request = new DescribeJobFlowsRequest().withJobFlowIds(jobFlow);
    DescribeJobFlowsResult result = emr.describeJobFlows(request);

    // Exactly one matching job flow is expected; anything else yields an empty response.
    if (result.getJobFlows().size() != 1) {
        return new JSONObject();
    }

    JobFlowDetail detail = result.getJobFlows().get(0);
    JobFlowExecutionStatusDetail execStatus = detail.getExecutionStatusDetail();
    JobFlowInstancesDetail instances = detail.getInstances();

    Map<String, Object> body = new HashMap<String, Object>();

    // Job flow identity and execution status.
    body.put(Response.JOB_FLOW, detail.getJobFlowId());
    body.put(Response.STATE, execStatus.getState());
    body.put(Response.CREATION_DATE, execStatus.getCreationDateTime().toString());
    body.put(Response.START_DATE, object2String(execStatus.getStartDateTime(), true));
    body.put(Response.END_DATE, object2String(execStatus.getEndDateTime(), true));
    body.put(Response.AMI_VERSION, object2String(detail.getAmiVersion(), false));
    body.put(Response.NAME, detail.getName());
    body.put(Response.LOG_URI, object2String(detail.getLogUri(), false));
    if (!isEmpty(detail.getSupportedProducts())) {
        body.put(Response.SUPPORTED_PRODUCTS, detail.getSupportedProducts());
    }

    // Instance-group details.
    body.put(Response.EC2_KEY_NAME, object2String(instances.getEc2KeyName(), false));
    body.put(Response.EC2_SUBNET_ID, object2String(instances.getEc2SubnetId(), false));
    body.put(Response.HADOOP_VERSION, object2String(instances.getHadoopVersion(), false));
    body.put(Response.INSTANCE_COUNT, integer2String(instances.getInstanceCount()));
    body.put(Response.KEEP_JOB_FLOW_ALIVE_WHEN_NO_STEPS,
            object2String(instances.getKeepJobFlowAliveWhenNoSteps(), true));
    body.put(Response.MASTER_INSTANCE_ID, object2String(instances.getMasterInstanceId(), false));
    body.put(Response.MASTER_INSTANCE_TYPE, object2String(instances.getMasterInstanceType(), false));
    body.put(Response.MASTER_PUBLIC_DNS_NAME, object2String(instances.getMasterPublicDnsName(), false));
    body.put(Response.AVAILABILITY_ZONE,
            object2String(instances.getPlacement().getAvailabilityZone(), false));
    body.put(Response.SLAVE_INSTANCE_TYPE, object2String(instances.getSlaveInstanceType(), false));

    // Bootstrap actions: name, script path, and optional arguments for each.
    if (!isEmpty(detail.getBootstrapActions())) {
        List<Object> actionList = new ArrayList<Object>();
        for (BootstrapActionDetail actionDetail : detail.getBootstrapActions()) {
            Map<String, Object> actionMap = new HashMap<String, Object>();
            BootstrapActionConfig actionConfig = actionDetail.getBootstrapActionConfig();
            ScriptBootstrapActionConfig scriptConfig = actionConfig.getScriptBootstrapAction();
            actionMap.put(Response.NAME, object2String(actionConfig.getName(), false));
            actionMap.put(Response.PATH, object2String(scriptConfig.getPath(), false));
            if (!isEmpty(scriptConfig.getArgs())) {
                actionMap.put(Response.ARGS, scriptConfig.getArgs());
            }
            actionList.add(actionMap);
        }
        body.put(Response.BOOTSTRAP_ACTIONS, actionList);
    }

    // Steps: config, execution status, and the hadoop jar invocation for each.
    if (!isEmpty(detail.getSteps())) {
        List<Object> stepList = new ArrayList<Object>();
        for (StepDetail stepDetail : detail.getSteps()) {
            Map<String, Object> stepMap = new HashMap<String, Object>();
            StepConfig stepConfig = stepDetail.getStepConfig();
            StepExecutionStatusDetail stepStatus = stepDetail.getExecutionStatusDetail();
            stepMap.put(Response.NAME, stepConfig.getName());
            stepMap.put(Response.ACTION_ON_FAILURE, stepConfig.getActionOnFailure());
            stepMap.put(Response.STATE, object2String(stepStatus.getState(), false));
            stepMap.put(Response.CREATION_DATE, object2String(stepStatus.getCreationDateTime(), true));
            stepMap.put(Response.START_DATE, object2String(stepStatus.getStartDateTime(), true));
            stepMap.put(Response.END_DATE, object2String(stepStatus.getEndDateTime(), true));
            HadoopJarStepConfig jarStep = stepConfig.getHadoopJarStep();
            stepMap.put(Response.JAR, object2String(jarStep.getJar(), false));
            stepMap.put(Response.MAIN_CLASS, object2String(jarStep.getMainClass(), false));
            if (!isEmpty(jarStep.getArgs())) {
                stepMap.put(Response.ARGS, jarStep.getArgs());
            }
            stepList.add(stepMap);
        }
        body.put(Response.STEPS, stepList);
    }

    return new JSONObject(body);
}