Usage examples for org.apache.hadoop.mapred.jobcontrol.Job.getMessage()
public synchronized String getMessage()
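getMessage() returns the diagnostic message recorded for a control-flow Job, which is most informative once the job has reached the FAILED or DEPENDENT_FAILED state. Before the examples from real projects below, here is a minimal sketch of the common pattern; the class name is hypothetical and the JobConf is assumed to be fully configured elsewhere:

import java.util.ArrayList;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;

public class GetMessageSketch {
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(GetMessageSketch.class);
        // ... mapper, reducer, and input/output paths would be set here ...
        Job job = new Job(conf);

        JobControl jc = new JobControl("getMessage example");
        jc.addJob(job);
        new Thread(jc).start();

        // Poll until the controller reports that every job has finished or failed
        while (!jc.allFinished()) {
            Thread.sleep(1000);
        }

        // For failed jobs, getMessage() carries the recorded diagnostic
        ArrayList<Job> failures = jc.getFailedJobs();
        for (Job failed : failures) {
            System.err.println(failed.getJobName() + ": " + failed.getMessage());
        }
        jc.stop();
    }
}

The examples that follow show this pattern in production code, plus a second role of getMessage(): surfacing submission-time errors when getAssignedJobID() is still null, as in the Pig Launcher examples.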
From source file: com.ebay.erl.mobius.core.MobiusJobRunner.java
License: Apache License

private static String jobToString(Job aJob) {
    StringBuffer sb = new StringBuffer();
    sb.append("job mapred id:\t")
            .append(aJob.getAssignedJobID() == null ? "unassigned" : aJob.getAssignedJobID().toString())
            .append("\t");
    sb.append("job name: ").append(aJob.getJobName()).append("\n");
    String state = "Unset";
    switch (aJob.getState()) {
    case Job.DEPENDENT_FAILED:
        state = "DEPENDENT_FAILED";
        break;
    case Job.FAILED:
        state = "FAILED";
        break;
    case Job.READY:
        state = "READY";
        break;
    case Job.RUNNING:
        state = "RUNNING";
        break;
    case Job.SUCCESS:
        state = "SUCCESS";
        break;
    case Job.WAITING:
        state = "WAITING";
        break;
    }
    sb.append("job state:\t").append(state).append("\n");
    sb.append("job id:\t").append(aJob.getJobID()).append("\n");
    sb.append("job message:\t").append(aJob.getMessage()).append("\n");
    // Commented out on March 30, 2012, because an NPE is thrown on Apollo.
    //
    // if (aJob.getDependingJobs() == null || aJob.getDependingJobs().size() == 0) {
    //     sb.append("job has no depending job:\t").append("\n");
    // } else {
    //     sb.append("job has ").append(aJob.getDependingJobs().size()).append(" depending jobs:\n");
    //     for (int i = 0; i < aJob.getDependingJobs().size(); i++) {
    //         sb.append("\t depending job ").append(i).append(":\t");
    //         sb.append(aJob.getDependingJobs().get(i).getJobName()).append("\n");
    //     }
    // }
    return sb.toString().trim();
}
From source file: com.twitter.pig.backend.hadoop.executionengine.tez.TezLauncher.java
License: Apache License

/**
 * If stop_on_failure is enabled and any job has failed, an ExecException is thrown.
 * @param stop_on_failure whether it's enabled.
 * @throws ExecException if stop_on_failure is enabled and any job has failed
 */
private void checkStopOnFailure(boolean stop_on_failure) throws ExecException {
    if (jc.getFailedJobs().isEmpty())
        return;

    if (stop_on_failure) {
        int errCode = 6017;
        StringBuilder msg = new StringBuilder();

        for (int i = 0; i < jc.getFailedJobs().size(); i++) {
            Job j = jc.getFailedJobs().get(i);
            msg.append(j.getMessage());
            if (i != jc.getFailedJobs().size() - 1) {
                msg.append("\n");
            }
        }

        throw new ExecException(msg.toString(), errCode, PigException.REMOTE_ENVIRONMENT);
    }
}
From source file: org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.Launcher.java
License: Apache License

protected void getStats(Job job, JobClient jobClient, boolean errNotDbg, PigContext pigContext)
        throws Exception {
    JobID MRJobID = job.getAssignedJobID();
    String jobMessage = job.getMessage();
    Exception backendException = null;
    if (MRJobID == null) {
        try {
            LogUtils.writeLog("Backend error message during job submission", jobMessage,
                    pigContext.getProperties().getProperty("pig.logfile"), log);
            backendException = getExceptionFromString(jobMessage);
        } catch (Exception e) {
            int errCode = 2997;
            String msg = "Unable to recreate exception from backend error: " + jobMessage;
            throw new ExecException(msg, errCode, PigException.BUG);
        }
        throw backendException;
    }
    try {
        TaskReport[] mapRep = jobClient.getMapTaskReports(MRJobID);
        getErrorMessages(mapRep, "map", errNotDbg, pigContext);
        totalHadoopTimeSpent += computeTimeSpent(mapRep);
        mapRep = null;
        TaskReport[] redRep = jobClient.getReduceTaskReports(MRJobID);
        getErrorMessages(redRep, "reduce", errNotDbg, pigContext);
        totalHadoopTimeSpent += computeTimeSpent(redRep);
        redRep = null;
    } catch (IOException e) {
        if (job.getState() == Job.SUCCESS) {
            // if the job succeeded, let the user know that
            // we were unable to get statistics
            log.warn("Unable to get job related diagnostics");
        } else {
            throw e;
        }
    }
}
From source file: org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher.java
License: Apache License

/**
 * If stop_on_failure is enabled and any job has failed, an ExecException is thrown.
 * @param stop_on_failure whether it's enabled.
 * @throws ExecException if stop_on_failure is enabled and any job has failed
 */
private void checkStopOnFailure(boolean stop_on_failure) throws ExecException {
    if (jc.getFailedJobs().isEmpty())
        return;

    if (stop_on_failure) {
        int errCode = 6017;
        StringBuilder msg = new StringBuilder();

        for (int i = 0; i < jc.getFailedJobs().size(); i++) {
            Job j = jc.getFailedJobs().get(i);
            msg.append("JobID: " + j.getAssignedJobID() + " Reason: " + j.getMessage());
            if (i != jc.getFailedJobs().size() - 1) {
                msg.append("\n");
            }
        }

        throw new ExecException(msg.toString(), errCode, PigException.REMOTE_ENVIRONMENT);
    }
}
From source file: org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher.java
License: Apache License

private void getStats(Job job, boolean errNotDbg, PigContext pigContext) throws ExecException {
    JobID MRJobID = job.getAssignedJobID();
    String jobMessage = job.getMessage();
    Exception backendException = null;
    if (MRJobID == null) {
        try {
            LogUtils.writeLog("Backend error message during job submission", jobMessage,
                    pigContext.getProperties().getProperty("pig.logfile"), log);
            backendException = getExceptionFromString(jobMessage);
        } catch (Exception e) {
            int errCode = 2997;
            String msg = "Unable to recreate exception from backend error: " + jobMessage;
            throw new ExecException(msg, errCode, PigException.BUG);
        }
        throw new ExecException(backendException);
    }
    try {
        TaskReport[] mapRep = HadoopShims.getTaskReports(job, TaskType.MAP);
        if (mapRep != null) {
            getErrorMessages(mapRep, "map", errNotDbg, pigContext);
            totalHadoopTimeSpent += computeTimeSpent(mapRep);
            mapRep = null;
        }
        TaskReport[] redRep = HadoopShims.getTaskReports(job, TaskType.REDUCE);
        if (redRep != null) {
            getErrorMessages(redRep, "reduce", errNotDbg, pigContext);
            totalHadoopTimeSpent += computeTimeSpent(redRep);
            redRep = null;
        }
    } catch (IOException e) {
        if (job.getState() == Job.SUCCESS) {
            // if the job succeeded, let the user know that
            // we were unable to get statistics
            log.warn("Unable to get job related diagnostics");
        } else {
            throw new ExecException(e);
        }
    } catch (Exception e) {
        throw new ExecException(e);
    }
}
From source file: org.apache.pig.test.pigmix.mapreduce.L1.java
License: Apache License

public static void main(String[] args) throws IOException {
    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];

    JobConf lp = new JobConf(L1.class);
    lp.setJobName("L1 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(IntWritable.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setCombinerClass(Group.class);
    lp.setReducerClass(Group.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/L1out"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job group = new Job(lp);

    JobControl jc = new JobControl("L1 join");
    jc.addJob(group);
    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }
        // Print a status summary of all jobs every 10000 polling iterations
        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }

    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}
From source file: org.apache.pig.test.pigmix.mapreduce.L10.java
License: Apache License

public static void main(String[] args) throws IOException {
    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];

    JobConf lp = new JobConf(L10.class);
    lp.setJobName("L10 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(MyType.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setReducerClass(Group.class);
    lp.setPartitionerClass(MyPartitioner.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/L10out"));
    // Hardcode the parallelism to 40 since MyPartitioner assumes it
    lp.setNumReduceTasks(40);
    Job group = new Job(lp);

    JobControl jc = new JobControl("L10 join");
    jc.addJob(group);
    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }
        // Print a status summary of all jobs every 10000 polling iterations
        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }

    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}
From source file: org.apache.pig.test.pigmix.mapreduce.L11.java
License: Apache License

public static void main(String[] args) throws IOException {
    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    String user = System.getProperty("user.name");

    JobConf lp = new JobConf(L11.class);
    lp.setJobName("L11 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setCombinerClass(ReadPageViews.class);
    lp.setReducerClass(ReadPageViews.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/p"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadPages = new Job(lp);

    JobConf lu = new JobConf(L11.class);
    lu.setJobName("L11 Load Widerow");
    lu.setInputFormat(TextInputFormat.class);
    lu.setOutputKeyClass(Text.class);
    lu.setOutputValueClass(Text.class);
    lu.setMapperClass(ReadWideRow.class);
    lu.setCombinerClass(ReadWideRow.class);
    lu.setReducerClass(ReadWideRow.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lu.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lu, new Path(inputDir + "/widerow"));
    FileOutputFormat.setOutputPath(lu, new Path(outputDir + "/wr"));
    lu.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadWideRow = new Job(lu);

    JobConf join = new JobConf(L11.class);
    join.setJobName("L11 Union WideRow and Pages");
    join.setInputFormat(KeyValueTextInputFormat.class);
    join.setOutputKeyClass(Text.class);
    join.setOutputValueClass(Text.class);
    join.setMapperClass(IdentityMapper.class);
    join.setCombinerClass(Union.class);
    join.setReducerClass(Union.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        join.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(join, new Path(outputDir + "/p"));
    FileInputFormat.addInputPath(join, new Path(outputDir + "/wr"));
    FileOutputFormat.setOutputPath(join, new Path(outputDir + "/L11out"));
    join.setNumReduceTasks(Integer.parseInt(parallel));
    Job joinJob = new Job(join);
    joinJob.addDependingJob(loadPages);
    joinJob.addDependingJob(loadWideRow);

    JobControl jc = new JobControl("L11 join");
    jc.addJob(loadPages);
    jc.addJob(loadWideRow);
    jc.addJob(joinJob);
    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }
        // Print a status summary of all jobs every 10000 polling iterations
        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }

    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}
From source file: org.apache.pig.test.pigmix.mapreduce.L12.java
License: Apache License

public static void main(String[] args) throws IOException {
    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    String user = System.getProperty("user.name");

    JobConf lp = new JobConf(L12.class);
    lp.setJobName("L12 Find Highest Value Page Per User");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(DoubleWritable.class);
    lp.setMapperClass(HighestValuePagePerUser.class);
    lp.setCombinerClass(HighestValuePagePerUser.class);
    lp.setReducerClass(HighestValuePagePerUser.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/highest_value_page_per_user"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadPages = new Job(lp);

    JobConf lu = new JobConf(L12.class);
    lu.setJobName("L12 Find Total Timespent per Term");
    lu.setInputFormat(TextInputFormat.class);
    lu.setOutputKeyClass(Text.class);
    lu.setOutputValueClass(LongWritable.class);
    lu.setMapperClass(TotalTimespentPerTerm.class);
    lu.setCombinerClass(TotalTimespentPerTerm.class);
    lu.setReducerClass(TotalTimespentPerTerm.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lu.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lu, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lu, new Path(outputDir + "/total_timespent_per_term"));
    lu.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadUsers = new Job(lu);

    JobConf join = new JobConf(L12.class);
    join.setJobName("L12 Find Queries Per Action");
    join.setInputFormat(TextInputFormat.class);
    join.setOutputKeyClass(Text.class);
    join.setOutputValueClass(LongWritable.class);
    join.setMapperClass(QueriesPerAction.class);
    join.setCombinerClass(QueriesPerAction.class);
    join.setReducerClass(QueriesPerAction.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        join.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(join, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(join, new Path(outputDir + "/queries_per_action"));
    join.setNumReduceTasks(Integer.parseInt(parallel));
    Job joinJob = new Job(join);

    JobControl jc = new JobControl("L12 join");
    jc.addJob(loadPages);
    jc.addJob(loadUsers);
    jc.addJob(joinJob);
    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }
        // Print a status summary of all jobs every 10000 polling iterations
        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }

    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}
From source file: org.apache.pig.test.pigmix.mapreduce.L13.java
License: Apache License

public static void main(String[] args) throws IOException {
    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    String user = System.getProperty("user.name");

    JobConf lp = new JobConf(L13.class);
    lp.setJobName("L13 Load Left Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadLeftPageViews.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/indexed_left_pages"));
    lp.setNumReduceTasks(0);
    Job loadPages = new Job(lp);

    JobConf lu = new JobConf(L13.class);
    lu.setJobName("L13 Load Right Page Views");
    lu.setInputFormat(TextInputFormat.class);
    lu.setOutputKeyClass(Text.class);
    lu.setOutputValueClass(Text.class);
    lu.setMapperClass(ReadRightPageViews.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lu.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lu, new Path(inputDir + "/power_users_samples"));
    FileOutputFormat.setOutputPath(lu, new Path(outputDir + "/indexed_right_pages"));
    lu.setNumReduceTasks(0);
    Job loadUsers = new Job(lu);

    JobConf join = new JobConf(L13.class);
    join.setJobName("L13 Join Two Pages");
    join.setInputFormat(KeyValueTextInputFormat.class);
    join.setOutputKeyClass(Text.class);
    join.setOutputValueClass(Text.class);
    join.setMapperClass(IdentityMapper.class);
    join.setReducerClass(Join.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        join.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(join, new Path(outputDir + "/indexed_left_pages"));
    FileInputFormat.addInputPath(join, new Path(outputDir + "/indexed_right_pages"));
    FileOutputFormat.setOutputPath(join, new Path(outputDir + "/L13out"));
    join.setNumReduceTasks(Integer.parseInt(parallel));
    Job joinJob = new Job(join);
    joinJob.addDependingJob(loadPages);
    joinJob.addDependingJob(loadUsers);

    JobControl jc = new JobControl("L13 join");
    jc.addJob(loadPages);
    jc.addJob(loadUsers);
    jc.addJob(joinJob);
    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }
        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }
        // Print a status summary of all jobs every 10000 polling iterations
        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }

    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}