Usage examples for org.apache.hadoop.mapred.JobConf.setMapperClass
public void setMapperClass(Class<? extends Mapper> theClass)
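Before the examples below, here is a minimal, self-contained sketch of how setMapperClass is typically used in a driver built on the old org.apache.hadoop.mapred API. The class, job name, and input/output paths (WordCountDriver, WordCountMapper, /tmp/in, /tmp/out) are illustrative placeholders and do not come from the example projects listed on this page.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;

public class WordCountDriver {

    // Hypothetical mapper: emits (word, 1) for every token in an input line
    public static class WordCountMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, IntWritable> {
        private final static IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        public void map(LongWritable key, Text value,
                        OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
            for (String token : value.toString().split("\\s+")) {
                if (token.isEmpty()) continue;
                word.set(token);
                output.collect(word, ONE);
            }
        }
    }

    public static void main(String[] args) throws IOException {
        JobConf conf = new JobConf(WordCountDriver.class);
        conf.setJobName("wordcount");

        // The call this page documents: register the Mapper implementation for the job
        conf.setMapperClass(WordCountMapper.class);

        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);

        FileInputFormat.setInputPaths(conf, new Path("/tmp/in"));   // placeholder input path
        FileOutputFormat.setOutputPath(conf, new Path("/tmp/out")); // placeholder output path

        JobClient.runJob(conf); // blocks until the job finishes
    }
}

The real-world examples that follow show the same call in larger drivers, often choosing the mapper class conditionally at runtime.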
From source file:edu.brown.cs.mapreduce.benchmarks.Benchmark1.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);

    JobConf job = base.getJobConf();
    job.setInputFormat(base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    if (base.getTupleData()) {
        job.setMapperClass(Benchmark1.TupleMap.class);
    } else {
        job.setMapperClass(Benchmark1.TextMap.class);
    }
    //job.setReducerClass(IdentityReducer.class);
    job.setNumReduceTasks(0);

    try {
        base.runJob(job);
        if (base.getCombine())
            base.runCombine();
    } catch (Exception ex) {
        ex.printStackTrace();
        System.exit(1);
    }
    return 0;
}
From source file:edu.brown.cs.mapreduce.benchmarks.Benchmark2.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);

    JobConf job = base.getJobConf();
    job.setInputFormat(base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    //job.setInputFormat(KeyValueSetInputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    if (base.getTupleData()) {
        job.setMapperClass(Benchmark2.TupleWritableMap.class);
    } else {
        job.setMapperClass(Benchmark2.TextMap.class);
    }
    job.setCombinerClass(Benchmark2.Reduce.class);
    job.setReducerClass(Benchmark2.Reduce.class);
    //job.setNumReduceTasks(0);

    try {
        base.runJob(job);
        if (base.getCombine())
            base.runCombine();
    } catch (Exception ex) {
        ex.printStackTrace();
        System.exit(1);
    }
    return 0;
}
From source file:edu.brown.cs.mapreduce.benchmarks.Benchmark3.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);
    Date startTime = new Date();
    System.out.println("Job started: " + startTime);

    // -------------------------------------------
    // Phase #1
    // -------------------------------------------
    JobConf p1_job = base.getJobConf();
    p1_job.setJobName(p1_job.getJobName() + ".Phase1");
    Path p1_output = new Path(base.getOutputPath().toString() + "/phase1");
    FileOutputFormat.setOutputPath(p1_job, p1_output);

    //
    // Make sure we have our properties
    //
    String required[] = { BenchmarkBase.PROPERTY_START_DATE, BenchmarkBase.PROPERTY_STOP_DATE };
    for (String req : required) {
        if (!base.getOptions().containsKey(req)) {
            System.err.println("ERROR: The property '" + req + "' is not set");
            System.exit(1);
        }
    } // FOR

    p1_job.setInputFormat(base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    if (base.getSequenceFile())
        p1_job.setOutputFormat(SequenceFileOutputFormat.class);
    p1_job.setOutputKeyClass(Text.class);
    p1_job.setOutputValueClass(Text.class);
    p1_job.setMapperClass(base.getTupleData()
            ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TupleWritableMap.class
            : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TextMap.class);
    p1_job.setReducerClass(base.getTupleData()
            ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TupleWritableReduce.class
            : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase1.TextReduce.class);
    p1_job.setCompressMapOutput(base.getCompress());

    // -------------------------------------------
    // Phase #2
    // -------------------------------------------
    JobConf p2_job = base.getJobConf();
    p2_job.setJobName(p2_job.getJobName() + ".Phase2");
    p2_job.setInputFormat(base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    if (base.getSequenceFile())
        p2_job.setOutputFormat(SequenceFileOutputFormat.class);
    p2_job.setOutputKeyClass(Text.class);
    p2_job.setOutputValueClass(Text.class);
    p2_job.setMapperClass(IdentityMapper.class);
    p2_job.setReducerClass(base.getTupleData()
            ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase2.TupleWritableReduce.class
            : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase2.TextReduce.class);
    p2_job.setCompressMapOutput(base.getCompress());
    p2_job.setNumMapTasks(60);

    // -------------------------------------------
    // Phase #3
    // -------------------------------------------
    JobConf p3_job = base.getJobConf();
    p3_job.setJobName(p3_job.getJobName() + ".Phase3");
    p3_job.setNumReduceTasks(1);
    p3_job.setInputFormat(base.getSequenceFile() ? SequenceFileInputFormat.class : KeyValueTextInputFormat.class);
    p3_job.setOutputKeyClass(Text.class);
    p3_job.setOutputValueClass(Text.class);
    //p3_job.setMapperClass(Phase3Map.class);
    p3_job.setMapperClass(IdentityMapper.class);
    p3_job.setReducerClass(base.getTupleData()
            ? edu.brown.cs.mapreduce.benchmarks.benchmark3.phase3.TupleWritableReduce.class
            : edu.brown.cs.mapreduce.benchmarks.benchmark3.phase3.TextReduce.class);

    //
    // Execute #1
    //
    base.runJob(p1_job);

    //
    // Execute #2
    //
    Path p2_output = new Path(base.getOutputPath().toString() + "/phase2");
    FileOutputFormat.setOutputPath(p2_job, p2_output);
    FileInputFormat.setInputPaths(p2_job, p1_output);
    base.runJob(p2_job);

    //
    // Execute #3
    //
    Path p3_output = new Path(base.getOutputPath().toString() + "/phase3");
    FileOutputFormat.setOutputPath(p3_job, p3_output);
    FileInputFormat.setInputPaths(p3_job, p2_output);
    base.runJob(p3_job);

    // There does need to be a combine
    if (base.getCombine())
        base.runCombine();
    return 0;
}
From source file:edu.brown.cs.mapreduce.benchmarks.Benchmark4.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);

    JobConf job = base.getJobConf();
    job.setInputFormat(TextInputFormat.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    job.setMapperClass(Benchmark4.Map.class);
    job.setCombinerClass(LongSumReducer.class);
    job.setReducerClass(LongSumReducer.class);

    try {
        job.setCompressMapOutput(base.getCompress());
        base.runJob(job);
        if (base.getCombine())
            base.runCombine();
    } catch (Exception ex) {
        ex.printStackTrace();
        System.exit(1);
    }
    return 0;
}
From source file:edu.brown.cs.mapreduce.benchmarks.DummyJob.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);

    JobConf p1_job = base.getJobConf();
    if (base.getSequenceFile())
        p1_job.setInputFormat(SequenceFileInputFormat.class);
    p1_job.setMapperClass(DummyJob.Map.class);
    p1_job.setOutputKeyClass(Text.class);
    p1_job.setOutputValueClass(LongWritable.class);

    base.runJob(p1_job);
    return 0;
}
From source file:edu.brown.cs.mapreduce.benchmarks.Grep.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);

    // -------------------------------------------
    // Search
    // -------------------------------------------
    JobConf p1_job = base.getJobConf();

    //
    // We have to grab all of the dirs in the directory that was passed in
    //
    // List<Path> inputs = base.getInputPaths();
    // if (false && inputs.size() == 1) {
    //     Path input_path = inputs.get(0);
    //     FileSystem fs = null;
    //     try {
    //         fs = FileSystem.get(this.getConf());
    //         if (fs.getFileStatus(input_path).isDir()) {
    //             //p1_job.set("mapred.input.dir", "");
    //             FileStatus paths[] = fs.listStatus(input_path);
    //             for (FileStatus p : paths) {
    //                 FileInputFormat.addInputPath(p1_job, p.getPath());
    //             }
    //         }
    //     } catch (Exception ex) {
    //         ex.printStackTrace();
    //         System.exit(-1);
    //     }
    // }

    if (base.getSequenceFile())
        p1_job.setInputFormat(SequenceFileInputFormat.class);
    p1_job.setMapperClass(Grep.RegexMapper.class);
    //p1_job.setCombinerClass(LongSumReducer.class);
    //p1_job.setReducerClass(LongSumReducer.class);
    //p1_job.setOutputFormat(SequenceFileOutputFormat.class);
    p1_job.setOutputKeyClass(Text.class);
    p1_job.setOutputValueClass(Text.class);

    base.runJob(p1_job);
    if (base.getCombine())
        base.runCombine();
    return 0;
}
From source file:edu.brown.cs.mapreduce.benchmarks.Sort.java
License:Open Source License
public int run(String[] args) throws Exception {
    BenchmarkBase base = new BenchmarkBase(this.getConf(), this.getClass(), args);

    JobConf p1_job = base.getJobConf();
    p1_job.setMapperClass(IdentityMapper.class);
    p1_job.setReducerClass(IdentityReducer.class);
    p1_job.setInputFormat(KeyValueTextInputFormat.class);
    p1_job.setOutputFormat(SequenceFileOutputFormat.class);
    //p1_job.setOutputFormat(NullOutputFormat.class);
    p1_job.setOutputKeyClass(Text.class);
    p1_job.setOutputValueClass(Text.class);

    base.runJob(p1_job);
    if (base.getCombine())
        base.runCombine();
    return 0;
}
From source file:edu.brown.cs.mapreduce.demo.OrderSum.java
License:Open Source License
/**
 * The main driver for the OrderSum map/reduce program.
 * Invoke this method to submit the map/reduce job.
 * @throws IOException When there are communication problems with the job tracker.
 */
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(this.getConf(), OrderSum.class);
    conf.setJobName(OrderSum.class.getSimpleName());

    // Input File Format
    conf.setInputFormat(KeyValueTextInputFormat.class);

    // Output Key/Value Types
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(DoubleWritable.class);

    // Map/Reduce Classes
    conf.setMapperClass(OrderSum.OrderSumMapper.class);
    conf.setReducerClass(OrderSum.OrderSumReducer.class);

    // Input/Output Paths (HDFS)
    FileInputFormat.setInputPaths(conf, "/demo/input/");
    FileOutputFormat.setOutputPath(conf, new Path("/demo/output/"));

    /***** Additional Features *****/

    // Compression
    //conf.setCompressMapOutput(true);

    // Combine
    //conf.setCombinerClass(OrderSum.OrderSumReducer.class);

    // Create a single output file
    conf.setNumReduceTasks(1);

    // Pass search date on command-line
    /* uncomment configure!
    if (args.length == 1) {
        conf.set("edu.brown.cs.pavlo.search_date", args[0]);
    }*/

    // Bombs away!
    JobClient.runJob(conf);
    return 0;
}
From source file:edu.iit.marketbasket.MarketBasket.java
public static void main(String[] args) throws IOException {
    JobConf conf = new JobConf(MarketBasket.class);
    conf.setJobName("MarketBasket");

    conf.setMapperClass(Map.class);
    conf.setReducerClass(Reduce.class);
    //conf.setJarByClass(MarketBasket.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    JobClient.runJob(conf).waitForCompletion();
}
From source file:edu.ldzm.analysis.AnalysisSummary.java
License:Apache License
/**
 * The main driver for the analysis summary map/reduce program. Invoke this
 * method to submit the map/reduce job.
 *
 * @throws IOException
 *             When there are communication problems with the job tracker.
 */
public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf(), AnalysisSummary.class);
    conf.setJobName("analysis_summary");

    // Both keys and values are emitted as Text
    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(MapClass.class);
    conf.setCombinerClass(Combine.class);
    conf.setReducerClass(Reduce.class);

    boolean param = false;
    List<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
        try {
            if ("-m".equals(args[i])) {
                conf.setNumMapTasks(Integer.parseInt(args[++i]));
            } else if ("-r".equals(args[i])) {
                conf.setNumReduceTasks(Integer.parseInt(args[++i]));
            } else if ("-l".equals(args[i])) {
                param = true;
                String[] fields = args[++i].split(SEPARATOR);
                conf.setInt("NAME_LIST_LENGTH", fields.length);
                for (int j = 0; j < fields.length; j++) {
                    if ("timeStamp".equals(fields[j])) {
                        conf.setInt("REQUEST_TIME_INDEX", j);
                    } else if ("elapsed".equals(fields[j])) {
                        conf.setInt("REQUEST_ELAPSE_TIME_INDEX", j);
                    } else if ("label".equals(fields[j])) {
                        conf.setInt("REQUEST_LABEL_INDEX", j);
                    } else if ("success".equals(fields[j])) {
                        conf.setInt("REQUEST_SUCCESSFUL_INDEX", j);
                    } else if ("bytes".equals(fields[j])) {
                        conf.setInt("REQUEST_BYTE_INDEX", j);
                    }
                }
            } else {
                other_args.add(args[i]);
            }
        } catch (NumberFormatException except) {
            System.out.println("ERROR: Integer expected instead of " + args[i]);
            return printUsage();
        } catch (ArrayIndexOutOfBoundsException except) {
            System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
            return printUsage();
        }
    }

    // Make sure there are exactly 2 parameters left.
    if (other_args.size() != 2) {
        System.out.println("ERROR: Wrong number of parameters: " + other_args.size() + " instead of 2.");
        return printUsage();
    }
    if (!param) {
        System.out.println("-l namelist.txt");
        return printUsage();
    }

    FileInputFormat.setInputPaths(conf, other_args.get(0));
    FileOutputFormat.setOutputPath(conf, new Path(other_args.get(1)));

    JobClient.runJob(conf);
    return 0;
}