Java tutorial: a Hadoop MapReduce driver using the old (org.apache.hadoop.mapred) API
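The driver below configures and submits a job named "Clics old Api". It reads its input and output paths from the command line, wires in the MaxCountMapperOldApi and MaxCountReducerOldApi classes, declares Text/LongWritable as the job's output types, and runs the job synchronously with JobClient.runJob.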
package mapreducecountersoldapi;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

/**
 * Driver for a MapReduce job written against the old (org.apache.hadoop.mapred) API.
 *
 * @author miguel
 */
public class MapReduceCountersOldApi {

    /**
     * @param args the command line arguments: <input path> <output path>
     */
    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.println("Usage: MapReduceCountersOldApi <input path> <output path>");
            System.exit(-1);
        }

        // JobConf is the old-API counterpart of Job; passing the driver class
        // lets Hadoop locate the JAR containing the job classes.
        JobConf conf = new JobConf(MapReduceCountersOldApi.class);
        conf.setJobName("Clics old Api");

        // Input and output paths come from the command line.
        FileInputFormat.addInputPath(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        // Mapper, reducer, and the types of the job's output key/value pairs.
        conf.setMapperClass(MaxCountMapperOldApi.class);
        conf.setReducerClass(MaxCountReducerOldApi.class);
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(LongWritable.class);

        // Submit the job and block until it completes (the old-API equivalent
        // of job.waitForCompletion(true) in the new API).
        JobClient.runJob(conf);
    }
}
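The driver refers to MaxCountMapperOldApi and MaxCountReducerOldApi, whose implementations are not part of this listing. The sketch below is one plausible shape for them under the old API, matching the Text/LongWritable output types the driver declares. The record format (first whitespace-separated field as the key) and the sum-per-key logic are assumptions for illustration, not the original classes.

package mapreducecountersoldapi;

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

/** Old-API mapper: emits (key, 1) for each input record. */
class MaxCountMapperOldApi extends MapReduceBase
        implements Mapper<LongWritable, Text, Text, LongWritable> {

    private static final LongWritable ONE = new LongWritable(1);
    private final Text outKey = new Text();

    @Override
    public void map(LongWritable key, Text value,
                    OutputCollector<Text, LongWritable> output, Reporter reporter)
            throws IOException {
        // Hypothetical record format: the first whitespace-separated field
        // identifies the clicked item.
        String[] fields = value.toString().split("\\s+");
        if (fields.length > 0 && !fields[0].isEmpty()) {
            outKey.set(fields[0]);
            output.collect(outKey, ONE);
        }
    }
}

/** Old-API reducer: sums the counts emitted for each key. */
class MaxCountReducerOldApi extends MapReduceBase
        implements Reducer<Text, LongWritable, Text, LongWritable> {

    @Override
    public void reduce(Text key, Iterator<LongWritable> values,
                       OutputCollector<Text, LongWritable> output, Reporter reporter)
            throws IOException {
        long sum = 0;
        while (values.hasNext()) {
            sum += values.next().get();
        }
        output.collect(key, new LongWritable(sum));
    }
}

Once the classes are packaged into a JAR (the JAR name here is only an example), the job can be launched with something like: hadoop jar mapreducecountersoldapi.jar mapreducecountersoldapi.MapReduceCountersOldApi <input path> <output path>. Note that, unlike the commented-out new-API variant, this driver does not delete an existing output directory, so the output path must not already exist.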