List of usage examples for org.apache.hadoop.mapreduce.Job#setReducerClass
public void setReducerClass(Class<? extends Reducer> cls) throws IllegalStateException
From source file:adts.SuccessfullQueries.java
License:Open Source License
public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); Job job = new Job(conf, "SuccessfullQueries"); job.setJarByClass(SuccessfullQueries.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); job.setMapperClass(Map.class); job.setReducerClass(Reduce.class); job.setInputFormatClass(TextInputFormat.class); job.setOutputFormatClass(TextOutputFormat.class); FileInputFormat.addInputPath(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); job.waitForCompletion(true);//from w w w .ja v a2 s. c o m }
From source file:AllLab_Skeleton.Lab1.Lab1_Wordcount.java
/** * @param args the command line arguments */// w ww.j av a2 s.co m public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { Configuration conf = new Configuration(); Job job = Job.getInstance(conf, "word count"); job.setJarByClass(Lab1_Wordcount.class); job.setMapperClass(WordCount_Mapper.class); job.setCombinerClass(WordCount_Reducer.class); job.setReducerClass(WordCount_Reducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); FileInputFormat.addInputPath(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:AllLab_Skeleton.Lab2.Lab2SecondarySort.java
/**
 * Driver for the secondary-sort example: a composite key carries both the
 * grouping and sorting fields, with a custom partitioner and grouping
 * comparator so records arrive at each reducer grouped and ordered.
 *
 * @param args args[0] = input path, args[1] = output path
 */
public static void main(String[] args) {
    try {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "SecondarySort");
        job.setJarByClass(Lab2SecondarySort.class);
        job.setMapperClass(Lab2Mapper.class);
        job.setMapOutputKeyClass(CompositeKeyWritable.class);
        job.setMapOutputValueClass(NullWritable.class);
        // Partition on the natural key, group on it too, sort on the full composite key.
        job.setPartitionerClass(Lab2Partitioner.class);
        job.setGroupingComparatorClass(Lab2GroupComparator.class);
        job.setReducerClass(Lab2Reducer.class);
        job.setOutputKeyClass(CompositeKeyWritable.class);
        job.setOutputValueClass(NullWritable.class);
        job.setNumReduceTasks(8);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    } catch (IOException | InterruptedException | ClassNotFoundException ex) {
        // Original printed a typo'd message ("Erorr Message"), dropped the stack
        // trace, and fell through to exit code 0 on failure. Report fully and
        // exit nonzero so callers can detect the failure.
        System.err.println("Error Message: " + ex.getMessage());
        ex.printStackTrace();
        System.exit(1);
    }
}
From source file:AllLab_Skeleton.Lab4.Lab4_Std_dev.java
/**
 * Launches the median / standard-deviation job. Mappers emit
 * (Text, DoubleWritable) pairs; the reducer aggregates them into a
 * MedianSDCustomWritable per key.
 *
 * @param args args[0] = input path, args[1] = output path
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Job statsJob = Job.getInstance(new Configuration(), "medianstd");
    statsJob.setJarByClass(Lab4_Std_dev.class);

    statsJob.setMapperClass(Map.class);
    // Intermediate pairs differ from the final output types, so both are declared.
    statsJob.setMapOutputKeyClass(Text.class);
    statsJob.setMapOutputValueClass(DoubleWritable.class);

    statsJob.setReducerClass(Reduce.class);
    statsJob.setOutputKeyClass(Text.class);
    statsJob.setOutputValueClass(MedianSDCustomWritable.class);

    FileInputFormat.addInputPath(statsJob, new Path(args[0]));
    FileOutputFormat.setOutputPath(statsJob, new Path(args[1]));

    System.exit(statsJob.waitForCompletion(true) ? 0 : 1);
}
From source file:AllLab_Skeleton.Lab6.ReduceSideJoin.java
public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); Job job = Job.getInstance(conf, "ReduceSideJoin"); job.setJarByClass(ReduceSideJoin.class); // Use MultipleInputs to set which input uses what mapper // This will keep parsing of each data set separate from a logical // standpoint // The first two elements of the args array are the two inputs MultipleInputs.addInputPath(job, new Path(args[0]), TextInputFormat.class, UserJoinMapper.class); MultipleInputs.addInputPath(job, new Path(args[1]), TextInputFormat.class, CommentJoinMapper.class); job.getConfiguration().set("join.type", "leftouter"); //job.setNumReduceTasks(0); job.setReducerClass(UserJoinReducer.class); job.setOutputFormatClass(TextOutputFormat.class); TextOutputFormat.setOutputPath(job, new Path(args[2])); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.waitForCompletion(true);//from www . j a va 2s.c om }
From source file:alluxio.checker.MapReduceIntegrationChecker.java
License:Apache License
/** * Implements MapReduce with Alluxio integration checker. * * @return 0 for success, 2 for unable to find Alluxio classes, 1 otherwise *///w w w . j ava 2s .c o m private int run(String[] args) throws Exception { Configuration conf = new Configuration(); String numMaps = new GenericOptionsParser(conf, args).getRemainingArgs()[0]; conf.set(MRJobConfig.NUM_MAPS, numMaps); createHdfsFilesystem(conf); Job job = Job.getInstance(conf, "MapReduceIntegrationChecker"); job.setJarByClass(MapReduceIntegrationChecker.class); job.setMapperClass(CheckerMapper.class); job.setCombinerClass(CheckerReducer.class); job.setReducerClass(CheckerReducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setInputFormatClass(EmptyInputFormat.class); FileOutputFormat.setOutputPath(job, mOutputFilePath); try { if (!job.waitForCompletion(true)) { return 1; } Status resultStatus = generateReport(); return resultStatus.equals(Status.SUCCESS) ? 0 : (resultStatus.equals(Status.FAIL_TO_FIND_CLASS) ? 2 : 1); } finally { if (mFileSystem.exists(mOutputFilePath)) { mFileSystem.delete(mOutputFilePath, true); } mFileSystem.close(); } }
From source file:alluxio.examples.keyvalue.hadoop.CloneStoreMapReduce.java
License:Apache License
/** * @param args two parameters, the first is the input key-value store path, the second is the * output key-value store path/*from w w w. j av a 2 s .com*/ * @throws Exception if any exception happens */ public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); // NOTE(binfan): we are using the deprecated constructor of Job instance to compile with // hadoop-1.0. If this is not a concern, a better way is // Job job = Job.getInstance(conf); Job job = new Job(conf); job.setJobName("CloneStoreMapReduce"); job.setJarByClass(CloneStoreMapReduce.class); job.setOutputKeyClass(BytesWritable.class); job.setOutputValueClass(BytesWritable.class); job.setMapperClass(CloneStoreMapper.class); job.setReducerClass(CloneStoreReducer.class); job.setInputFormatClass(KeyValueInputFormat.class); job.setOutputFormatClass(KeyValueOutputFormat.class); FileInputFormat.setInputPaths(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:Analysis.A1_Total_Unique_Artists_on_Service.Distinct_Artist_Driver.java
/** * @param args the command line arguments *///from w ww . j a va2 s. c o m public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { Configuration conf = new Configuration(); Job job = Job.getInstance(conf, "Distinct Artists available on Service"); job.setJarByClass(Distinct_Artist_Driver.class); job.setMapperClass(Distinct_Artist_Mapper.class); job.setCombinerClass(Distinct_Artist_Reducer.class); job.setReducerClass(Distinct_Artist_Reducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(NullWritable.class); FileInputFormat.addInputPath(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:Analysis.A2_Top_20_Most_Popular_Artists.Top_20_Most_Popular_Artist_Driver.java
/**
 * Runs the top-20 most-popular-artist job. A combiner pre-aggregates play
 * counts on the map side, and a single reducer is used so one process sees
 * every artist and can select the global top 20.
 *
 * @param args args[0] = input path, args[1] = output path
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Job topArtistsJob = Job.getInstance(new Configuration(), "Top 20 most popular artist ");
    topArtistsJob.setJarByClass(Top_20_Most_Popular_Artist_Driver.class);

    topArtistsJob.setMapperClass(Top_20_Most_Popular_Artist_Mapper.class);
    topArtistsJob.setMapOutputKeyClass(Text.class);
    topArtistsJob.setMapOutputValueClass(IntWritable.class);

    // A single reducer guarantees a global (not per-partition) top-20 list.
    topArtistsJob.setNumReduceTasks(1);
    topArtistsJob.setCombinerClass(Top_20_Most_Popular_Artist_Combiner.class);
    topArtistsJob.setReducerClass(Top_20_Most_Popular_Artist_Reducer.class);

    topArtistsJob.setOutputKeyClass(NullWritable.class);
    topArtistsJob.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(topArtistsJob, new Path(args[0]));
    FileOutputFormat.setOutputPath(topArtistsJob, new Path(args[1]));

    System.exit(topArtistsJob.waitForCompletion(true) ? 0 : 1);
}
From source file:Analysis.A3_Total_Users_By_Gender.User_Gender_Count_Driver.java
/** * @param args the command line arguments *///from w w w .j av a 2 s. c om public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { Configuration conf = new Configuration(); Job job = Job.getInstance(conf, "Genderwise Demography on Service"); job.setJarByClass(User_Gender_Count_Driver.class); job.setMapperClass(User_Gender_Count_Mapper.class); job.setCombinerClass(User_Gender_Count_Reducer.class); job.setReducerClass(User_Gender_Count_Reducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); FileInputFormat.addInputPath(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); System.exit(job.waitForCompletion(true) ? 0 : 1); }