List of usage examples for org.apache.hadoop.mapreduce Job setReducerClass
public void setReducerClass(Class<? extends Reducer> cls) throws IllegalStateException
From source file:com.hadoop.secondarysort.SecondarySortDESC.java
License:Apache License
public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); // if (otherArgs.length != 2) { // System.err.println("Usage: secondarysrot <in> <out>"); // System.exit(2); // }//from w w w.j a v a 2s . c o m // JobConf jobConf = new JobConf(); Job job = new Job(conf, "secondary sort"); job.setJarByClass(SecondarySortDESC.class); job.setMapperClass(MapClass.class); job.setReducerClass(Reduce.class); // group and partition by the first int in the pair job.setPartitionerClass(FirstPartitioner.class); job.setGroupingComparatorClass(FirstGroupingComparator.class); // conf.setClass("mapred.output.key.comparator.class", // KeyComparator.class, RawComparator.class); // job.setSortComparatorClass(SecondGroupingComparator.class); // the map output is IntPair, IntWritable job.setMapOutputKeyClass(IntPair.class); job.setMapOutputValueClass(IntWritable.class); // the reduce output is Text, IntWritable job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); FileInputFormat.addInputPath(job, new Path(inPath)); FileOutputFormat.setOutputPath(job, new Path(outPath)); FileSystem fileSystem = FileSystem.get(conf); if (fileSystem.exists(new Path(outPath))) { fileSystem.delete(new Path(outPath)); } System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:com.hhscyber.nl.tweets.hbase2.Hbase2.java
@Override public int run(String[] args) throws Exception { Job client = new Job(getConf(), "hbasetest"); client.setSpeculativeExecution(false); client.setMaxMapAttempts(2);/*from ww w . ja v a 2 s . c o m*/ client.setJarByClass(Hbase2.class); client.setOutputKeyClass(Text.class); client.setOutputValueClass(Text.class); client.setInputFormatClass(TextInputFormat.class); TextInputFormat.addInputPath(client, new Path("input/1441737001"));//test one folder TextOutputFormat.setOutputPath(client, new Path("output4")); client.setMapperClass(Hbase2Mapper.class); client.setReducerClass(Hbase2Reducer.class); try { client.waitForCompletion(true); } catch (IOException | InterruptedException | ClassNotFoundException e) { System.out.println(e); } return 0; }
From source file:com.hhscyber.nl.tweets.processtweets.ProcessTweets.java
/** * @param args the command line arguments *//*w w w .j ava2 s .c o m*/ public static void main(String[] args) throws IOException { Job client = new Job(new Configuration()); client.setJarByClass(ProcessTweets.class); client.setOutputKeyClass(Text.class); client.setOutputValueClass(IntWritable.class); client.setInputFormatClass(TextInputFormat.class); TextInputFormat.addInputPath(client, new Path("input_concat"));// TextOutputFormat.setOutputPath(client, new Path("output2")); client.setMapperClass(ProcessTweetsMapper.class); client.setReducerClass(ProcessTweetsReducer.class); client.setCombinerClass(ProcessTweetsReducer.class); try { client.submit(); } catch (Exception e) { e.printStackTrace(); } }
From source file:com.hhscyber.nl.tweets.svm.test.Tester.java
/** * @param args the command line arguments * @throws java.io.IOException/*from w ww. ja va 2s. c o m*/ */ public static void main(String[] args) throws IOException, Exception { Conf conf = new Conf(args, ""); Job job = new HBJob(conf, "TweetsSVMTester"); job.setJarByClass(Tester.class); Scan scan = new Scan(); TableMapReduceUtil.initTableMapperJob("hhscyber:tweets_lang", scan, TestMapper.class, ImmutableBytesWritable.class, Put.class, job); job.setOutputFormatClass(MultiTableOutputFormat.class); job.setReducerClass(TestReducer.class); job.setNumReduceTasks(2); TableMapReduceUtil.addDependencyJars(job); TableMapReduceUtil.addDependencyJars(job.getConfiguration()); job.waitForCompletion(true); }
From source file:com.hhscyber.nl.tweets.svm.train.Train.java
/** * @param args the command line arguments * @throws java.io.IOException/*from w w w .ja v a2 s. co m*/ */ public static void main(String[] args) throws IOException { Conf conf = new Conf(args, ""); FileSystem hdfs = FileSystem.get(conf); hdfs.delete(new Path("trainer"), true); Job client = new HBJob(conf, "SVMTrainer"); client.setJarByClass(Train.class); client.setMapOutputKeyClass(Text.class); client.setMapOutputValueClass(Text.class); client.setInputFormatClass(TextInputFormat.class); TextInputFormat.addInputPath(client, new Path("svmclass")); client.setNumReduceTasks(1); client.setOutputFormatClass(TextOutputFormat.class); TextOutputFormat.setOutputPath(client, new Path("trainer")); client.setMapperClass(TrainMapper.class); client.setReducerClass(TrainReducer.class); try { client.waitForCompletion(true); } catch (IOException | InterruptedException | ClassNotFoundException e) { } }
From source file:com.hn.cluster.hadoop.mrs.SecondarySort.java
License:Apache License
public static void main(String[] args) throws Exception { // ?hadoop?/*from w w w. j av a2 s .c om*/ Configuration conf = new Configuration(); // ? Job job = new Job(conf, "secondary sort"); job.setJarByClass(SecondarySort.class); // Mapper job.setMapperClass(MapClass.class); // Reducer job.setReducerClass(Reduce.class); // job.setPartitionerClass(FirstPartitioner.class); // job.setGroupingComparatorClass(FirstGroupingComparator.class); // map Key job.setMapOutputKeyClass(IntPair.class); // mapValue job.setMapOutputValueClass(IntWritable.class); // rduceKeyTextOutputFormatClassTextOutputFormat job.setOutputKeyClass(Text.class); // rduceValue job.setOutputValueClass(IntWritable.class); /** * ?????splites???RecordReder * ??RecordReder?keyvalue * Map<LongWritable, Text> * Mapmap<LongWritable, Text>Mapmap * ?List<IntPair, IntWritable> * map?job.setPartitionerClassList?reducer */ job.setInputFormatClass(TextInputFormat.class); // ??RecordWriter? job.setOutputFormatClass(TextOutputFormat.class); // hdfs FileInputFormat.addInputPath(job, new Path("hdfs://192.1168.1.12:9000/input/input/soso.txt")); // hdfs FileOutputFormat.setOutputPath(job, new Path("hdfs://192.1168.1.12:9000/output/sort/")); // ??job System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:com.hortonworks.mapreduce.URLCount.java
License:Apache License
/**
 * Configures and runs the URLCount job: each input line is split at the
 * first space into key/value by KeyValueTextInputFormat, mapped by
 * URLCountM to (Text, IntWritable) and reduced by URLCountR to
 * (IntWritable, Text).
 *
 * @param args args[0] = input path, args[1] = output path
 * @return 0 on success, -1 on failure
 * @throws Exception if job setup or execution fails
 */
@Override
public int run(String[] args) throws Exception {
    Configuration conf = this.getConf();
    // Separate key from value at the first space of each line.
    conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", " ");

    Job job = Job.getInstance(conf, "URLCount");
    job.setJarByClass(getClass());
    job.setInputFormatClass(KeyValueTextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);
    job.setMapperClass(URLCountM.class);
    job.setReducerClass(URLCountR.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    // Dropped the redundant "== true" comparison on the boolean result.
    return job.waitForCompletion(true) ? 0 : -1;
}
From source file:com.howbuy.hadoop.mr.online.SecondarySort.java
License:Apache License
public static void main(String[] args) throws Exception { Configuration conf = new Configuration(); String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); if (otherArgs.length != 2) { System.err.println("Usage: secondarysrot <in> <out>"); System.exit(2);//from ww w. j av a2 s . c o m } Job job = new Job(conf, "secondary sort"); job.setJarByClass(SecondarySort.class); job.setMapperClass(MapClass.class); job.setReducerClass(Reduce.class); // group and partition by the first int in the pair job.setPartitionerClass(FirstPartitioner.class); job.setGroupingComparatorClass(FirstGroupingComparator.class); // the map output is IntPair, IntWritable job.setMapOutputKeyClass(IntPair.class); job.setMapOutputValueClass(IntWritable.class); // the reduce output is Text, IntWritable job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); job.setInputFormatClass(TextInputFormat.class); // job.setOutputFormatClass(SequenceFileOutputFormat.class); job.setNumReduceTasks(3); FileInputFormat.addInputPath(job, new Path(otherArgs[0])); FileOutputFormat.setOutputPath(job, new Path(otherArgs[1])); System.exit(job.waitForCompletion(true) ? 0 : 1); }
From source file:com.huihui.mr.WordCount.java
License:Apache License
/**
 * Word-count driver. NOTE(review): this method validates that exactly two
 * positional arguments are supplied, but then ignores them and uses the
 * hard-coded HDFS input/output paths below — confirm which is intended.
 *
 * @param args generic Hadoop options; exactly two positional args required
 * @throws Exception if job configuration or execution fails
 */
public static void main(String[] args) throws Exception {
    // Hadoop configuration object (org.apache.hadoop.conf.Configuration).
    Configuration conf = new Configuration();
    // GenericOptionsParser consumes the standard Hadoop flags (-D, -fs,
    // -jt, -libjars, -files, -archives, -tokenCacheFile) and returns the
    // remaining positional arguments.
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }
    conf.set("fs.defaultFS", "hdfs://localhost:9000");
    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount.class);
    // Mapper, combiner and reducer; the reducer doubles as the combiner.
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    // Hard-coded HDFS paths; Path wraps a Hadoop filesystem URI.
    // NOTE(review): otherArgs[0]/otherArgs[1] are validated above but
    // never used here.
    String input = "hdfs://localhost:9000/input/";
    String output = "hdfs://localhost:9000/user/hdfs/log_kpi/browser1";
    FileInputFormat.addInputPath(job, new Path(input));
    FileOutputFormat.setOutputPath(job, new Path(output));
    // Run the job and exit with its status.
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
From source file:com.hzy.test.WordCount.java
License:Apache License
public static void main(String[] args) throws Exception { // String input = "hdfs://192.168.1.118:9000/user/hdfs/log_kpi/"; // String output = "hdfs://192.168.1.118:9000/user/hdfs/log_kpi/wc/"; String input = "/tmp/data.txt"; // String input = args[0]; String output = "/tmp/t1"; // String output = args[1]; Configuration conf = HdfsDAO.config(); // conf.set("mapreduce.framework.name", "yarn"); //// conf.set("hbase.zookeeper.quorum", "hadoop01:2181"); // conf.set("fs.default.name", "hdfs://hadoop01:9000"); // conf.set("yarn.resourcemanager.resource-tracker.address", "hadoop01:8031"); // conf.set("yarn.resourcemanager.address", "hadoop01:8032"); // conf.set("yarn.resourcemanager.scheduler.address", "hadoop01:8030"); // conf.set("yarn.resourcemanager.admin.address", "hadoop01:8033"); // conf.set("mapreduce.jobhistory.address", "hadoop01:10020"); // conf.set("mapreduce.jobhistory.webapp.address", "hadoop01:19888"); // String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); // if (otherArgs.length < 2) { // System.err.println("Usage: wordcount <in> [<in>...] <out>"); // System.exit(2); // }/* w w w. j a va2s. c om*/ Job job = Job.getInstance(conf, "word count"); job.setJarByClass(WordCount.class); job.setMapperClass(TokenizerMapper.class); job.setCombinerClass(IntSumReducer.class); job.setReducerClass(IntSumReducer.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); // for (int i = 0; i < otherArgs.length - 1; ++i) { FileInputFormat.addInputPath(job, new Path(input)); // } FileOutputFormat.setOutputPath(job, new Path(output)); System.exit(job.waitForCompletion(true) ? 0 : 1); }