List of usage examples for org.apache.hadoop.mapreduce Job setPartitionerClass
public void setPartitionerClass(Class<? extends Partitioner> cls) throws IllegalStateException
From source file: ph.fingra.hadoop.mapred.parts.performance.NewuserStatistic.java
License: Apache License
public Job createHourJobFinal(Configuration conf, Path inputpath, Path outputpath, int numreduce, FingraphConfig finconfig, TargetDate targetdate) throws IOException { conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose()); conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter()); conf.set("year", targetdate.getYear()); conf.set("month", targetdate.getMonth()); conf.set("day", targetdate.getDay()); conf.set("hour", targetdate.getHour()); Job job = new Job(conf); String jobName = "perform/newuser hour job"; job.setJobName(jobName);//from ww w . jav a2 s . co m job.setJarByClass(NewuserStatistic.class); FileInputFormat.addInputPath(job, inputpath); FileOutputFormat.setOutputPath(job, outputpath); job.setMapperClass(NewuserHourMapper.class); job.setCombinerClass(NewuserHourReducer.class); job.setReducerClass(NewuserHourReducer.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(LongWritable.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); job.setPartitionerClass(NewuserHourPartitioner.class); job.setNumReduceTasks(numreduce); return job; }
From source file: ph.fingra.hadoop.mapred.parts.performance.PageviewStatistic.java
License: Apache License
/**
 * Builds the daily pageview statistics job over one or more input paths.
 *
 * @param conf       base Hadoop configuration (mutated with debug keys)
 * @param inputpaths input paths, all added to the job
 * @param outputpath output path for the results
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration providing debug options
 * @return the configured, not-yet-submitted Job
 * @throws IOException if the Job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated new Job(conf) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("perform/pageview job");
    job.setJarByClass(PageviewStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(PageviewMapper.class);
    // Reducer doubles as combiner: map output and final output types match.
    job.setCombinerClass(PageviewReducer.class);
    job.setReducerClass(PageviewReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    job.setPartitionerClass(PageviewPartitioner.class);
    job.setNumReduceTasks(numreduce);

    return job;
}
From source file: ph.fingra.hadoop.mapred.parts.performance.PageviewStatistic.java
License: Apache License
public Job createHourJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce, FingraphConfig finconfig, TargetDate targetdate) throws IOException { conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose()); conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter()); conf.set("hour", targetdate.getHour()); Job job = new Job(conf); String jobName = "perform/pageview hour job"; job.setJobName(jobName);//from ww w. ja va 2 s .com job.setJarByClass(PageviewStatistic.class); for (int i = 0; i < inputpaths.length; i++) { FileInputFormat.addInputPath(job, inputpaths[i]); } FileOutputFormat.setOutputPath(job, outputpath); job.setMapperClass(PageviewHourMapper.class); job.setCombinerClass(PageviewHourReducer.class); job.setReducerClass(PageviewHourReducer.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(LongWritable.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); job.setPartitionerClass(PageviewHourPartitioner.class); job.setNumReduceTasks(numreduce); return job; }
From source file: ph.fingra.hadoop.mapred.parts.performance.SessionLengthStatistic.java
License: Apache License
/**
 * Builds the intermediate session-time job (secondary-sort stage) whose output
 * feeds {@code createJobFinal}.
 * <p>
 * Uses a composite key with custom partitioner, sort comparator, and grouping
 * comparator to implement secondary sort over session times.
 *
 * @param conf       base Hadoop configuration (mutated with debug keys)
 * @param inputpaths input paths, all added to the job
 * @param outputpath output path for the intermediate results
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration providing debug options
 * @return the configured, not-yet-submitted Job
 * @throws IOException if the Job cannot be created
 */
public Job createJobIntermediate(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated new Job(conf) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("perform/sesstime job");
    job.setJarByClass(SessionLengthStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(SesstimeMapper.class);
    // No combiner: map output types differ from reduce output types.
    job.setReducerClass(SesstimeReducer.class);

    job.setMapOutputKeyClass(SesstimeKey.class);
    job.setMapOutputValueClass(SesstimeEntity.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    // Secondary sort: partition/sort/group on the composite SesstimeKey.
    job.setPartitionerClass(SesstimePartitioner.class);
    job.setSortComparatorClass(SesstimeSortComparator.class);
    job.setGroupingComparatorClass(SesstimeGroupComparator.class);
    job.setNumReduceTasks(numreduce);

    return job;
}
From source file: ph.fingra.hadoop.mapred.parts.performance.SessionLengthStatistic.java
License: Apache License
public Job createJobFinal(Configuration conf, Path inputpath, Path outputpath, int numreduce, FingraphConfig finconfig) throws IOException { conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose()); conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter()); Job job = new Job(conf); String jobName = "perform/sessionlength job"; job.setJobName(jobName);// ww w.j a v a 2 s.com job.setJarByClass(SessionLengthStatistic.class); FileInputFormat.addInputPath(job, inputpath); FileOutputFormat.setOutputPath(job, outputpath); job.setMapperClass(SecondsessMapper.class); job.setCombinerClass(SecondsessReducer.class); job.setReducerClass(SecondsessReducer.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(LongWritable.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(LongWritable.class); job.setPartitionerClass(SecondsessPartitioner.class); job.setNumReduceTasks(numreduce); return job; }
From source file: ph.fingra.hadoop.mapred.parts.performance.SessionLengthStatistic.java
License: Apache License
/**
 * Builds the hourly intermediate session-time job (secondary-sort stage)
 * whose output feeds {@code createHourJobFinal}.
 *
 * @param conf       base Hadoop configuration (mutated with debug/hour keys)
 * @param inputpaths input paths, all added to the job
 * @param outputpath output path for the intermediate results
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration providing debug options
 * @param targetdate target date whose hour is passed to tasks
 * @return the configured, not-yet-submitted Job
 * @throws IOException if the Job cannot be created
 */
public Job createHourJobIntermediate(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig, TargetDate targetdate) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());
    conf.set("hour", targetdate.getHour());

    // Job.getInstance(conf) replaces the deprecated new Job(conf) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("perform/sesstime hour job");
    job.setJarByClass(SessionLengthStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(SesstimeHourMapper.class);
    // No combiner: map output types differ from reduce output types.
    job.setReducerClass(SesstimeHourReducer.class);

    job.setMapOutputKeyClass(SesstimeHourKey.class);
    job.setMapOutputValueClass(SesstimeHourEntity.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    // Secondary sort: partition/sort/group on the composite SesstimeHourKey.
    job.setPartitionerClass(SesstimeHourPartitioner.class);
    job.setSortComparatorClass(SesstimeHourSortComparator.class);
    job.setGroupingComparatorClass(SesstimeHourGroupComparator.class);
    job.setNumReduceTasks(numreduce);

    return job;
}
From source file: ph.fingra.hadoop.mapred.parts.performance.SessionLengthStatistic.java
License: Apache License
/**
 * Builds the final hourly session-length job, consuming the output of
 * {@code createHourJobIntermediate}.
 *
 * @param conf       base Hadoop configuration (mutated with debug keys)
 * @param inputpath  input path of the intermediate data
 * @param outputpath output path for the final results
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration providing debug options
 * @return the configured, not-yet-submitted Job
 * @throws IOException if the Job cannot be created
 */
public Job createHourJobFinal(Configuration conf, Path inputpath, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated new Job(conf) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("perform/sessionlength hour job");
    job.setJarByClass(SessionLengthStatistic.class);

    FileInputFormat.addInputPath(job, inputpath);
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(SecondsessHourMapper.class);
    // Reducer doubles as combiner: map output and final output types match.
    job.setCombinerClass(SecondsessHourReducer.class);
    job.setReducerClass(SecondsessHourReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    job.setPartitionerClass(SecondsessHourPartitioner.class);
    job.setNumReduceTasks(numreduce);

    return job;
}
From source file: ph.fingra.hadoop.mapred.parts.performance.UserSessionStatistic.java
License: Apache License
/**
 * Builds the daily user-session statistics job (secondary sort over session
 * entities).
 *
 * @param conf       base Hadoop configuration (mutated with debug keys)
 * @param inputpaths input paths, all added to the job
 * @param outputpath output path for the results
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration providing debug options
 * @return the configured, not-yet-submitted Job
 * @throws IOException if the Job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated new Job(conf) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("perform/usersession job");
    job.setJarByClass(UserSessionStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(UserSessionMapper.class);
    // No combiner: map output types differ from reduce output types.
    job.setReducerClass(UserSessionReducer.class);

    job.setMapOutputKeyClass(UserSessionKey.class);
    job.setMapOutputValueClass(UserSessionEntity.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Secondary sort: partition/sort/group on the composite UserSessionKey.
    job.setPartitionerClass(UserSessionPartitioner.class);
    job.setSortComparatorClass(UserSessionSortComparator.class);
    job.setGroupingComparatorClass(UserSessionGroupComparator.class);
    job.setNumReduceTasks(numreduce);

    return job;
}
From source file: ph.fingra.hadoop.mapred.parts.performance.UserSessionStatistic.java
License: Apache License
public Job createHourJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce, FingraphConfig finconfig, TargetDate targetdate) throws IOException { conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose()); conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter()); conf.set("hour", targetdate.getHour()); Job job = new Job(conf); String jobName = "perform/usersession hour job"; job.setJobName(jobName);//from w w w . ja v a2s .co m job.setJarByClass(UserSessionStatistic.class); for (int i = 0; i < inputpaths.length; i++) { FileInputFormat.addInputPath(job, inputpaths[i]); } FileOutputFormat.setOutputPath(job, outputpath); job.setMapperClass(UserSessionHourMapper.class); job.setReducerClass(UserSessionHourReducer.class); job.setMapOutputKeyClass(UserSessionHourKey.class); job.setMapOutputValueClass(UserSessionHourEntity.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setPartitionerClass(UserSessionHourPartitioner.class); job.setSortComparatorClass(UserSessionHourSortComparator.class); job.setGroupingComparatorClass(UserSessionHourGroupComparator.class); job.setNumReduceTasks(numreduce); return job; }
From source file: ph.fingra.hadoop.mapred.parts.prerole.AppNewuserMerge.java
License: Apache License
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce, FingraphConfig finconfig, Path dbpath, String dbfilename, TargetDate cutdate) throws IOException { conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose()); conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter()); conf.set("dbfilename", dbfilename); conf.set("cutyear", cutdate.getYear()); conf.set("cutmonth", cutdate.getMonth()); conf.set("cutday", cutdate.getDay()); Job job = new Job(conf); String jobName = "merge/appnewusermerge job"; job.setJobName(jobName);// w w w.jav a2 s.c o m job.setJarByClass(AppNewuserMerge.class); for (int i = 0; i < inputpaths.length; i++) { FileInputFormat.addInputPath(job, inputpaths[i]); } if (dbpath != null) { FileInputFormat.addInputPath(job, dbpath); } FileOutputFormat.setOutputPath(job, outputpath); job.setMapperClass(AppNewuserMapper.class); job.setCombinerClass(AppNewuserCombiner.class); job.setReducerClass(AppNewuserReducer.class); job.setMapOutputKeyClass(AppNewuserKey.class); job.setMapOutputValueClass(AppNewuserDb.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setPartitionerClass(AppNewuserPartitioner.class); job.setNumReduceTasks(numreduce); return job; }