Example usage for org.apache.hadoop.mapreduce Job setPartitionerClass

List of usage examples for org.apache.hadoop.mapreduce Job setPartitionerClass

Introduction

On this page you can find an example usage of org.apache.hadoop.mapreduce Job.setPartitionerClass.

Prototype

public void setPartitionerClass(Class<? extends Partitioner> cls) throws IllegalStateException 

Source Link

Document

Set the Partitioner for the job.

Usage

From source file:ph.fingra.hadoop.mapred.parts.component.ComponentResolutionStatistic.java

License:Apache License

/**
 * Builds the "component/componentresolution" MapReduce job.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the constructor Job(Configuration),
    // which is deprecated as of Hadoop 2.x.
    Job job = Job.getInstance(conf);
    job.setJobName("component/componentresolution job");

    job.setJarByClass(ComponentResolutionStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(ComponentResolutionMapper.class);
    job.setReducerClass(ComponentResolutionReducer.class);

    job.setMapOutputKeyClass(ComponentResolutionKey.class);
    job.setMapOutputValueClass(ComponentResolutionEntity.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Custom partitioner + sort/grouping comparators control how the composite
    // key is distributed, ordered, and grouped during the shuffle.
    job.setPartitionerClass(ComponentResolutionPartitioner.class);
    job.setSortComparatorClass(ComponentResolutionSortComparator.class);
    job.setGroupingComparatorClass(ComponentResolutionGroupComparator.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.component.ComponentUserSessionStatistic.java

License:Apache License

/**
 * Builds the "component/componentusersession" MapReduce job.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("component/componentusersession job");

    job.setJarByClass(ComponentUserSessionStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(ComponentUserSessionMapper.class);
    job.setReducerClass(ComponentUserSessionReducer.class);

    job.setMapOutputKeyClass(ComponentUserSessionKey.class);
    job.setMapOutputValueClass(ComponentUserSessionEntity.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Custom partitioner + sort/grouping comparators control how the composite
    // key is distributed, ordered, and grouped during the shuffle.
    job.setPartitionerClass(ComponentUserSessionPartitioner.class);
    job.setSortComparatorClass(ComponentUserSessionSortComparator.class);
    job.setGroupingComparatorClass(ComponentUserSessionGroupComparator.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.AppversionStatistic.java

License:Apache License

/**
 * Builds the "distribute/appversion" MapReduce job.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/appversion job");

    job.setJarByClass(AppversionStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(AppversionMapper.class);
    job.setReducerClass(AppversionReducer.class);

    job.setMapOutputKeyClass(AppversionKey.class);
    job.setMapOutputValueClass(AppversionEntity.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Custom partitioner + sort/grouping comparators control how the composite
    // key is distributed, ordered, and grouped during the shuffle.
    job.setPartitionerClass(AppversionPartitioner.class);
    job.setSortComparatorClass(AppversionSortComparator.class);
    job.setGroupingComparatorClass(AppversionGroupComparator.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.CountryHourSessionStatistic.java

License:Apache License

/**
 * Builds the "distribute/countryhoursession" MapReduce job.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/countryhoursession job");

    job.setJarByClass(CountryHourSessionStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(CountryHourSessionMapper.class);
    job.setReducerClass(CountryHourSessionReducer.class);

    job.setMapOutputKeyClass(CountryHourSessionKey.class);
    job.setMapOutputValueClass(CountryHourSessionEntity.class);

    // Note: this job emits Text/LongWritable final output, unlike the Text/Text
    // jobs elsewhere in this project.
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    // Custom partitioner + sort/grouping comparators control how the composite
    // key is distributed, ordered, and grouped during the shuffle.
    job.setPartitionerClass(CountryHourSessionPartitioner.class);
    job.setSortComparatorClass(CountryHourSessionSortComparator.class);
    job.setGroupingComparatorClass(CountryHourSessionGroupComparator.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.CountryNewuserStatistic.java

License:Apache License

/**
 * Builds the "distribute/countrynewuser" MapReduce job.
 *
 * <p>In addition to the debug flags, the target date (runmode/year/month/day/week)
 * is written into the Configuration so mappers/reducers can read it at runtime.
 *
 * @param conf       base Hadoop configuration; debug flags and target-date values are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @param targetdate target date whose fields are exported into the job configuration
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig, TargetDate targetdate) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());
    conf.set("runmode", targetdate.getRunmode());
    conf.set("year", targetdate.getYear());
    conf.set("month", targetdate.getMonth());
    conf.set("day", targetdate.getDay());
    conf.set("week", targetdate.getWeek_str());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/countrynewuser job");

    job.setJarByClass(CountryNewuserStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(CountryNewuserMapper.class);
    // Reducer doubles as combiner: map output and reduce output types match
    // (Text/LongWritable), which is required for combiner reuse.
    job.setCombinerClass(CountryNewuserReducer.class);
    job.setReducerClass(CountryNewuserReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    job.setPartitionerClass(CountryNewuserPartitioner.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.CountryPageviewStatistic.java

License:Apache License

/**
 * Builds the "distribute/countrypageview" MapReduce job.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/countrypageview job");

    job.setJarByClass(CountryPageviewStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(CountryPageviewMapper.class);
    // Reducer doubles as combiner: map output and reduce output types match
    // (Text/LongWritable), which is required for combiner reuse.
    job.setCombinerClass(CountryPageviewReducer.class);
    job.setReducerClass(CountryPageviewReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    job.setPartitionerClass(CountryPageviewPartitioner.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.CountrySessionLengthStatistic.java

License:Apache License

/**
 * Builds the first ("distribute/countrysesstime") stage of the country
 * session-length computation; its output feeds {@code createJobFinal}.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the intermediate results
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJobIntermediate(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/countrysesstime job");

    job.setJarByClass(CountrySessionLengthStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(CountrySesstimeMapper.class);
    job.setReducerClass(CountrySesstimeReducer.class);

    job.setMapOutputKeyClass(CountrySesstimeKey.class);
    job.setMapOutputValueClass(CountrySesstimeEntity.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    // Custom partitioner + sort/grouping comparators control how the composite
    // key is distributed, ordered, and grouped during the shuffle.
    job.setPartitionerClass(CountrySesstimePartitioner.class);
    job.setSortComparatorClass(CountrySesstimeSortComparator.class);
    job.setGroupingComparatorClass(CountrySesstimeGroupComparator.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.CountrySessionLengthStatistic.java

License:Apache License

/**
 * Builds the second ("distribute/countrysessionlength") stage, aggregating the
 * single output of {@code createJobIntermediate} into final session-length figures.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpath  single input path (the intermediate stage's output)
 * @param outputpath output directory for the final results
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJobFinal(Configuration conf, Path inputpath, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/countrysessionlength job");

    job.setJarByClass(CountrySessionLengthStatistic.class);

    FileInputFormat.addInputPath(job, inputpath);
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(CountrySecondsessMapper.class);
    // Reducer doubles as combiner: map output and reduce output types match
    // (Text/LongWritable), which is required for combiner reuse.
    job.setCombinerClass(CountrySecondsessReducer.class);
    job.setReducerClass(CountrySecondsessReducer.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(LongWritable.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);

    job.setPartitionerClass(CountrySecondsessPartitioner.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.CountryStatistic.java

License:Apache License

/**
 * Builds the "distribute/country" MapReduce job.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/country job");

    job.setJarByClass(CountryStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(CountryMapper.class);
    job.setReducerClass(CountryReducer.class);

    job.setMapOutputKeyClass(CountryKey.class);
    job.setMapOutputValueClass(CountryEntity.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Custom partitioner + sort/grouping comparators control how the composite
    // key is distributed, ordered, and grouped during the shuffle.
    job.setPartitionerClass(CountryPartitioner.class);
    job.setSortComparatorClass(CountrySortComparator.class);
    job.setGroupingComparatorClass(CountryGroupComparator.class);

    job.setNumReduceTasks(numreduce);

    return job;
}

From source file:ph.fingra.hadoop.mapred.parts.distribution.DeviceStatistic.java

License:Apache License

/**
 * Builds the "distribute/device" MapReduce job.
 *
 * @param conf       base Hadoop configuration; debug flags from finconfig are copied in
 * @param inputpaths input paths registered on the job
 * @param outputpath output directory for the job
 * @param numreduce  number of reduce tasks
 * @param finconfig  Fingraph configuration supplying the verbose/counter debug flags
 * @return the fully configured (not yet submitted) Job
 * @throws IOException if the job cannot be created
 */
public Job createJob(Configuration conf, Path[] inputpaths, Path outputpath, int numreduce,
        FingraphConfig finconfig) throws IOException {

    conf.setBoolean("verbose", finconfig.getDebug().isDebug_show_verbose());
    conf.setBoolean("counter", finconfig.getDebug().isDebug_show_counter());

    // Job.getInstance(conf) replaces the deprecated Job(Configuration) constructor.
    Job job = Job.getInstance(conf);
    job.setJobName("distribute/device job");

    job.setJarByClass(DeviceStatistic.class);

    for (Path inputpath : inputpaths) {
        FileInputFormat.addInputPath(job, inputpath);
    }
    FileOutputFormat.setOutputPath(job, outputpath);

    job.setMapperClass(DeviceMapper.class);
    job.setReducerClass(DeviceReducer.class);

    job.setMapOutputKeyClass(DeviceKey.class);
    job.setMapOutputValueClass(DeviceEntity.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    // Custom partitioner + sort/grouping comparators control how the composite
    // key is distributed, ordered, and grouped during the shuffle.
    job.setPartitionerClass(DevicePartitioner.class);
    job.setSortComparatorClass(DeviceSortComparator.class);
    job.setGroupingComparatorClass(DeviceGroupComparator.class);

    job.setNumReduceTasks(numreduce);

    return job;
}