Example usage for org.apache.hadoop.mapred JobConf setReducerClass

List of usage examples for org.apache.hadoop.mapred JobConf setReducerClass

Introduction

On this page you can find example usage for org.apache.hadoop.mapred JobConf setReducerClass.

Prototype

public void setReducerClass(Class<? extends Reducer> theClass) 

Document

Set the Reducer class for the job.
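
Before the per-project examples below, here is a minimal, self-contained sketch of the call: a classic word count written against the old org.apache.hadoop.mapred API. The class and method names (WordCountJob, TokenizerMapper, SumReducer) are hypothetical and not taken from the examples that follow. The class passed to setReducerClass must implement org.apache.hadoop.mapred.Reducer, and its input key/value types must match the map output types.

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class WordCountJob {

    // Emits (word, 1) for every whitespace-separated token in the line.
    public static class TokenizerMapper extends MapReduceBase
            implements Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output,
                Reporter reporter) throws IOException {
            for (String token : value.toString().split("\\s+")) {
                if (!token.isEmpty()) {
                    word.set(token);
                    output.collect(word, ONE);
                }
            }
        }
    }

    // Sums the counts for each word; this is the class handed to setReducerClass.
    public static class SumReducer extends MapReduceBase
            implements Reducer<Text, IntWritable, Text, IntWritable> {
        public void reduce(Text key, Iterator<IntWritable> values,
                OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
            int sum = 0;
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws IOException {
        JobConf conf = new JobConf(WordCountJob.class);
        conf.setJobName("wordcount");

        conf.setMapperClass(TokenizerMapper.class);
        conf.setReducerClass(SumReducer.class); // the call documented on this page

        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(IntWritable.class);

        FileInputFormat.setInputPaths(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        JobClient.runJob(conf);
    }
}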

Usage

From source file:SBP.java

License:Apache License

protected JobConf configUpdateMessage() throws Exception {
    final JobConf conf = new JobConf(getConf(), SBP.class);
    conf.set("nstate", "" + nstate);
    conf.set("compat_matrix_str", "" + edge_potential_str);
    conf.setJobName("BP_Update_message");

    conf.setMapperClass(MapUpdateMessage.class);
    conf.setReducerClass(RedUpdateMessage.class);

    fs.delete(message_next_path, true);

    FileInputFormat.setInputPaths(conf, message_cur_path, prior_path);
    FileOutputFormat.setOutputPath(conf, message_next_path);

    conf.setNumReduceTasks(nreducer);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

From source file:SBP.java

License:Apache License

protected JobConf configSmoothMessage() throws Exception {
    final JobConf conf = new JobConf(getConf(), SBP.class);
    conf.set("smooth_lambda", "" + lambda);
    conf.set("nstate", "" + nstate);
    conf.set("compat_matrix_str", "" + edge_potential_str);
    conf.setJobName("BP_Smooth_message");

    fs.delete(message_smooth_path, true);

    conf.setMapperClass(MapSmoothMessage.class);
    conf.setReducerClass(RedSmoothMessage.class);

    FileInputFormat.setInputPaths(conf, message_cur_path, message_next_path);
    FileOutputFormat.setOutputPath(conf, message_smooth_path);

    conf.setNumReduceTasks(nreducer);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

From source file:SBP.java

License:Apache License

protected JobConf configCheckErr() throws Exception {
    final JobConf conf = new JobConf(getConf(), SBP.class);
    conf.set("nstate", "" + nstate);
    conf.setJobName("BP_Check Err");

    fs.delete(check_error_path, true);

    conf.setMapperClass(MapCheckErr.class);
    conf.setReducerClass(RedCheckErr.class);

    FileInputFormat.setInputPaths(conf, message_cur_path, message_smooth_path);
    FileOutputFormat.setOutputPath(conf, check_error_path);

    conf.setNumReduceTasks(nreducer);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

From source file:SBP.java

License:Apache License

protected JobConf configSumErr() throws Exception {
    final JobConf conf = new JobConf(getConf(), SBP.class);
    conf.set("nstate", "" + nstate);
    conf.setJobName("BP_Sum Err");

    fs.delete(sum_error_path, true);

    conf.setMapperClass(MapSumErr.class);
    conf.setReducerClass(RedSumErr.class);

    FileInputFormat.setInputPaths(conf, check_error_path);
    FileOutputFormat.setOutputPath(conf, sum_error_path);

    conf.setNumReduceTasks(1);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

From source file:SBP.java

License:Apache License

protected JobConf configComputeBelief() throws Exception {
    final JobConf conf = new JobConf(getConf(), SBP.class);
    conf.set("nstate", "" + nstate);
    conf.set("compat_matrix_str", "" + edge_potential_str);
    conf.setJobName("BP_Compute_Belief");

    conf.setMapperClass(MapComputeBelief.class);
    conf.setReducerClass(RedComputeBelief.class);

    fs.delete(output_path, true);

    FileInputFormat.setInputPaths(conf, message_cur_path, prior_path);
    FileOutputFormat.setOutputPath(conf, output_path);

    conf.setNumReduceTasks(nreducer);

    conf.setOutputKeyClass(LongWritable.class);
    conf.setOutputValueClass(Text.class);

    return conf;
}

From source file:gen_rank.java

License:LGPL

public static void runjob(String input, String output) throws Exception {
    JobConf conf = new JobConf(gen_rank.class);
    conf.setJobName("Preparing_data");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(input));
    FileOutputFormat.setOutputPath(conf, new Path(output));

    JobClient.runJob(conf);
}

From source file:WikipediaForwardIndexBuilder.java

License:Apache License

@SuppressWarnings("static-access")
@Override
public int run(String[] args) throws Exception {
    Options options = new Options();
    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input").create(INPUT_OPTION));
    options.addOption(
            OptionBuilder.withArgName("path").hasArg().withDescription("index file").create(INDEX_FILE_OPTION));
    options.addOption(OptionBuilder.withArgName("en|sv|de|cs|es|zh|ar|tr").hasArg()
            .withDescription("two-letter language code").create(LANGUAGE_OPTION));

    CommandLine cmdline;
    CommandLineParser parser = new GnuParser();
    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        return -1;
    }

    if (!cmdline.hasOption(INPUT_OPTION) || !cmdline.hasOption(INDEX_FILE_OPTION)) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(this.getClass().getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    Path inputPath = new Path(cmdline.getOptionValue(INPUT_OPTION));
    String indexFile = cmdline.getOptionValue(INDEX_FILE_OPTION);

    String tmpPath = "tmp-" + WikipediaForwardIndexBuilder.class.getSimpleName() + "-" + RANDOM.nextInt(10000);

    if (!inputPath.isAbsolute()) {
        System.err.println("Error: " + INPUT_OPTION + " must be an absolute path!");
        return -1;
    }

    String language = null;
    if (cmdline.hasOption(LANGUAGE_OPTION)) {
        language = cmdline.getOptionValue(LANGUAGE_OPTION);
        if (language.length() != 2) {
            System.err.println("Error: \"" + language + "\" unknown language!");
            return -1;
        }
    }

    JobConf conf = new JobConf(getConf(), WikipediaForwardIndexBuilder.class);
    FileSystem fs = FileSystem.get(conf);

    LOG.info("Tool name: " + this.getClass().getName());
    LOG.info(" - input path: " + inputPath);
    LOG.info(" - index file: " + indexFile);
    LOG.info(" - language: " + language);
    LOG.info("Note: This tool only works on block-compressed SequenceFiles!");

    conf.setJobName(String.format("BuildWikipediaForwardIndex[%s: %s, %s: %s, %s: %s]", INPUT_OPTION, inputPath,
            INDEX_FILE_OPTION, indexFile, LANGUAGE_OPTION, language));

    conf.setNumReduceTasks(1);

    FileInputFormat.setInputPaths(conf, inputPath);
    FileOutputFormat.setOutputPath(conf, new Path(tmpPath));
    FileOutputFormat.setCompressOutput(conf, false);

    if (language != null) {
        conf.set("wiki.language", language);
    }

    conf.setInputFormat(NoSplitSequenceFileInputFormat.class);
    conf.setOutputKeyClass(IntWritable.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapRunnerClass(MyMapRunner.class);
    conf.setReducerClass(IdentityReducer.class);

    // Delete the output directory if it exists already.
    fs.delete(new Path(tmpPath), true);

    RunningJob job = JobClient.runJob(conf);

    Counters counters = job.getCounters();
    int blocks = (int) counters.getCounter(Blocks.Total);

    LOG.info("number of blocks: " + blocks);

    LOG.info("Writing index file...");
    LineReader reader = new LineReader(fs.open(new Path(tmpPath + "/part-00000")));
    FSDataOutputStream out = fs.create(new Path(indexFile), true);

    out.writeUTF(edu.umd.cloud9.collection.wikipedia.WikipediaForwardIndex.class.getCanonicalName());
    out.writeUTF(inputPath.toString());
    out.writeInt(blocks);

    int cnt = 0;
    Text line = new Text();
    while (reader.readLine(line) > 0) {
        String[] arr = line.toString().split("\\s+");

        int docno = Integer.parseInt(arr[0]);
        int offset = Integer.parseInt(arr[1]);
        short fileno = Short.parseShort(arr[2]);

        out.writeInt(docno);
        out.writeInt(offset);
        out.writeShort(fileno);

        cnt++;

        if (cnt % 100000 == 0) {
            LOG.info(cnt + " blocks written");
        }
    }

    reader.close();
    out.close();

    if (cnt != blocks) {
        throw new RuntimeException("Error: mismatch in block count!");
    }

    // Clean up.
    fs.delete(new Path(tmpPath), true);

    return 0;
}

From source file:GapDeduceRunner.java

License:Apache License

public static void main(String[] args) throws IOException {
    JobConf conf = new JobConf(GapDeduceRunner.class);
    conf.setJobName("gapdeduce");

    conf.setMapOutputKeyClass(Text.class);
    conf.setMapOutputValueClass(Text.class);

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(Text.class);

    conf.setMapperClass(Gapper.class);
    conf.setReducerClass(Deducer.class);

    // KeyValueTextInputFormat treats each line as an input record and
    // splits it at the first tab character into a key and a value.
    conf.setInputFormat(KeyValueTextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    JobClient.runJob(conf);
}
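
In the job above, conf.setInputFormat(KeyValueTextInputFormat.class) means the mapper receives Text keys and Text values (the pre-split halves of each line) rather than the usual LongWritable byte offsets. The listing does not include the Gapper source, so the following skeleton only illustrates the signature such a mapper must have to match this configuration:

// Illustrative skeleton only; the real Gapper body is not shown in the listing.
public static class Gapper extends MapReduceBase implements Mapper<Text, Text, Text, Text> {
    public void map(Text key, Text value, OutputCollector<Text, Text> output, Reporter reporter)
            throws IOException {
        // KeyValueTextInputFormat already split the line at the first tab,
        // so key and value arrive pre-parsed; a pass-through is shown here.
        output.collect(key, value);
    }
}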

From source file:LicenseStatewiseCount.java

public static void main(String[] args) throws Exception {

    JobConf conf = new JobConf(LicenseStatewiseCount.class);
    conf.setJobName("Statecounts");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(Map.class);
    conf.setReducerClass(Reduce.class);

    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));

    JobClient.runJob(conf);

}

From source file:DijikstraAlgo.java

License:GNU General Public License

public static void run(String[] args) throws Exception {
    IN = "hdfs://10.8.3.161:9000/user/sagar/input/";
    OUT = "hdfs://10.8.3.161:9000/user/sagar/output/";
    String input = IN;
    String output = OUT + System.nanoTime();
    String MAX_SPLIT_SIZE = args[0];
    boolean isdone = false;

    // Iterate until convergence
    while (isdone == false) {
        JobConf conf = new JobConf(DijikstraAlgo.class);
        conf.setJobName("Dijikstra");
        // conf.set("mapred.max.split.size", MAX_SPLIT_SIZE);
        conf.setOutputKeyClass(LongWritable.class);
        conf.setOutputValueClass(Text.class);
        conf.setMapperClass(Map.class);
        conf.setReducerClass(Reduce.class);
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        FileInputFormat.setInputPaths(conf, new Path(input));
        FileOutputFormat.setOutputPath(conf, new Path(output));

        JobClient.runJob(conf);

        input = output + "/part-00000";
        isdone = true; // assume convergence; the check below may reset this
        Path ofile = new Path(input);
        FileSystem fs = FileSystem.get(new URI("hdfs://10.8.3.165:9000"), conf);
        //FileSystem fs = FileSystem.get(new Configuration());
        BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(ofile)));
        HashMap<Integer, Integer> imap = new HashMap<Integer, Integer>();
        String line = br.readLine();
        // Read the current output file into a HashMap
        while (line != null) {
            String[] sp = line.split("\t| ");
            int node = Integer.parseInt(sp[0]);
            int distance = Integer.parseInt(sp[1]);
            imap.put(node, distance);
            line = br.readLine();
        }
        br.close();

        // Convergence check: if any node's distance is still >= 125,
        // run another iteration; otherwise stop.
        Iterator<Integer> itr = imap.keySet().iterator();
        while (itr.hasNext()) {
            int key = itr.next();
            int value = imap.get(key);
            if (value >= 125) {
                isdone = false;
            }
        }
        input = output;
        output = OUT + System.nanoTime();
    }
}
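
The driver above decides whether to run another iteration by re-opening part-00000 and scanning for distances of 125 or more. A common alternative, sketched here under the assumption of a hypothetical Convergence.UNCONVERGED counter (not part of the original code), is to have the reducer increment a counter for each unconverged node and let the driver read it back from the RunningJob, much as the WikipediaForwardIndexBuilder example above reads its Blocks.Total counter:

// Hypothetical counter enum; not part of the original DijikstraAlgo code.
enum Convergence { UNCONVERGED }

// In the reducer, after a node's distance is computed:
//     if (distance >= 125) {
//         reporter.incrCounter(Convergence.UNCONVERGED, 1);
//     }

// In the driver, replacing the HDFS read and HashMap scan:
RunningJob job = JobClient.runJob(conf);
long unconverged = job.getCounters().getCounter(Convergence.UNCONVERGED);
isdone = (unconverged == 0);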