com.hadoopilluminated.examples.Join.java Source code

Introduction

Here is the source code for com.hadoopilluminated.examples.Join.java. It is adapted from the org.apache.hadoop.examples Join program and demonstrates a map-side join driven by CompositeInputFormat.

Source

package com.hadoopilluminated.examples;

/**
 * The example below is taken from org.apache.hadoop.examples. It is then
 * commented and modified for the purposes of the book.
 */

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.join.CompositeInputFormat;
import org.apache.hadoop.mapred.join.TupleWritable;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * This example performs a map-side join over a set of sorted, identically
 * partitioned datasets: CompositeInputFormat joins the inputs before the
 * records ever reach the (identity) mapper, so the map/reduce framework
 * merely fragments and sorts the joined values.
 *
 * To run: bin/hadoop jar build/hadoop-examples.jar join [-m <i>maps</i>] [-r
 * <i>reduces</i>] [-inFormat <i>input format class</i>] [-outFormat <i>output
 * format class</i>] [-outKey <i>output key class</i>] [-outValue <i>output
 * value class</i>] [-joinOp &lt;inner|outer|override&gt;] [<i>in-dir</i>]*
 * <i>in-dir</i> <i>out-dir</i>
 */
public class Join extends Configured implements Tool {

    static int printUsage() {
        System.out.println("join [-m <maps>] [-r <reduces>] " + "[-inFormat <input format class>] "
                + "[-outFormat <output format class>] " + "[-outKey <output key class>] "
                + "[-outValue <output value class>] " + "[-joinOp <inner|outer|override>] "
                + "[input]* <input> <output>");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    /**
     * The main driver for the join program. Invoke this method to submit the
     * map/reduce job.
     *
     * @throws IOException When there are communication problems with the job
     * tracker.
     */
    @Override
    public int run(String[] args) throws Exception {
        JobConf jobConf = new JobConf(getConf(), Join.class);
        jobConf.setJobName("join");

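        // The mapper and reducer are identities: the actual join is performed
        // by the input format (CompositeInputFormat) before records reach the map.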
        jobConf.setMapperClass(IdentityMapper.class);
        jobConf.setReducerClass(IdentityReducer.class);

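        // Size the job from the live cluster: roughly 10 map tasks per
        // tasktracker and about 90% of the cluster's reduce slots, unless
        // overridden via test.sort.maps_per_host / test.sort.reduces_per_host.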
        JobClient client = new JobClient(jobConf);
        ClusterStatus cluster = client.getClusterStatus();
        int num_maps = cluster.getTaskTrackers() * jobConf.getInt("test.sort.maps_per_host", 10);
        int num_reduces = (int) (cluster.getMaxReduceTasks() * 0.9);
        String sort_reduces = jobConf.get("test.sort.reduces_per_host");
        if (sort_reduces != null) {
            num_reduces = cluster.getTaskTrackers() * Integer.parseInt(sort_reduces);
        }
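        // Defaults: SequenceFile input/output, BytesWritable keys, and
        // TupleWritable values (one entry per joined source); the join type
        // defaults to an inner join unless -joinOp says otherwise.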
        Class<? extends InputFormat> inputFormatClass = SequenceFileInputFormat.class;
        Class<? extends OutputFormat> outputFormatClass = SequenceFileOutputFormat.class;
        Class<? extends WritableComparable> outputKeyClass = BytesWritable.class;
        Class<? extends Writable> outputValueClass = TupleWritable.class;
        String op = "inner";
        List<String> otherArgs = new ArrayList<String>();
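        // Walk the command line; options that take a value consume the next
        // argument, and anything unrecognized is treated as an input/output path.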
        for (int i = 0; i < args.length; ++i) {
            try {
                if ("-m".equals(args[i])) {
                    num_maps = Integer.parseInt(args[++i]);
                } else if ("-r".equals(args[i])) {
                    num_reduces = Integer.parseInt(args[++i]);
                } else if ("-inFormat".equals(args[i])) {
                    inputFormatClass = Class.forName(args[++i]).asSubclass(InputFormat.class);
                } else if ("-outFormat".equals(args[i])) {
                    outputFormatClass = Class.forName(args[++i]).asSubclass(OutputFormat.class);
                } else if ("-outKey".equals(args[i])) {
                    outputKeyClass = Class.forName(args[++i]).asSubclass(WritableComparable.class);
                } else if ("-outValue".equals(args[i])) {
                    outputValueClass = Class.forName(args[++i]).asSubclass(Writable.class);
                } else if ("-joinOp".equals(args[i])) {
                    op = args[++i];
                } else {
                    otherArgs.add(args[i]);
                }
            } catch (NumberFormatException except) {
                System.out.println("ERROR: Integer expected instead of " + args[i]);
                return printUsage();
            } catch (ArrayIndexOutOfBoundsException except) {
                System.out.println("ERROR: Required parameter missing from " + args[i - 1]);
                return printUsage(); // prints usage and returns -1
            }
        }

        // Set user-supplied (possibly default) job configs
        jobConf.setNumMapTasks(num_maps);
        jobConf.setNumReduceTasks(num_reduces);

        if (otherArgs.size() < 2) {
            System.out.println("ERROR: Wrong number of parameters: ");
            return printUsage();
        }

        FileOutputFormat.setOutputPath(jobConf, new Path(otherArgs.remove(otherArgs.size() - 1)));
        List<Path> plist = new ArrayList<Path>(otherArgs.size());
        for (String s : otherArgs) {
            plist.add(new Path(s));
        }

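        // CompositeInputFormat does the join on the map side. The join
        // expression stored under "mapred.join.expr" looks roughly like
        //   inner(tbl(SequenceFileInputFormat, "in1"), tbl(SequenceFileInputFormat, "in2"))
        // and requires every source to be sorted by key and partitioned
        // identically (same number of part files, same partitioner).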
        jobConf.setInputFormat(CompositeInputFormat.class);
        jobConf.set("mapred.join.expr",
                CompositeInputFormat.compose(op, inputFormatClass, plist.toArray(new Path[0])));
        jobConf.setOutputFormat(outputFormatClass);

        jobConf.setOutputKeyClass(outputKeyClass);
        jobConf.setOutputValueClass(outputValueClass);

        Date startTime = new Date();
        System.out.println("Job started: " + startTime);
        JobClient.runJob(jobConf);
        Date end_time = new Date();
        System.out.println("Job ended: " + end_time);
        System.out.println("The job took " + (end_time.getTime() - startTime.getTime()) / 1000 + " seconds.");
        return 0;
    }

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new Join(), args);
        System.exit(res);
    }
}
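For example, following the usage string in the class javadoc and assuming two pre-sorted, identically partitioned SequenceFile datasets under /data/a and /data/b (hypothetical paths), an inner join could be launched with something like:

bin/hadoop jar build/hadoop-examples.jar join -joinOp inner /data/a /data/b /data/joined

The output directory then holds BytesWritable keys paired with TupleWritable values, one tuple entry per input dataset.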