Example usage for org.apache.hadoop.mapreduce.lib.output FileOutputFormat setOutputPath

Introduction

This page collects example usages of org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.setOutputPath, drawn from open-source projects.

Prototype

public static void setOutputPath(Job job, Path outputDir) 

Document

Set the Path of the output directory for the map-reduce job.
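
Before the collected examples, here is a minimal sketch of a complete driver built around setOutputPath. It uses the identity Mapper and Reducer so it is runnable as-is; the class name SetOutputPathSketch is illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class SetOutputPathSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "setOutputPath sketch");
        job.setJarByClass(SetOutputPathSketch.class);
        // identity mapper and reducer: the job simply copies its input records
        job.setMapperClass(Mapper.class);
        job.setReducerClass(Reducer.class);
        job.setOutputKeyClass(LongWritable.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        // every part-r-* file the reducers produce is written under this
        // directory, which must not already exist when the job is submitted
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}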

Usage

From source file:Analysis.A4_High_Traffic_Countries.Top_10_Countries_by_User_Traffic_Driver.java

/**
 * @param args the command line arguments
 */

public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Top 10 Countries by User Traffic");
    job.setJarByClass(Top_10_Countries_by_User_Traffic_Driver.class);
    job.setMapperClass(Top_10_Countries_by_User_Traffic_Mapper.class);

    job.setCombinerClass(Top_10_Countries_by_User_Traffic_Combiner.class);
    job.setReducerClass(Top_10_Countries_by_User_Traffic_Reducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
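
As in most of the drivers on this page, the input path is taken from args[0] and the output directory from args[1]. FileOutputFormat rejects the job at submission time if the output directory already exists, so a fresh path must be supplied on each run.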

From source file:Analysis.A5_Min_Max_Median_Age_Top_Countries.Min_Max_Age_By_Country_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Min, Max, Average, Median age of users by country");
    job.setJarByClass(Min_Max_Age_By_Country_Driver.class);
    job.setMapperClass(Min_Max_Age_By_Country_Mapper.class);

    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setReducerClass(Min_Max_Age_By_Country_Reducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setNumReduceTasks(1);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:Analysis.A6_User_Differentiation_By_Age.Partition_Users_By_Age_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Users by Age");
    job.setJarByClass(Partition_Users_By_Age_Driver.class);

    job.setMapperClass(Partition_Users_By_Age_Mapper.class);
    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(Text.class);

    // plug in the custom partitioner that routes records to age bins
    job.setPartitionerClass(Partition_Users_By_Age_Partitioner.class);

    // register a named output ("ageBins") so the partition files get custom names
    MultipleOutputs.addNamedOutput(job, "ageBins", TextOutputFormat.class, Text.class, NullWritable.class);
    MultipleOutputs.setCountersEnabled(job, true);

    // age bins: 11-17, 18-25, 26-35, 36-49, 50-65, 66-80, 81-99

    // one reduce task per partition produced by the custom partitioner
    job.setNumReduceTasks(8);
    job.setReducerClass(Partition_Users_By_Age_Reducer.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
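
In this driver, setOutputPath supplies the base directory under which MultipleOutputs writes its named "ageBins" files; each of the eight reduce tasks produces the output for one age partition.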

From source file:Analysis.A7_Total_Signups_By_Year.Total_Signup_by_Year_Driver.java

/**
 * @param args the command line arguments
 */

public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Total Signups by Year");
    job.setJarByClass(Total_Signup_by_Year_Driver.class);
    job.setMapperClass(Total_Signup_by_Year_Mapper.class);

    job.setCombinerClass(Total_Signup_by_Year_Reducer.class);
    job.setReducerClass(Total_Signup_by_Year_Reducer.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:Analysis.A8_Top_10_Most_Popular_Tracks.Top_10_Most_Popular_Tracks_Driver.java

/**
 * @param args the command line arguments
 */

public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Top 10 most popular tracks ");
    job.setJarByClass(Top_10_Most_Popular_Tracks_Driver.class);

    job.setMapperClass(Top_10_Most_Popular_Tracks_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);

    job.setNumReduceTasks(1);
    job.setReducerClass(Top_10_Most_Popular_Tracks_Reducer.class);
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
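
Forcing a single reduce task with setNumReduceTasks(1) funnels all map output through one reducer, so the global top-10 list lands in a single part file under the directory passed to setOutputPath.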

From source file:Analysis.A9_Max_Activity_By_Time_of_Day.Most_Listens_By_Time_of_Day_Driver.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "Most listens by Time of the Day");
    job.setJarByClass(Most_Listens_By_Time_of_Day_Driver.class);

    job.setMapperClass(Most_Listens_By_Time_of_Day_Mapper.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(NullWritable.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    int code = job.waitForCompletion(true) ? 0 : 1;

    if (code == 0) {
        for (Counter counter : job.getCounters()
                .getGroup(Most_Listens_By_Time_of_Day_Mapper.HOUR_COUNTER_GROUP)) {
            System.out.println(counter.getDisplayName() + "\t" + counter.getValue());
        }
    }

    // the hourly counts are reported through the counters printed above, so the
    // job's file output is no longer needed and the output directory is removed
    FileSystem.get(conf).delete(new Path(args[1]), true);

    System.exit(code);
}

From source file:arpserver.HadoopTool.java

@Override
public int run(String[] strings) throws Exception {
    Configuration conf = new Configuration();
    String in = strings[0];
    String out = strings[1];
    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(new Path(out))) {
        fs.delete(new Path(out), true);
        fs.delete(new Path(out + "Src"), true);
        fs.delete(new Path(out + "Mitm"), true);
        fs.delete(new Path(out + "ArpScn"), true);
        fs.delete(new Path(out + "s"), true);
        fs.delete(new Path(out + "d"), true);
        fs.delete(new Path(out + "t"), true);
    }
    Job job = Job.getInstance();
    Job job2 = Job.getInstance();
    Job job3 = Job.getInstance();
    Job job4 = Job.getInstance();
    Job job5 = Job.getInstance();
    Job job6 = Job.getInstance();
    Job job7 = Job.getInstance();
    job.setJobName("Q");
    job2.setJobName("Src");
    job3.setJobName("Mitm");
    job4.setJobName("ArpScn");
    job5.setJobName("s");
    job6.setJobName("d");
    job7.setJobName("time");
    job.setJarByClass(QuickDetect.class);

    job.setMapperClass(Qmapper.class);
    job.setReducerClass(Qreducer.class);

    job2.setMapperClass(Srcmapper.class);
    job2.setReducerClass(Srcreducer.class);

    job3.setMapperClass(ArpScanmapper.class);
    job3.setReducerClass(ArpScanreducer.class);

    job4.setMapperClass(Mitmmapper.class);
    job4.setReducerClass(Mitmreducer.class);

    job5.setMapperClass(Smapper.class);
    job5.setReducerClass(Sreducer.class);

    job6.setMapperClass(Dmapper.class);
    job6.setReducerClass(Dreducer.class);

    job7.setMapperClass(timemapper.class);
    job7.setReducerClass(timereducer.class);
    job.setOutputKeyClass(NullWritable.class);
    job.setOutputValueClass(Text.class);

    job2.setOutputKeyClass(NullWritable.class);
    job2.setOutputValueClass(Text.class);

    job3.setOutputKeyClass(NullWritable.class);
    job3.setOutputValueClass(IntWritable.class);

    job4.setOutputKeyClass(NullWritable.class);
    job4.setOutputValueClass(Text.class);

    job5.setOutputKeyClass(NullWritable.class);
    job5.setOutputValueClass(Text.class);

    job6.setOutputKeyClass(NullWritable.class);
    job6.setOutputValueClass(Text.class);

    job7.setOutputKeyClass(NullWritable.class);
    job7.setOutputValueClass(Text.class);

    job.setMapOutputKeyClass(QuickDetect.class);
    job.setMapOutputValueClass(IntWritable.class);
    //job.setOutputFormatClass(YearMultipleTextOutputFormat.class);
    job2.setMapOutputKeyClass(DetectSrc.class);
    job2.setMapOutputValueClass(IntWritable.class);

    job3.setMapOutputKeyClass(DetectArpScan.class);
    job3.setMapOutputValueClass(IntWritable.class);

    job4.setMapOutputKeyClass(DetectMitm.class);
    job4.setMapOutputValueClass(IntWritable.class);

    job5.setMapOutputKeyClass(SMac.class);
    job5.setMapOutputValueClass(IntWritable.class);

    job6.setMapOutputKeyClass(DMac.class);
    job6.setMapOutputValueClass(IntWritable.class);

    job7.setMapOutputKeyClass(timeMac.class);
    job7.setMapOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(in));
    FileOutputFormat.setOutputPath(job, new Path(out));
    if (!job.waitForCompletion(true)) {
        return 1;
    }
    FileInputFormat.addInputPath(job2, new Path(in));
    FileOutputFormat.setOutputPath(job2, new Path(out + "Src"));
    if (!job2.waitForCompletion(true)) {
        return 1;
    }
    FileInputFormat.addInputPath(job3, new Path(in));
    FileOutputFormat.setOutputPath(job3, new Path(out + "ArpScn"));
    if (!job3.waitForCompletion(true)) {
        return 1;
    }
    FileInputFormat.addInputPath(job4, new Path(in));
    FileOutputFormat.setOutputPath(job4, new Path(out + "Mitm"));
    if (!job4.waitForCompletion(true)) {
        return 1;
    }
    FileInputFormat.addInputPath(job5, new Path(in));
    FileOutputFormat.setOutputPath(job5, new Path(out + "s"));
    if (!job5.waitForCompletion(true)) {
        return 1;
    }
    FileInputFormat.addInputPath(job6, new Path(in));
    FileOutputFormat.setOutputPath(job6, new Path(out + "d"));
    if (!job6.waitForCompletion(true)) {
        return 1;
    }
    FileInputFormat.addInputPath(job7, new Path(in));
    FileOutputFormat.setOutputPath(job7, new Path(out + "t"));
    // the final job's result is not checked; the method returns 0 once it finishes
    job7.waitForCompletion(true);
    return 0;
}
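
This driver chains seven jobs, deriving a separate output directory for each from the same base path (out, out + "Src", out + "Mitm", and so on). The existence check and deletes at the top clear all of these directories first, since setOutputPath must point at a location that does not yet exist when each job runs.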

From source file:AshleyIngram.FYP.Hadoop.WordCount.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:assignment1.WordCount.LinkedSort.LinkedSort.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: hadoop jar This.jar <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(LinkedSort.class);
    job.setMapperClass(TokenizerMapper.class);
    //job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setPartitionerClass(SortPartitioner.class);
    job.setOutputKeyClass(WordAndLength.class);
    job.setOutputValueClass(IntWritable.class);

    job.setNumReduceTasks(2);
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}

From source file:assignment1.WordCount.WordCountInMap.java

License:Apache License

public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: hadoop jar This.jar <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(assignment1.WordCount.WordCountInMap.class);
    job.setMapperClass(TokenizerMapper.class);
    //job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}