Example usage for org.apache.hadoop.mapreduce.lib.jobcontrol JobControl allFinished

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl.allFinished(), drawn from open-source projects.

Prototype

public synchronized boolean allFinished() 
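
The typical pattern is to run the JobControl on its own thread (it implements Runnable) and poll allFinished() until it returns true. The sketch below is a minimal, self-contained illustration of that pattern; the class name, group name, and job configuration are placeholders rather than code taken from the examples that follow.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;

public class AllFinishedSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "example");
        // ... set mapper, reducer, input and output paths here ...

        ControlledJob controlled = new ControlledJob(job.getConfiguration());
        JobControl control = new JobControl("exampleGroup");
        control.addJob(controlled);

        // JobControl implements Runnable; run it on its own thread so the
        // main thread stays free to poll allFinished().
        Thread runner = new Thread(control);
        runner.setDaemon(true);
        runner.start();

        // allFinished() returns true once every job in the group has either
        // succeeded or failed, so poll it instead of joining the thread.
        while (!control.allFinished()) {
            Thread.sleep(1000);
        }
        control.stop(); // shut down the control thread's scheduling loop

        System.out.println("Succeeded: " + control.getSuccessfulJobList());
        System.out.println("Failed:    " + control.getFailedJobList());
    }
}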

Usage

From source file:clustering.Utils.MapReduceUtils.java

License:Apache License
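
A utility method that drives a JobControl on a background thread: it polls allFinished() once per second, prints the total elapsed time, and then stops the control thread.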

public static void runJobs(JobControl jobControl) {
    // JobControl implements Runnable, so it can be driven by a plain Thread.
    Thread jobRunnerThread = new Thread(jobControl);
    long startTime = System.currentTimeMillis();
    jobRunnerThread.start();

    // Poll until every job in the group has either succeeded or failed.
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long endTime = System.currentTimeMillis();
    System.out.println("All jobs finished in: " + (endTime - startTime) / 1000 + " seconds");

    jobControl.stop();
}

From source file:com.laizuozuoba.WordCount.java

License:Apache License
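
A two-stage WordCount driver: a dependent "uv" job consumes the first job's output, and the main thread polls allFinished() and getFailedJobList() so it can stop the JobControl as soon as everything completes or anything fails.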

public static void main(String[] args) throws Exception {
    // System.setProperty("hadoop.home.dir", "D:\\hadoop-2.2.0");
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
        System.err.println("Usage: wordcount <in> <out>");
        System.exit(2);
    }
    Job job = new Job(conf, "word count");
    job.setJarByClass(WordCount.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

    Job job2 = new Job(conf, "uv");
    job2.setJarByClass(WordCount.class);
    job2.setMapperClass(UVMapper.class);
    job2.setCombinerClass(UVReducer.class);
    job2.setReducerClass(UVReducer.class);
    job2.setOutputKeyClass(Text.class);
    job2.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job2, new Path(otherArgs[1]));
    FileOutputFormat.setOutputPath(job2, new Path("hdfs://10.18.106.67:9100/result2"));

    ControlledJob controlledJob = new ControlledJob(job.getConfiguration());
    ControlledJob controlledJob2 = new ControlledJob(job2.getConfiguration());
    controlledJob2.addDependingJob(controlledJob);
    JobControl jc = new JobControl("123");
    jc.addJob(controlledJob);
    jc.addJob(controlledJob2);

    Thread jcThread = new Thread(jc);
    jcThread.start();
    while (true) {
        if (jc.allFinished()) {
            System.out.println(jc.getSuccessfulJobList());
            jc.stop();
            break;
        }
        if (jc.getFailedJobList().size() > 0) {
            System.out.println(jc.getFailedJobList());
            jc.stop();
            break;
        }
        Thread.sleep(1000);
    }
    System.out.println("Finished!!!!!!!!!!!!!!!!!!!!!!!");
}

From source file:com.niuwa.hadoop.jobs.sample.JobControlTest.java

License:Apache License
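
Chains two jobs with ControlledJob.addDependingJob(), runs the JobControl on its own thread, and loops on allFinished() before printing the successful job list.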

public static void main(String[] args) throws Exception {
    HadoopUtil.isWinOrLiux();
    Configuration conf = new Configuration();
    String path = "hdfs://ns1:9000/user/root";
    if (args.length != 0) {
        path = args[0];
    }
    String[] args_1 = new String[] { path + "/chubao/input/contact",
            path + "/chubao/temp/" + DateUtil.format(new Date()) + "/contact_total",
            path + "/chubao/temp/" + DateUtil.format(new Date()) + "/contact_total_next" };
    String[] otherArgs = new GenericOptionsParser(conf, args_1).getRemainingArgs();
    // first job: word count
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(JobControlTest.class);
    job.setMapperClass(UserIdMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    // clean up the output path from any previous run
    deleteOutputFile(otherArgs[1], otherArgs[0]);
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));

    // second job: consumes the first job's output
    Job job2 = Job.getInstance(conf, "job2");
    job2.setJarByClass(JobControlTest.class);
    job2.setMapperClass(AddDateMapper.class);
    job2.setReducerClass(Job2Reducer.class);
    job2.setOutputKeyClass(IntWritable.class);
    job2.setOutputValueClass(Text.class);

    FileInputFormat.addInputPath(job2, new Path(otherArgs[1]));
    // clean up the second job's output path
    deleteOutputFile(otherArgs[2], otherArgs[1]);
    FileOutputFormat.setOutputPath(job2, new Path(otherArgs[2]));

    // wrap each Job in a ControlledJob
    ControlledJob controlledJob1 = new ControlledJob(job.getConfiguration());
    ControlledJob controlledJob2 = new ControlledJob(job2.getConfiguration());

    // declare the dependency: job2 runs only after job1 succeeds
    controlledJob2.addDependingJob(controlledJob1);

    // group both jobs under one JobControl
    JobControl jobControl = new JobControl("JobControlDemoGroup");
    jobControl.addJob(controlledJob1);
    jobControl.addJob(controlledJob2);

    // run the JobControl on its own thread
    Thread jobControlThread = new Thread(jobControl);
    jobControlThread.start();
    while (true) {
        if (jobControl.allFinished()) {
            System.out.println(jobControl.getSuccessfulJobList());
            jobControl.stop();
            break;
        }
        Thread.sleep(1000); // avoid spinning at full speed while the jobs run
    }
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.MeanJob.java

License:Apache License
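
A Callable that computes the mean of a column: it chains a field-separation mapper with MeanMapper, waits for allFinished(), then parses the result out of part-r-00000.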

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("mean job");
    try {
        job = new Job();
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.setJarByClass(MeanJob.class);
    log.info("Mean Job initialized");
    log.warn("Mean job: Processing...Do not terminate/close");
    log.debug("Mean job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, MeanMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }

    job.getConfiguration().set("fields.spec", "" + column);
    job.getConfiguration().setInt("n", n);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occurred: file " + inputFilePath + " not found");
        }
    } catch (Exception e) {
        String trace = "";
        log.error(e.toString());
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Mean Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Mean job: Mapping process completed");

    log.debug("Mean job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occured: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Mean Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Mean job: Reducing process completed");
    log.info("Mean Job completed\n");
    return value;
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaJob.java

License:Apache License
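
Computes the sum of a column using the same structure as MeanJob: the driver thread polls allFinished() every ten seconds before reading the reducer output.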

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    job.setJarByClass(SigmaJob.class);
    log.info("Sigma Job initialized");
    log.warn("Sigma job: Processing...Do not terminate/close");
    log.debug("Sigma job: Mapping process started");
    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occurred: file " + inputFilePath + " not found");
        }
    } catch (Exception e2) {
        String trace = "";
        log.error(e2.toString());
        log.error(e2.toString());
        for (StackTraceElement s : e2.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sigma Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setMapOutputValueClass(DoubleWritable.class);
    job.setMapOutputKeyClass(Text.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sigma job: Mapping process completed");

    log.debug("Sigma job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occurred: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sigma Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sigma job: Reducing process completed");
    log.info("Sigma Job completed\n");
    return value;
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaSqJob.java

License:Apache License
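
Computes the sum of squares of a column; again allFinished() gates the read of the job's output file.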

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.setJarByClass(SigmaSqJob.class);
    log.info("Sigma square Job initialized");
    log.warn("Sigma square job: Processing...Do not terminate/close");
    log.debug("Sigma square job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaSqMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);
    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occurred: file " + inputFilePath + " not found");
        }
    } catch (Exception e) {
        String trace = "";
        log.error(e.toString());
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sigma square job: Mapping process completed");

    log.debug("Sigma square job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    jobControl.stop();
    try {
        fs = FileSystem.get(job.getConfiguration());
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occurred: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sigma square Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sigma square job: Reducing process completed");
    log.info("Sigma square Job completed\n");
    return value;
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SigmaXYJob.java

License:Apache License
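
Computes the sum of the products of two columns x and y; the polling loop on allFinished() is identical to the other Nectar jobs.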

public Double call() throws NectarException {
    double value = 0;
    JobControl jobControl = new JobControl("sigmajob");
    try {
        job = new Job();
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.setJarByClass(SigmaXYJob.class);
    log.info("SigmaXY Job initialized");
    log.warn("SigmaXY job: Processing...Do not terminate/close");
    log.debug("SigmaXY job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SigmaXYMapper.class, NullWritable.class, Text.class, Text.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", x + "," + y);

    job.setReducerClass(DoubleSumReducer.class);
    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occurred: file " + inputFilePath + " not found");
        }
    } catch (Exception e) {
        String trace = "";
        log.error(e.toString());
        log.error(e.toString());
        for (StackTraceElement s : e.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("SigmaXY Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("SigmaXY job: Mapping process completed");

    log.debug("SigmaXY job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    jobControl.stop();
    FileSystem fs;
    try {
        fs = FileSystem.get(job.getConfiguration());
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine = bufferedReader.readLine();
        String[] fields = valueLine.split("\t");
        value = Double.parseDouble(fields[1]);
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occurred: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("SigmaXY Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("SigmaXY job: Reducing process completed");
    log.info("SigmaXY Job completed\n");
    return value;
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.SortJob.java

License:Apache License
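
Sorts a numeric column by emitting it as the map output key with an identity Reducer; once allFinished() returns true, every line of the output is collected into a Double array.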

public Double[] call() throws NectarException {
    JobControl jobControl = new JobControl("Sortjob");
    try {
        job = new Job();
    } catch (IOException e) {
        e.printStackTrace();
    }
    job.setJarByClass(SortJob.class);
    log.info("Sorting Job initialized");
    log.warn("Sorting job: Processing...Do not terminate/close");
    log.debug("Sorting job: Mapping process started");

    try {
        ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, LongWritable.class, Text.class,
                NullWritable.class, Text.class, job.getConfiguration());
        ChainMapper.addMapper(job, SortMapper.class, NullWritable.class, Text.class, DoubleWritable.class,
                DoubleWritable.class, job.getConfiguration());
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    job.getConfiguration().set("fields.spec", "" + column);
    job.setReducerClass(Reducer.class);

    try {
        FileInputFormat.addInputPath(job, new Path(inputFilePath));
        fs = FileSystem.get(job.getConfiguration());
        if (!fs.exists(new Path(inputFilePath))) {
            throw new NectarException("Exception occurred: file " + inputFilePath + " not found");
        }
    } catch (Exception e2) {
        String trace = "";
        log.error(e2.toString());
        log.error(e2.toString());
        for (StackTraceElement s : e2.getStackTrace()) {
            trace += "\n\t at " + s.toString();
        }
        log.debug(trace);
        log.debug("Sorting Job terminated abruptly\n");
        throw new NectarException();
    }
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setMapOutputValueClass(DoubleWritable.class);
    job.setMapOutputKeyClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    log.debug("Sorting job: Mapping process completed");

    log.debug("Sorting job: Reducing process started");
    try {
        controlledJob = new ControlledJob(job.getConfiguration());
    } catch (IOException e) {
        e.printStackTrace();
    }
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        try {
            Thread.sleep(10000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    jobControl.stop();

    try {
        FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
        String valueLine;
        while ((valueLine = bufferedReader.readLine()) != null) {
            String[] fields = valueLine.split("\t");
            value.add(Double.parseDouble(fields[1]));
        }
        bufferedReader.close();
        in.close();
    } catch (IOException e) {
        log.error("Exception occurred: Output file cannot be read.");
        log.debug(e.getMessage());
        log.debug("Sorting Job terminated abruptly\n");
        throw new NectarException();
    }
    log.debug("Sorting job: Reducing process completed");
    log.info("Sorting Job completed\n");
    return value.toArray(new Double[value.size()]);
}

From source file:com.zinnia.nectar.regression.hadoop.primitive.jobs.YDiffJob.java

License:Apache License
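
The leanest of the Nectar jobs: call() declares throws Exception instead of catching locally, sleeps until allFinished(), stops the JobControl, and reads back the single reduced value.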

public Double call() throws Exception {
    JobControl jobControl = new JobControl("YDiff job");

    Job job = new Job();
    job.setJarByClass(YDiffJob.class);

    ChainMapper.addMapper(job, FieldSeperator.FieldSeperationMapper.class, DoubleWritable.class, Text.class,
            NullWritable.class, Text.class, job.getConfiguration());
    ChainMapper.addMapper(job, YDiffMapper.class, NullWritable.class, Text.class, Text.class,
            DoubleWritable.class, job.getConfiguration());

    String fieldSpec = getFieldSpecForColumns();
    job.getConfiguration().set("fields.spec", fieldSpec);
    job.getConfiguration().setStrings("paramValues", paramValues);
    job.setReducerClass(DoubleSumReducer.class);
    FileInputFormat.addInputPath(job, new Path(inputFilePath));
    FileOutputFormat.setOutputPath(job, new Path(outputFilePath));
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(DoubleWritable.class);
    job.setInputFormatClass(TextInputFormat.class);

    ControlledJob controlledJob = new ControlledJob(job.getConfiguration());
    jobControl.addJob(controlledJob);
    Thread thread = new Thread(jobControl);
    thread.start();
    while (!jobControl.allFinished()) {
        Thread.sleep(10000);
    }
    jobControl.stop();
    FileSystem fs = FileSystem.get(job.getConfiguration());
    FSDataInputStream in = fs.open(new Path(outputFilePath + "/part-r-00000"));
    BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(in));
    String valueLine = bufferedReader.readLine();
    String[] fields = valueLine.split("\t");
    double value = Double.parseDouble(fields[1]);
    bufferedReader.close();
    in.close();
    return value;
}

From source file:main.Driver.java
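
A Tool implementation that wires two independent sorting jobs, plus a merge job that depends on both, into one JobControl, then polls allFinished() every five seconds.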

@Override
public int run(String[] args) throws Exception {
    if (args.length != 5) {
        System.out.println("usage: [input1] [output1] [input2] [output2] [finaloutput]");
        System.exit(-1);
    }

    _configuration = this.getConf();
    System.out.println(_configuration.get("Hello"));
    ControlledJob nameJob = setSortingJob(args[0], args[1], LicenseOutputFormat.NAMES);
    ControlledJob licenseJob = setSortingJob(args[2], args[3], LicenseOutputFormat.LICENSE);
    ControlledJob mrJob = setMRJob(args[1], args[3], args[4]);
    mrJob.addDependingJob(nameJob);
    mrJob.addDependingJob(licenseJob);
    JobControl jobControl = new JobControl("MyJob");
    jobControl.addJob(nameJob);
    jobControl.addJob(licenseJob);
    jobControl.addJob(mrJob);

    //jobControl.run();
    Thread thread = new Thread(jobControl);
    thread.start();

    while (!jobControl.allFinished()) {
        System.out.println("Running");
        Thread.sleep(5000);
    }
    System.out.println("<<<Done>>>");
    return 0;
}