Example usage for org.apache.hadoop.mapreduce.lib.jobcontrol ControlledJob addDependingJob

Introduction

On this page you can find example usage of org.apache.hadoop.mapreduce.lib.jobcontrol ControlledJob#addDependingJob.

Prototype

public synchronized boolean addDependingJob(ControlledJob dependingJob) 

Document

Add a job to this job's dependency list.
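The dependency is only recorded while this job is still in the WAITING state; once the job has started, the call returns false and the dependency is ignored. Below is a minimal, self-contained sketch of the usual pattern (the class name, job names, and polling interval are illustrative, not taken from the sources below):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;

public class DependencyExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        Job first = Job.getInstance(conf, "first step");
        Job second = Job.getInstance(conf, "second step");
        // ... configure mappers, reducers, and input/output paths here ...

        ControlledJob firstControlled = new ControlledJob(conf);
        firstControlled.setJob(first);
        ControlledJob secondControlled = new ControlledJob(conf);
        secondControlled.setJob(second);

        // "second" is submitted only after "first" completes successfully
        secondControlled.addDependingJob(firstControlled);

        JobControl control = new JobControl("dependency example");
        control.addJob(firstControlled);
        control.addJob(secondControlled);

        // JobControl is a Runnable; run it on its own thread and poll
        new Thread(control).start();
        while (!control.allFinished()) {
            Thread.sleep(1000);
        }
        // the run loop does not exit on its own; stop it explicitly
        control.stop();
    }
}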

Usage

From source file: tv.icntv.grade.film.dbcollect.TableConcurrencyJob.java

License: Apache License

@Override
public int run(String[] strings) throws Exception {
    Configuration configuration = getConf();
    String[] tables = configuration.get("hbase.cdn.tables").split(",");
    JobControl jobControl = new JobControl("data init");
    for (String table : tables) {
        String hbaseDbDirectory = String.format(configuration.get("hdfs.directory.from.hbase"), new Date(),
                table);
        HadoopUtils.deleteIfExist(hbaseDbDirectory);
        Job tableJob = Job.getInstance(configuration, "icntv grade init " + table);
        TableMapReduceUtil.initTableMapperJob(table, new Scan(), TableInitMapper.class, Text.class, Text.class,
                tableJob);
        MapReduceUtils.initReducerJob(new Path(hbaseDbDirectory), TableInitReducer.class, tableJob);
        // wrap the table job so it can participate in the dependency graph
        ControlledJob tableControlledJob = new ControlledJob(configuration);
        tableControlledJob.setJob(tableJob);

        String dbDirectory = String.format(configuration.get("hdfs.directory.base.db"), new Date(), table);
        HadoopUtils.deleteIfExist(dbDirectory);
        // set before creating the job: Job copies the Configuration at construction time
        configuration.setLong("mapred.min.split.size", 512 * 1024 * 1024L); // 512 MB minimum split
        Job db = Job.getInstance(configuration, "icntv db collect " + table);
        MapReduceUtils.initMapperJob(DefaultHbaseMapper.class, Text.class, Text.class, this.getClass(), db,
                new Path(strings[1]));
        FileOutputFormat.setOutputPath(db, new Path(dbDirectory));
        db.setNumReduceTasks(0);
        ControlledJob dbControlledJob = new ControlledJob(configuration);
        dbControlledJob.setJob(db);
        // the collect job must wait for the table job to finish
        dbControlledJob.addDependingJob(tableControlledJob);
        jobControl.addJob(tableControlledJob);
        jobControl.addJob(dbControlledJob);
    }
    new Thread(jobControl).start();
    while (!jobControl.allFinished()) {
        Thread.sleep(5000);
    }
    // the JobControl run loop does not exit on its own; stop it explicitly
    jobControl.stop();
    return 0;
}
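Each iteration builds a two-job chain for one HBase table: a table-scan job that exports the table, and a map-only collect job that consumes its output. Because the collect job is registered as depending on the table job, JobControl submits it only after the table job succeeds; chains for different tables remain independent and can run concurrently.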

From source file: tv.icntv.grade.film.dbcollect.TableInitJob.java

License: Apache License

@Override
public int run(String[] strings) throws Exception {

    Configuration configuration = getConf();
    JobControl jobControl = new JobControl("init data");
    for (String table : strings) {
        String dbPath = String.format(configuration.get("hdfs.directory.base.db"), new Date(), table);
        String hbasePath = String.format(configuration.get("hdfs.directory.from.hbase"), new Date(), table);
        //table job
        Job tableJob = Job.getInstance(configuration, "icntv grade init");
        Scan scan = new Scan();

        HadoopUtils.deleteIfExist(hbasePath);
        HadoopUtils.deleteIfExist(dbPath);
        TableMapReduceUtil.initTableMapperJob(table, scan, TableInitMapper.class, Text.class, Text.class,
                tableJob);
        MapReduceUtils.initReducerJob(new Path(hbasePath), TableInitReducer.class, tableJob);
        ControlledJob firstControlled = new ControlledJob(configuration);
        firstControlled.setJob(tableJob);
        // set before creating the job: Job copies the Configuration at construction time
        configuration.setLong("mapred.min.split.size", 512 * 1024 * 1024L); // 512 MB minimum split
        Job db = Job.getInstance(configuration, "icntv db collect");
        MapReduceUtils.initMapperJob(DefaultHbaseMapper.class, Text.class, Text.class, this.getClass(), db,
                new Path(hbasePath));
        FileOutputFormat.setOutputPath(db, new Path(dbPath));
        db.setNumReduceTasks(0);
        ControlledJob secondaryController = new ControlledJob(configuration);
        secondaryController.setJob(db);
        // the collect job depends on the table init job
        secondaryController.addDependingJob(firstControlled);
        jobControl.addJob(firstControlled);
        jobControl.addJob(secondaryController);
    }
    new Thread(jobControl).start();
    while (!jobControl.allFinished()) {
        Thread.sleep(5000);
    }
    // the JobControl run loop does not exit on its own; stop it explicitly
    jobControl.stop();
    logger.info("job control succeeded; successful job size=" + jobControl.getSuccessfulJobList().size());
    return 0;
}
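This variant takes the table names from the command-line arguments instead of a configuration property, but the wiring is the same: addDependingJob makes the map-only collect job wait for its table init job, and the number of successful jobs is logged once everything has finished.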