Example usage for org.apache.hadoop.mapred.jobcontrol JobControl getWaitingJobs

Introduction

On this page you can find example usage of org.apache.hadoop.mapred.jobcontrol.JobControl.getWaitingJobs().

Prototype

public ArrayList<Job> getWaitingJobs() 
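
Before the real-world examples, here is a minimal sketch of the typical call pattern: jobs added to a JobControl remain in the WAITING state until the control thread is started and their dependencies complete, and getWaitingJobs() returns exactly those jobs. The class name, job configuration, and paths below are illustrative assumptions, not drawn from the examples that follow.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapred.jobcontrol.JobControl;

public class WaitingJobsSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        // Illustrative job setup; any fully configured JobConf would do here.
        JobConf conf = new JobConf(WaitingJobsSketch.class);
        conf.setJobName("waiting-jobs-demo");
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(conf, new Path(args[0]));
        FileOutputFormat.setOutputPath(conf, new Path(args[1]));

        JobControl jc = new JobControl("demo group");
        jc.addJob(new Job(conf));

        // Until the control thread starts (and dependencies complete), every
        // job sits in the WAITING state.
        for (Job j : jc.getWaitingJobs()) {
            System.out.println("waiting: " + j.getJobName());
        }

        new Thread(jc).start();
        while (!jc.allFinished()) {
            System.out.println(jc.getWaitingJobs().size() + " job(s) still waiting");
            Thread.sleep(1000);
        }
        jc.stop();
    }
}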

Usage

From source file:org.apache.pig.pen.LocalMapReduceSimulator.java

License:Apache License

@SuppressWarnings("unchecked")
public void launchPig(PhysicalPlan php, Map<LOLoad, DataBag> baseData, LineageTracer lineage,
        IllustratorAttacher attacher, ExampleGenerator eg, PigContext pc)
        throws PigException, IOException, InterruptedException {
    phyToMRMap.clear();
    MROperPlan mrp = launcher.compile(php, pc);

    ConfigurationValidator.validatePigProperties(pc.getProperties());
    Configuration conf = ConfigurationUtil.toConfiguration(pc.getProperties());

    JobControlCompiler jcc = new JobControlCompiler(pc, conf);

    JobControl jc;
    int numMRJobsCompl = 0;
    DataBag input;
    List<Pair<PigNullableWritable, Writable>> intermediateData = new ArrayList<Pair<PigNullableWritable, Writable>>();

    Map<Job, MapReduceOper> jobToMroMap = jcc.getJobMroMap();
    HashMap<String, DataBag> output = new HashMap<String, DataBag>();
    Configuration jobConf;
    // jc is null only when mrp.size == 0
    boolean needFileInput;
    final ArrayList<OperatorKey> emptyInpTargets = new ArrayList<OperatorKey>();
    pc.getProperties().setProperty("pig.illustrating", "true");
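    // Simulate the plan one batch of MR jobs at a time; nothing is submitted
    // to a cluster, so each compiled JobControl batch is only used locally.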
    while (mrp.size() != 0) {
        jc = jcc.compile(mrp, "Illustrator");
        if (jc == null) {
            throw new ExecException("Native execution is not supported");
        }
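        // The control thread is never started here, so every compiled job is
        // still in the WAITING state and getWaitingJobs() returns all of them.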
        List<Job> jobs = jc.getWaitingJobs();
        for (Job job : jobs) {
            jobConf = job.getJobConf();
            FileLocalizer.setInitialized(false);
            ArrayList<ArrayList<OperatorKey>> inpTargets = (ArrayList<ArrayList<OperatorKey>>) ObjectSerializer
                    .deserialize(jobConf.get("pig.inpTargets"));
            intermediateData.clear();
            MapReduceOper mro = jobToMroMap.get(job);
            PigSplit split = null;
            List<POStore> stores = null;
            PhysicalOperator pack = null;
            // revisit as there are new physical operators from MR compilation 
            if (!mro.mapPlan.isEmpty())
                attacher.revisit(mro.mapPlan);
            if (!mro.reducePlan.isEmpty()) {
                attacher.revisit(mro.reducePlan);
                pack = mro.reducePlan.getRoots().get(0);
            }

            List<POLoad> lds = PlanHelper.getPhysicalOperators(mro.mapPlan, POLoad.class);
            if (!mro.mapPlan.isEmpty()) {
                stores = PlanHelper.getPhysicalOperators(mro.mapPlan, POStore.class);
            }
            if (!mro.reducePlan.isEmpty()) {
                if (stores == null)
                    stores = PlanHelper.getPhysicalOperators(mro.reducePlan, POStore.class);
                else
                    stores.addAll(PlanHelper.getPhysicalOperators(mro.reducePlan, POStore.class));
            }

            for (POStore store : stores) {
                output.put(store.getSFile().getFileName(), attacher.getDataMap().get(store));
            }

            OutputAttacher oa = new OutputAttacher(mro.mapPlan, output);
            oa.visit();

            if (!mro.reducePlan.isEmpty()) {
                oa = new OutputAttacher(mro.reducePlan, output);
                oa.visit();
            }
            int index = 0;
            for (POLoad ld : lds) {
                input = output.get(ld.getLFile().getFileName());
                if (input == null && baseData != null) {
                    for (LogicalRelationalOperator lo : baseData.keySet()) {
                        if (((LOLoad) lo).getSchemaFile().equals(ld.getLFile().getFileName())) {
                            input = baseData.get(lo);
                            break;
                        }
                    }
                }
                if (input != null)
                    mro.mapPlan.remove(ld);
            }
            for (POLoad ld : lds) {
                // check newly generated data first
                input = output.get(ld.getLFile().getFileName());
                if (input == null && baseData != null) {
                    for (LogicalRelationalOperator lo : baseData.keySet()) {
                        if (((LOLoad) lo).getSchemaFile().equals(ld.getLFile().getFileName())) {
                            input = baseData.get(lo);
                            break;
                        }
                    }
                }
                needFileInput = (input == null);
                split = new PigSplit(null, index, needFileInput ? emptyInpTargets : inpTargets.get(index), 0);
                ++index;
                Mapper<Text, Tuple, PigNullableWritable, Writable> map;

                if (mro.reducePlan.isEmpty()) {
                    // map-only
                    map = new PigMapOnly.Map();
                    Mapper<Text, Tuple, PigNullableWritable, Writable>.Context context = ((PigMapOnly.Map) map)
                            .getIllustratorContext(jobConf, input, intermediateData, split);
                    if (mro.isCounterOperation()) {
                        if (mro.isRowNumber()) {
                            map = new PigMapReduceCounter.PigMapCounter();
                        }
                        context = ((PigMapReduceCounter.PigMapCounter) map).getIllustratorContext(jobConf,
                                input, intermediateData, split);
                    }
                    ((PigMapBase) map).setMapPlan(mro.mapPlan);
                    map.run(context);
                } else {
                    if ("true".equals(jobConf.get("pig.usercomparator")))
                        map = new PigMapReduce.MapWithComparator();
                    else if (!"".equals(jobConf.get("pig.keyDistFile", "")))
                        map = new PigMapReduce.MapWithPartitionIndex();
                    else
                        map = new PigMapReduce.Map();
                    Mapper<Text, Tuple, PigNullableWritable, Writable>.Context context = ((PigMapBase) map)
                            .getIllustratorContext(jobConf, input, intermediateData, split);
                    ((PigMapBase) map).setMapPlan(mro.mapPlan);
                    map.run(context);
                }
            }

            if (!mro.reducePlan.isEmpty()) {
                if (pack instanceof POPackage)
                    mro.reducePlan.remove(pack);
                // reducer run
                PigMapReduce.Reduce reduce;
                if ("true".equals(jobConf.get("pig.usercomparator")))
                    reduce = new PigMapReduce.ReduceWithComparator();
                else
                    reduce = new PigMapReduce.Reduce();
                Reducer<PigNullableWritable, NullableTuple, PigNullableWritable, Writable>.Context context = reduce
                        .getIllustratorContext(job, intermediateData, (POPackage) pack);

                if (mro.isCounterOperation()) {
                    reduce = new PigMapReduceCounter.PigReduceCounter();
                    context = ((PigMapReduceCounter.PigReduceCounter) reduce).getIllustratorContext(job,
                            intermediateData, (POPackage) pack);
                }

                ((PigMapReduce.Reduce) reduce).setReducePlan(mro.reducePlan);
                reduce.run(context);
            }
            for (PhysicalOperator key : mro.phyToMRMap.keySet())
                for (PhysicalOperator value : mro.phyToMRMap.get(key))
                    phyToMRMap.put(key, value);
        }

        // All jobs in this batch have been simulated above; prune them from
        // the MR plan so the loop advances to the next batch.
        int removedMROp = jcc.updateMROpPlan(new LinkedList<Job>());

        numMRJobsCompl += removedMROp;
    }

    jcc.reset();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L1.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    JobConf lp = new JobConf(L1.class);
    lp.setJobName("L1 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(IntWritable.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setCombinerClass(Group.class);
    lp.setReducerClass(Group.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/L1out"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job group = new Job(lp);

    JobControl jc = new JobControl("L1 join");
    jc.addJob(group);

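    // Run the JobControl in a background thread, then poll it from this thread
    // until every job finishes or one fails.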
    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
            // Ignore the interrupt and keep polling.
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L10.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    JobConf lp = new JobConf(L10.class);
    lp.setJobName("L10 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(MyType.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setReducerClass(Group.class);
    lp.setPartitionerClass(MyPartitioner.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/L10out"));
    // Hardcode the parallel to 40 since MyPartitioner assumes it
    lp.setNumReduceTasks(40);
    Job group = new Job(lp);

    JobControl jc = new JobControl("L10 join");
    jc.addJob(group);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L11.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    String user = System.getProperty("user.name");
    JobConf lp = new JobConf(L11.class);
    lp.setJobName("L11 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setCombinerClass(ReadPageViews.class);
    lp.setReducerClass(ReadPageViews.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/p"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadPages = new Job(lp);

    JobConf lu = new JobConf(L11.class);
    lu.setJobName("L11 Load Widerow");
    lu.setInputFormat(TextInputFormat.class);
    lu.setOutputKeyClass(Text.class);
    lu.setOutputValueClass(Text.class);
    lu.setMapperClass(ReadWideRow.class);
    lu.setCombinerClass(ReadWideRow.class);
    lu.setReducerClass(ReadWideRow.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lu.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lu, new Path(inputDir + "/widerow"));
    FileOutputFormat.setOutputPath(lu, new Path(outputDir + "/wr"));
    lu.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadWideRow = new Job(lu);

    JobConf join = new JobConf(L11.class);
    join.setJobName("L11 Union WideRow and Pages");
    join.setInputFormat(KeyValueTextInputFormat.class);
    join.setOutputKeyClass(Text.class);
    join.setOutputValueClass(Text.class);
    join.setMapperClass(IdentityMapper.class);
    join.setCombinerClass(Union.class);
    join.setReducerClass(Union.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        join.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(join, new Path(outputDir + "/p"));
    FileInputFormat.addInputPath(join, new Path(outputDir + "/wr"));
    FileOutputFormat.setOutputPath(join, new Path(outputDir + "/L11out"));
    join.setNumReduceTasks(Integer.parseInt(parallel));
    Job joinJob = new Job(join);
    joinJob.addDependingJob(loadPages);
    joinJob.addDependingJob(loadWideRow);

    JobControl jc = new JobControl("L11 join");
    jc.addJob(loadPages);
    jc.addJob(loadWideRow);
    jc.addJob(joinJob);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L12.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    String user = System.getProperty("user.name");
    JobConf lp = new JobConf(L12.class);
    lp.setJobName("L12 Find Highest Value Page Per User");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(DoubleWritable.class);
    lp.setMapperClass(HighestValuePagePerUser.class);
    lp.setCombinerClass(HighestValuePagePerUser.class);
    lp.setReducerClass(HighestValuePagePerUser.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/highest_value_page_per_user"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadPages = new Job(lp);

    JobConf lu = new JobConf(L12.class);
    lu.setJobName("L12 Find Total Timespent per Term");
    lu.setInputFormat(TextInputFormat.class);
    lu.setOutputKeyClass(Text.class);
    lu.setOutputValueClass(LongWritable.class);
    lu.setMapperClass(TotalTimespentPerTerm.class);
    lu.setCombinerClass(TotalTimespentPerTerm.class);
    lu.setReducerClass(TotalTimespentPerTerm.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lu.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lu, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lu, new Path(outputDir + "/total_timespent_per_term"));
    lu.setNumReduceTasks(Integer.parseInt(parallel));
    Job loadUsers = new Job(lu);

    JobConf join = new JobConf(L12.class);
    join.setJobName("L12 Find Queries Per Action");
    join.setInputFormat(TextInputFormat.class);
    join.setOutputKeyClass(Text.class);
    join.setOutputValueClass(LongWritable.class);
    join.setMapperClass(QueriesPerAction.class);
    join.setCombinerClass(QueriesPerAction.class);
    join.setReducerClass(QueriesPerAction.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        join.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(join, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(join, new Path(outputDir + "/queries_per_action"));
    join.setNumReduceTasks(Integer.parseInt(parallel));
    Job joinJob = new Job(join);

    JobControl jc = new JobControl("L12 join");
    jc.addJob(loadPages);
    jc.addJob(loadUsers);
    jc.addJob(joinJob);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L13.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    String user = System.getProperty("user.name");
    JobConf lp = new JobConf(L13.class);
    lp.setJobName("L13 Load Left Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadLeftPageViews.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/indexed_left_pages"));
    lp.setNumReduceTasks(0);
    Job loadPages = new Job(lp);

    JobConf lu = new JobConf(L13.class);
    lu.setJobName("L13 Load Right Page Views");
    lu.setInputFormat(TextInputFormat.class);
    lu.setOutputKeyClass(Text.class);
    lu.setOutputValueClass(Text.class);
    lu.setMapperClass(ReadRightPageViews.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lu.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lu, new Path(inputDir + "/power_users_samples"));
    FileOutputFormat.setOutputPath(lu, new Path(outputDir + "/indexed_right_pages"));
    lu.setNumReduceTasks(0);
    Job loadUsers = new Job(lu);

    JobConf join = new JobConf(L13.class);
    join.setJobName("L13 Join Two Pages");
    join.setInputFormat(KeyValueTextInputFormat.class);
    join.setOutputKeyClass(Text.class);
    join.setOutputValueClass(Text.class);
    join.setMapperClass(IdentityMapper.class);
    join.setReducerClass(Join.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        join.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(join, new Path(outputDir + "/indexed_left_pages"));
    FileInputFormat.addInputPath(join, new Path(outputDir + "/indexed_right_pages"));
    FileOutputFormat.setOutputPath(join, new Path(outputDir + "/L13out"));
    join.setNumReduceTasks(Integer.parseInt(parallel));
    Job joinJob = new Job(join);
    joinJob.addDependingJob(loadPages);
    joinJob.addDependingJob(loadUsers);

    JobControl jc = new JobControl("L13 join");
    jc.addJob(loadPages);
    jc.addJob(loadUsers);
    jc.addJob(joinJob);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L14.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    String user = System.getProperty("user.name");
    JobConf lp = new JobConf(L14.class);
    lp.setJobName("L14 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views_sorted"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/indexed_pages_14"));
    lp.setNumReduceTasks(0);
    Job loadPages = new Job(lp);

    JobConf lu = new JobConf(L14.class);
    lu.setJobName("L14 Load Users");
    lu.setInputFormat(TextInputFormat.class);
    lu.setOutputKeyClass(Text.class);
    lu.setOutputValueClass(Text.class);
    lu.setMapperClass(ReadUsers.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lu.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lu, new Path(inputDir + "/users_sorted"));
    FileOutputFormat.setOutputPath(lu, new Path(outputDir + "/indexed_users_14"));
    lu.setNumReduceTasks(0);
    Job loadUsers = new Job(lu);

    JobConf join = new JobConf(L14.class);
    join.setJobName("L14 Join Users and Pages");
    join.setInputFormat(KeyValueTextInputFormat.class);
    join.setOutputKeyClass(Text.class);
    join.setOutputValueClass(Text.class);
    join.setMapperClass(IdentityMapper.class);
    join.setReducerClass(Join.class);
    props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        join.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(join, new Path(outputDir + "/indexed_pages_14"));
    FileInputFormat.addInputPath(join, new Path(outputDir + "/indexed_users_14"));
    FileOutputFormat.setOutputPath(join, new Path(outputDir + "/L14out"));
    join.setNumReduceTasks(Integer.parseInt(parallel));
    Job joinJob = new Job(join);
    joinJob.addDependingJob(loadPages);
    joinJob.addDependingJob(loadUsers);

    JobControl jc = new JobControl("L14 join");
    jc.addJob(loadPages);
    jc.addJob(loadUsers);
    jc.addJob(joinJob);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L15.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    JobConf lp = new JobConf(L15.class);
    lp.setJobName("L15 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setCombinerClass(Combiner.class);
    lp.setReducerClass(Group.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/L15out"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job group = new Job(lp);

    JobControl jc = new JobControl("L15 join");
    jc.addJob(group);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L16.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    JobConf lp = new JobConf(L16.class);
    lp.setJobName("L16 Load Page Views");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setReducerClass(Group.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/page_views"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/L16out"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job group = new Job(lp);

    JobControl jc = new JobControl("L16 join");
    jc.addJob(group);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}

From source file:org.apache.pig.test.pigmix.mapreduce.L17.java

License:Apache License

public static void main(String[] args) throws IOException {

    if (args.length != 3) {
        System.out.println("Parameters: inputDir outputDir parallel");
        System.exit(1);
    }
    String inputDir = args[0];
    String outputDir = args[1];
    String parallel = args[2];
    JobConf lp = new JobConf(L17.class);
    lp.setJobName("L17 Wide group by");
    lp.setInputFormat(TextInputFormat.class);
    lp.setOutputKeyClass(Text.class);
    lp.setOutputValueClass(Text.class);
    lp.setMapperClass(ReadPageViews.class);
    lp.setCombinerClass(Combiner.class);
    lp.setReducerClass(Group.class);
    Properties props = System.getProperties();
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        lp.set((String) entry.getKey(), (String) entry.getValue());
    }
    FileInputFormat.addInputPath(lp, new Path(inputDir + "/widegroupbydata"));
    FileOutputFormat.setOutputPath(lp, new Path(outputDir + "/L17out"));
    lp.setNumReduceTasks(Integer.parseInt(parallel));
    Job group = new Job(lp);

    JobControl jc = new JobControl("L17 group by");
    jc.addJob(group);

    new Thread(jc).start();

    int i = 0;
    while (!jc.allFinished()) {
        ArrayList<Job> failures = jc.getFailedJobs();
        if (failures != null && failures.size() > 0) {
            for (Job failure : failures) {
                System.err.println(failure.getMessage());
            }
            break;
        }

        try {
            Thread.sleep(5000);
        } catch (InterruptedException e) {
        }

        if (i % 10000 == 0) {
            System.out.println("Running jobs");
            ArrayList<Job> running = jc.getRunningJobs();
            if (running != null && running.size() > 0) {
                for (Job r : running) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Ready jobs");
            ArrayList<Job> ready = jc.getReadyJobs();
            if (ready != null && ready.size() > 0) {
                for (Job r : ready) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Waiting jobs");
            ArrayList<Job> waiting = jc.getWaitingJobs();
            if (waiting != null && waiting.size() > 0) {
                for (Job r : waiting) {
                    System.out.println(r.getJobName());
                }
            }
            System.out.println("Successful jobs");
            ArrayList<Job> success = jc.getSuccessfulJobs();
            if (success != null && success.size() > 0) {
                for (Job r : success) {
                    System.out.println(r.getJobName());
                }
            }
        }
        i++;
    }
    ArrayList<Job> failures = jc.getFailedJobs();
    if (failures != null && failures.size() > 0) {
        for (Job failure : failures) {
            System.err.println(failure.getMessage());
        }
    }
    jc.stop();
}