Example usage for org.apache.hadoop.mapred RunningJob getJobID

Introduction

This page collects example usages of org.apache.hadoop.mapred.RunningJob#getJobID from open-source projects.

Prototype

@Deprecated
public String getJobID();
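
getJobID() on the old org.apache.hadoop.mapred API returns the job ID as a plain String and is deprecated in favor of getID(), which returns a typed JobID. A minimal sketch of both forms, assuming a JobConf that is already fully configured (exception handling omitted):

    JobConf conf = new JobConf();
    // ... input/output paths, mapper and reducer classes set up here ...
    RunningJob job = new JobClient(conf).submitJob(conf);

    String id = job.getJobID();   // deprecated String form
    JobID typedId = job.getID();  // preferred typed form
    System.out.println("Submitted job " + id);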

Usage

From source file:Brush.BrushAssembler.java

License:Apache License

public void end(RunningJob job) throws IOException {
    long endtime = System.currentTimeMillis();
    long diff = (endtime - JOBSTARTTIME) / 1000;

    msg(job.getJobID() + " " + diff + " s");

    if (!job.isSuccessful()) {
        System.out.println("Job was not successful");
        System.exit(1);
    }
}

From source file:com.cloudera.jobsub.JobClientTracer.java

License:Apache License

public void submittedJob(RunningJob job) {
    reportStream.println(job.getJobID());
    reportStream.flush();
}

From source file:com.jackbe.mapreduce.LocalJobManager.java

License:Open Source License

public RunningJob startJob(String inputDir, String outputDir, String mapperScript, String reducerScript,
        String combinerScript) throws Exception {

    init();
    conf.setJobName("EMMLMapReduce");
    //conf.setSessionId(Long.toString(System.currentTimeMillis()));

    conf.set("MAPPER_SCRIPT", mapperScript);
    conf.set("REDUCER_SCRIPT", reducerScript);
    if (combinerScript != null) {
        conf.set("COMBINER_SCRIPT", combinerScript);
        conf.setCombinerClass(EMMLCombiner.class);
    }

    //      FileInputFormat.setInputPaths(conf, new Path(inputDir));
    FileInputFormat.setInputPaths(conf, new Path("hdfs://" + NAMENODE + "/" + inputDir));
    //      FileOutputFormat.setOutputPath(conf, new Path(outputDir));
    Path outputPath = new Path("hdfs://" + NAMENODE + "/" + outputDir);
    outputPath.getFileSystem(conf).delete(outputPath, true);
    FileOutputFormat.setOutputPath(conf, outputPath);
    RESTRegistrationJobCallback callback = new RESTRegistrationJobCallback(outputPath, outputDir, conf);

    RunningJob job = null;
    try {
        job = jobClient.submitJob(conf);
        this.registerJobCompleteCallback(job, callback);

        statusMap.put(job.getJobID(), job);
    } catch (IOException e) {
        e.printStackTrace();
        throw e;
    }
    jobClient.getSystemDir();
    return job;
}
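
The String key stored in statusMap above can later be parsed back into a typed JobID to re-fetch the job from the cluster. A minimal sketch, assuming the same jobClient and statusMap fields; the lookup helper itself is hypothetical, not part of the original source:

    // Hypothetical helper: recover a RunningJob from a stored String job ID.
    public RunningJob lookupJob(String jobId) throws IOException {
        RunningJob cached = statusMap.get(jobId);
        if (cached != null)
            return cached;
        // JobID.forName parses the "job_<jtIdentifier>_<id>" form returned by getJobID().
        return jobClient.getJob(JobID.forName(jobId));
    }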

From source file:kafka.etl.impl.SimpleKafkaETLJob.java

License:Apache License

public void execute() throws Exception {
    JobConf conf = createJobConf();
    RunningJob runningJob = new JobClient(conf).submitJob(conf);
    String id = runningJob.getJobID();
    System.out.println("Hadoop job id=" + id);
    runningJob.waitForCompletion();

    if (!runningJob.isSuccessful())
        throw new Exception("Hadoop ETL job failed! Please check status on http://"
                + conf.get("mapred.job.tracker") + "/jobdetails.jsp?jobid=" + id);
}
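
Hand-building the tracking URL from mapred.job.tracker, as above, assumes a classic JobTracker cluster; on YARN there is no jobdetails.jsp page. RunningJob also exposes the tracking URL directly, so a more portable failure message could read (a sketch, not part of the original source):

    if (!runningJob.isSuccessful())
        throw new Exception(
                "Hadoop ETL job " + id + " failed! Please check status on " + runningJob.getTrackingURL());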

From source file:nthu.scopelab.tsqr.ssvd.BtJob.java

License:Apache License

public static void run(Configuration conf, Path[] inputPath, Path btPath, String qrfPath, int k, int p,
        int outerBlockHeight, int reduceTasks, boolean outputBBtProducts, String reduceSchedule, int mis)
        throws Exception {
    boolean outputQ = true;

    String stages[] = reduceSchedule.split(",");

    JobConf job = new JobConf(conf, BtJob.class);
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);
    job.setInt(SCHEDULE_NUM, stages.length);
    job.setInt(PROP_OUTER_PROD_BLOCK_HEIGHT, outerBlockHeight);
    job.setInt(QJob.PROP_K, k);
    job.setInt(QJob.PROP_P, p);
    job.setBoolean(QmultiplyJob.OUTPUT_Q, outputQ);
    job.setBoolean(PROP_OUPTUT_BBT_PRODUCTS, outputBBtProducts);
    job.set(QmultiplyJob.QRF_DIR, qrfPath);
    FileSystem.get(job).delete(btPath, true);

    FileOutputFormat.setOutputPath(job, btPath);

    FileOutputFormat.setCompressOutput(job, true);
    FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    job.setJobName("BtJob");

    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(SparseRowBlockWritable.class);
    job.setOutputKeyClass(IntWritable.class);
    //job.setOutputValueClass(SparseRowBlockWritable.class);
    job.setOutputValueClass(VectorWritable.class);

    job.setMapperClass(BtMapper.class);
    job.setCombinerClass(OuterProductCombiner.class);
    job.setReducerClass(OuterProductReducer.class);

    fileGather fgather = new fileGather(inputPath, "", FileSystem.get(job));
    mis = Checker.checkMis(mis, fgather.getInputSize(), FileSystem.get(job));
    job.setNumMapTasks(fgather.recNumMapTasks(mis));

    //job.setNumReduceTasks(0);
    job.setNumReduceTasks(reduceTasks);

    FileInputFormat.setInputPaths(job, inputPath);

    if (outputQ) {
        MultipleOutputs.addNamedOutput(job, QmultiplyJob.Q_MAT, SequenceFileOutputFormat.class,
                IntWritable.class, LMatrixWritable.class);
    }
    if (outputBBtProducts) {
        MultipleOutputs.addNamedOutput(job, OUTPUT_BBT, SequenceFileOutputFormat.class, IntWritable.class,
                VectorWritable.class);
    }
    RunningJob rj = JobClient.runJob(job);
    System.out.println("Btjob Job ID: " + rj.getJobID().toString());
}
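
Note that getJobID() already returns a String, so the toString() call above is a no-op. With the non-deprecated API the same log line would use the typed ID, e.g.:

    System.out.println("Btjob Job ID: " + rj.getID());  // getID() returns a typed JobID

The same applies to the itBtJob, itQJob, and QJob examples below, which repeat this pattern.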

From source file:nthu.scopelab.tsqr.ssvd.itBtJob.java

License:Apache License

public static void run(Configuration conf, Path[] inputPath, Path btPath, String qrfPath, int k, int p,
        int outerBlockHeight, int reduceTasks, boolean outputBBtProducts, String reduceSchedule, int mis)
        throws Exception {
    boolean outputQ = true;

    String stages[] = reduceSchedule.split(",");

    JobConf job = new JobConf(conf, itBtJob.class);
    job.setInputFormat(SequenceFileInputFormat.class);
    job.setOutputFormat(SequenceFileOutputFormat.class);
    job.setInt(SCHEDULE_NUM, stages.length);
    job.setInt(PROP_OUTER_PROD_BLOCK_HEIGHT, outerBlockHeight);
    job.setInt(QJob.PROP_K, k);
    job.setInt(QJob.PROP_P, p);
    job.setBoolean(QmultiplyJob.OUTPUT_Q, outputQ);
    job.setBoolean(PROP_OUPTUT_BBT_PRODUCTS, outputBBtProducts);
    job.set(QmultiplyJob.QRF_DIR, qrfPath);
    FileSystem.get(job).delete(btPath, true);

    FileOutputFormat.setOutputPath(job, btPath);

    FileOutputFormat.setCompressOutput(job, true);
    FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
    SequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.BLOCK);

    job.setJobName("itBtJob");

    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(SparseRowBlockWritable.class);
    job.setOutputKeyClass(IntWritable.class);
    //job.setOutputValueClass(SparseRowBlockWritable.class);
    job.setOutputValueClass(VectorWritable.class);

    job.setMapperClass(BtMapper.class);
    job.setCombinerClass(OuterProductCombiner.class);
    job.setReducerClass(OuterProductReducer.class);

    fileGather fgather = new fileGather(inputPath, "", FileSystem.get(job));
    mis = Checker.checkMis(mis, fgather.getInputSize(), FileSystem.get(job));
    job.setNumMapTasks(fgather.recNumMapTasks(mis));

    //job.setNumReduceTasks(0);
    job.setNumReduceTasks(reduceTasks);

    FileInputFormat.setInputPaths(job, inputPath);

    if (outputQ) {
        MultipleOutputs.addNamedOutput(job, QmultiplyJob.Q_MAT, SequenceFileOutputFormat.class,
                IntWritable.class, LMatrixWritable.class);
    }
    if (outputBBtProducts) {
        MultipleOutputs.addNamedOutput(job, OUTPUT_BBT, SequenceFileOutputFormat.class, IntWritable.class,
                VectorWritable.class);
    }
    RunningJob rj = JobClient.runJob(job);
    System.out.println("itBtJob Job ID: " + rj.getJobID().toString());
}

From source file:nthu.scopelab.tsqr.ssvd.itQJob.java

License:Apache License

public static void run(Configuration conf, Path[] inputPaths, String outputPath, String reduceSchedule, int k,
        int p, long seed, int mis) throws ClassNotFoundException, InterruptedException, IOException {

    String stages[] = reduceSchedule.split(",");
    String rinput = "";
    String routput = outputPath + "/iter-r-";

    for (int i = 0; i < stages.length; i++) {
        String thenumber = Integer.toString(i + 1);
        JobConf job = new JobConf(conf, itQJob.class);
        job.setJobName("itQ-job-" + thenumber);
        job.setInputFormat(SequenceFileInputFormat.class);
        job.setOutputFormat(SequenceFileOutputFormat.class);

        if (i == 0)
            job.setMapperClass(QMapper.class);
        else
            job.setMapperClass(IdentityMapper.class);

        job.setReducerClass(QReducer.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(LMatrixWritable.class);

        FileSystem fs = FileSystem.get(job);
        Path Paths[];
        fileGather fgather = null;
        if (i == 0)
            fgather = new fileGather(inputPaths, "part", fs);
        else
            fgather = new fileGather(new Path(rinput), "part", fs);
        Paths = fgather.getPaths();
        mis = Checker.checkMis(mis, fgather.getInputSize(), fs);
        job.setNumMapTasks(fgather.recNumMapTasks(mis));

        job.setNumReduceTasks(Integer.parseInt(stages[i]));

        job.setInt(QRFirstJob.COLUMN_SIZE, k + p);
        job.setLong(PROP_OMEGA_SEED, seed);
        job.setInt(PROP_K, k);
        job.setInt(PROP_P, p);

        fs.delete(new Path(routput + thenumber), true);

        FileInputFormat.setInputPaths(job, Paths);

        FileOutputFormat.setOutputPath(job, new Path(routput + thenumber));

        //FileOutputFormat.setCompressOutput(job, true);
        //FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
        //SequenceFileOutputFormat.setOutputCompressionType(job,CompressionType.BLOCK);
        //output first level Q
        MultipleOutputs.addNamedOutput(job, QF_MAT, SequenceFileOutputFormat.class, IntWritable.class,
                LMatrixWritable.class);

        RunningJob rj = JobClient.runJob(job);
        System.out.println("itQJob Job ID: " + rj.getJobID().toString());
        rinput = routput + thenumber;
    }
}

From source file:nthu.scopelab.tsqr.ssvd.QJob.java

License:Apache License

public static void run(Configuration conf, Path[] inputPaths, String outputPath, String reduceSchedule, int k,
        int p, long seed, int mis) throws ClassNotFoundException, InterruptedException, IOException {

    String stages[] = reduceSchedule.split(",");
    String rinput = "";
    String routput = outputPath + "/iter-r-";

    for (int i = 0; i < stages.length; i++) {
        String thenumber = Integer.toString(i + 1);
        JobConf job = new JobConf(conf, QJob.class);
        job.setJobName("Q-job-" + thenumber);
        job.setInputFormat(SequenceFileInputFormat.class);
        job.setOutputFormat(SequenceFileOutputFormat.class);

        if (i == 0)
            job.setMapperClass(QMapper.class);
        else
            job.setMapperClass(IdentityMapper.class);

        job.setReducerClass(QReducer.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(LMatrixWritable.class);

        FileSystem fs = FileSystem.get(job);
        Path Paths[];
        fileGather fgather = null;
        if (i == 0)
            fgather = new fileGather(inputPaths, "part", fs);
        else
            fgather = new fileGather(new Path(rinput), "part", fs);
        Paths = fgather.getPaths();
        mis = Checker.checkMis(mis, fgather.getInputSize(), fs);
        job.setNumMapTasks(fgather.recNumMapTasks(mis));

        job.setNumReduceTasks(Integer.parseInt(stages[i]));

        job.setInt(QRFirstJob.COLUMN_SIZE, k + p);
        job.setLong(PROP_OMEGA_SEED, seed);
        job.setInt(PROP_K, k);
        job.setInt(PROP_P, p);

        fs.delete(new Path(routput + thenumber), true);

        FileInputFormat.setInputPaths(job, Paths);

        FileOutputFormat.setOutputPath(job, new Path(routput + thenumber));

        //FileOutputFormat.setCompressOutput(job, true);
        //FileOutputFormat.setOutputCompressorClass(job, DefaultCodec.class);
        //SequenceFileOutputFormat.setOutputCompressionType(job,CompressionType.BLOCK);
        //output first level Q
        MultipleOutputs.addNamedOutput(job, QF_MAT, SequenceFileOutputFormat.class, IntWritable.class,
                LMatrixWritable.class);

        RunningJob rj = JobClient.runJob(job);
        System.out.println("QJob Job ID: " + rj.getJobID().toString());
        rinput = routput + thenumber;
    }
}

From source file:org.apache.oozie.action.hadoop.TestJavaActionExecutor.java

License:Apache License

public void testRecovery() throws Exception {
    final String actionXml = "<java>" + "<job-tracker>" + getJobTrackerUri() + "</job-tracker>" + "<name-node>"
            + getNameNodeUri() + "</name-node>" + "<main-class>" + LauncherMainTester.class.getName()
            + "</main-class>" + "</java>";
    final Context context = createContext(actionXml, null);
    RunningJob runningJob = submitAction(context);
    String launcherId = context.getAction().getExternalId();

    waitFor(60 * 1000, new Predicate() {
        @Override
        public boolean evaluate() throws Exception {
            JavaActionExecutor ae = new JavaActionExecutor();
            Configuration conf = ae.createBaseHadoopConf(context, XmlUtils.parseXml(actionXml));
            return LauncherMapperHelper.getRecoveryId(conf, context.getActionDir(),
                    context.getRecoveryId()) != null;
        }
    });

    final RunningJob runningJob2 = submitAction(context);

    assertEquals(launcherId, runningJob2.getJobID().toString());
    assertEquals(launcherId, context.getAction().getExternalId());

    waitFor(60 * 1000, new Predicate() {
        @Override
        public boolean evaluate() throws Exception {
            return runningJob2.isComplete();
        }
    });
    assertTrue(runningJob.isSuccessful());
    ActionExecutor ae = new JavaActionExecutor();
    ae.check(context, context.getAction());
    assertEquals("SUCCEEDED", context.getAction().getExternalStatus());
    assertNull(context.getAction().getData());

    ae.end(context, context.getAction());
    assertEquals(WorkflowAction.Status.OK, context.getAction().getStatus());
}

From source file:org.apache.oozie.action.hadoop.TestLauncher.java

License:Apache License

private RunningJob _test(String... arg) throws Exception {
    Path actionDir = getFsTestCaseDir();

    File jar = IOUtils.createJar(new File(getTestCaseDir()), "launcher.jar", LauncherMapper.class,
            LauncherMainException.class, LauncherSecurityManager.class, LauncherException.class,
            LauncherMainTester.class);

    FileSystem fs = getFileSystem();

    Path launcherJar = new Path(actionDir, "launcher.jar");
    fs.copyFromLocalFile(new Path(jar.toString()), launcherJar);

    JobConf jobConf = Services.get().get(HadoopAccessorService.class)
            .createJobConf(new URI(getNameNodeUri()).getAuthority());
    //        jobConf.setJar(jar.getAbsolutePath());
    jobConf.set("user.name", getTestUser());
    jobConf.setInt("mapred.map.tasks", 1);
    jobConf.setInt("mapred.map.max.attempts", 1);
    jobConf.setInt("mapred.reduce.max.attempts", 1);

    jobConf.set("mapreduce.framework.name", "yarn");
    jobConf.set("mapred.job.tracker", getJobTrackerUri());
    jobConf.set("fs.default.name", getNameNodeUri());

    LauncherMapperHelper.setupMainClass(jobConf, LauncherMainTester.class.getName());
    LauncherMapperHelper.setupMainArguments(jobConf, arg);

    Configuration actionConf = new XConfiguration();
    LauncherMapperHelper.setupLauncherInfo(jobConf, "1", "1@a", actionDir, "1@a-0", actionConf, "");
    LauncherMapperHelper.setupYarnRestartHandling(jobConf, jobConf, "1@a", System.currentTimeMillis());

    assertEquals("1", actionConf.get("oozie.job.id"));
    assertEquals("1@a", actionConf.get("oozie.action.id"));

    DistributedCache.addFileToClassPath(new Path(launcherJar.toUri().getPath()), jobConf);

    JobClient jobClient = createJobClient();

    final RunningJob runningJob = jobClient.submitJob(jobConf);

    System.out.println("Action Dir: " + actionDir);
    System.out.println("LauncherMapper ID: " + runningJob.getJobID().toString());

    waitFor(180 * 1000, new Predicate() {
        public boolean evaluate() throws Exception {
            return runningJob.isComplete();
        }
    });

    assertTrue(jobConf.get("oozie.action.prepare.xml").equals(""));
    return runningJob;

}