Example usage for org.apache.hadoop.mapreduce Job killJob

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce.Job#killJob.

Prototype

public void killJob() throws IOException 

Document

Kill the running job.
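
Before the project examples below, here is a minimal, self-contained sketch of the call: look up a submitted job through the Cluster client API and kill it if it is still running. The job ID below is a placeholder (not a real job), and a reachable cluster configuration is assumed.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobID;

public class KillJobExample {
    public static void main(String[] args) throws IOException, InterruptedException {
        Configuration conf = new Configuration();
        Cluster cluster = new Cluster(conf);
        try {
            // JobID.forName(...) parses the standard "job_<clusterTimestamp>_<sequence>" form;
            // this particular ID is a placeholder for illustration only.
            Job job = cluster.getJob(JobID.forName("job_1400000000000_0001"));
            if (job != null && !job.isComplete()) {
                job.killJob(); // asks the framework to terminate the running job
            }
        } finally {
            cluster.close();
        }
    }
}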

Usage

From source file:co.cask.cdap.hbase.wd.RowKeyDistributorTestBase.java

License:Apache License

private void testMapReduceInternal(long origKeyPrefix, Scan scan, int numValues, int startWithValue,
        int seekIntervalMinValue, int seekIntervalMaxValue)
        throws IOException, InterruptedException, ClassNotFoundException {
    int valuesCountInSeekInterval = writeTestData(origKeyPrefix, numValues, startWithValue,
            seekIntervalMinValue, seekIntervalMaxValue);

    // Reading data
    Configuration conf = new Configuration(testingUtility.getConfiguration());
    conf.set("fs.defaultFS", "file:///");
    conf.set("fs.default.name", "file:///");
    conf.setInt("mapreduce.local.map.tasks.maximum", 16);
    conf.setInt("mapreduce.local.reduce.tasks.maximum", 16);
    Job job = Job.getInstance(conf, "testMapReduceInternal()-Job");
    TableMapReduceUtil.initTableMapperJob(TABLE_NAME, scan, RowCounterMapper.class,
            ImmutableBytesWritable.class, Result.class, job);

    // Substituting standard TableInputFormat which was set in TableMapReduceUtil.initTableMapperJob(...)
    job.setInputFormatClass(WdTableInputFormat.class);
    keyDistributor.addInfo(job.getConfiguration());

    job.setOutputFormatClass(NullOutputFormat.class);
    job.setNumReduceTasks(0);

    boolean succeeded = job.waitForCompletion(true);
    Assert.assertTrue(succeeded);

    long mapInputRecords = job.getCounters().findCounter(RowCounterMapper.Counters.ROWS).getValue();
    Assert.assertEquals(valuesCountInSeekInterval, mapInputRecords);

    // Need to kill the job after completion, as otherwise it can leave the MRAppMaster running, not terminated.
    // Not sure what is causing this; it may be a problem in MiniYarnCluster.
    job.killJob();
}

From source file:com.ikanow.aleph2.analytics.hadoop.services.HadoopTechnologyService.java

License:Apache License

@Override
public CompletableFuture<BasicMessageBean> stopAnalyticJob(DataBucketBean analytic_bucket,
        Collection<AnalyticThreadJobBean> jobs, AnalyticThreadJobBean job_to_stop, IAnalyticsContext context) {
    try {
        final Cluster cluster = new Cluster(_config.get());
        final String job_name = BucketUtils.getUniqueSignature(analytic_bucket.full_name(),
                Optional.ofNullable(job_to_stop.name()));
        return Arrays.stream(cluster.getAllJobStatuses())
                .filter(job_status -> job_status.getJobName().equals(job_name)).findFirst()
                .map(Lambdas.wrap_u(job_status -> {
                    final Job job = cluster.getJob(job_status.getJobID());
                    job.killJob();
                    return CompletableFuture
                            .completedFuture(ErrorUtils.buildSuccessMessage(this.getClass().getSimpleName(),
                                    "stopAnalyticJob", analytic_bucket.full_name() + ":" + job_to_stop.name()));
                })).get() // (Will throw if not found falling through to catch below)
        ;
    } catch (Throwable t) {
        return CompletableFuture.completedFuture(
                ErrorUtils.buildErrorMessage(this.getClass().getSimpleName(), "stopAnalyticJob",
                        HadoopErrorUtils.JOB_STOP_ERROR, job_to_stop.name(), analytic_bucket.full_name()));
    }
}
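
Note the pattern above: the Job handle is not retained from submission time. Instead, the running job is re-discovered by its unique name from Cluster.getAllJobStatuses() and re-fetched via Cluster.getJob(JobID) so that killJob() can be invoked on it.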

From source file:DataCubeRefresh.Grep.java

License:Apache License

/**
 * Run function.
 * @param args arguments
 * @return error code
 * @throws Exception if an exception occurs
 */
public int run(String[] args) throws Exception {
    if (args.length < 3) {
        System.out.println("Grep <inUrl> <outUrl> <regex> [<group>]");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    Job grepJob = Job.getInstance(getConf());
    Job sortJob = Job.getInstance(getConf());

    String tempStreamTag = UUID.randomUUID().toString();

    try {
        grepJob.setJobName("grep-search");

        TextHStreamingInputFormat.addInputStream(grepJob, 1000, 600, -1, "", false, args[0]);
        HStreamingJobConf.setIsStreamingJob(grepJob, true);
        grepJob.setMapperClass(RegexMapper.class);
        grepJob.getConfiguration().set("mapred.mapper.regex", args[2]);
        if (args.length == 4)
            grepJob.getConfiguration().set("mapred.mapper.regex.group", args[3]);

        grepJob.setCombinerClass(LongSumReducer.class);
        grepJob.setReducerClass(LongSumReducer.class);
        grepJob.setInputFormatClass(TextHStreamingInputFormat.class);
        grepJob.setOutputFormatClass(TextHStreamingOutputFormat.class);
        HStreamingOutputFormat.setOutputStreamTag(grepJob, tempStreamTag);
        grepJob.setOutputKeyClass(Text.class);
        grepJob.setOutputValueClass(LongWritable.class);
        grepJob.setJarByClass(this.getClass());

        grepJob.submit();

        sortJob.setJobName("grep-sort");
        sortJob.setInputFormatClass(TextHStreamingInputFormat.class);
        HStreamingJobConf.setIsStreamingJob(sortJob, true);

        // add previous stream partition/reducer 0 as input. 
        HStreamingInputFormat.addInputStreamTag(sortJob, tempStreamTag, 0);

        sortJob.setMapperClass(InverseTextMapper.class);
        sortJob.setNumReduceTasks(1); // single output stream
        sortJob.setOutputFormatClass(TextHStreamingOutputFormat.class);
        TextHStreamingOutputFormat.setOutputPath(sortJob, args[1]);
        sortJob.setSortComparatorClass( // sort by decreasing frequency
                LongWritable.DecreasingComparator.class);
        sortJob.setJarByClass(this.getClass());
        sortJob.submit();

        return sortJob.waitForCompletion(true) ? 0 : 1;
    } catch (Exception e) {
        e.printStackTrace();
        try {
            grepJob.killJob();
        } catch (Exception e1) {
            // ignore
        }
        try {
            sortJob.killJob();
        } catch (Exception e2) {
            // ignore
        }
    }
    // Reached only if an exception occurred and the jobs were killed.
    return 1;
}

From source file:edu.iu.ccd.CCDLauncher.java

License:Apache License

private void runCCD(Path inputDir, int r, double lambda, int numIterations, int numMapTasks,
        int numThreadsPerWorker, int numModelSlices, Path modelDir, Path outputDir, String testFilePath,
        Configuration configuration)
        throws IOException, URISyntaxException, InterruptedException, ClassNotFoundException {
    System.out.println("Starting Job");
    int jobID = 0;
    do {
        // --------------------------------------------
        long perJobSubmitTime = System.currentTimeMillis();
        System.out.println("Start Job#" + jobID + " "
                + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
        Job ccdJob = configureCCDJob(inputDir, r, lambda, numIterations, numMapTasks, numThreadsPerWorker,
                numModelSlices, modelDir, outputDir, testFilePath, configuration, jobID);
        boolean jobSuccess = ccdJob.waitForCompletion(true);
        System.out.println("End Jod#" + jobID + " "
                + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
        System.out.println("| Job#" + jobID + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime)
                + " miliseconds |");
        // ----------------------------------------
        if (!jobSuccess) {
            ccdJob.killJob();
            System.out.println("CCD Job failed. Job ID:" + jobID);
            jobID++;
            if (jobID == 3) {
                break;
            }
        } else {
            break;
        }
    } while (true);
}
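
Here killJob() is part of a retry loop: a failed job is killed explicitly before being resubmitted under a new job ID, and the loop gives up after three failed attempts.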

From source file:edu.iu.daal_adaboost.ADABOOSTDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 2]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    Job adaboostJob = init.createJob("AdaboostJob", ADABOOSTDaalLauncher.class,
            ADABOOSTDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = adaboostJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        adaboostJob.killJob();
        System.out.println("Adaboost Job failed");
    }

    return 0;
}

From source file:edu.iu.daal_als.ALSDaalLauncher.java

License:Apache License

/**
 * Launches ALS workers.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    conf.setInt(HarpDAALConstants.NUM_FACTOR, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setDouble(Constants.ALPHA, Double.parseDouble(args[init.getSysArgNum() + 1]));
    conf.setDouble(Constants.LAMBDA, Double.parseDouble(args[init.getSysArgNum() + 2]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 3]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job alsJob = init.createJob("alsJob", ALSDaalLauncher.class, ALSDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = alsJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        alsJob.killJob();
        System.out.println("alsJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_als_batch.ALSBatchDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.NUM_FACTOR, Integer.parseInt(args[init.getSysArgNum() + 1]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    Job alsbatchJob = init.createJob("AlsbatchJob", ALSBatchDaalLauncher.class,
            ALSBatchDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = alsbatchJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        alsbatchJob.killJob();
        System.out.println("ALSbatch Job failed");
    }

    return 0;
}

From source file:edu.iu.daal_ar.Aprior.ARDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setDouble(Constants.MIN_SUPPORT, Double.parseDouble(args[init.getSysArgNum() + 1]));
    conf.setDouble(Constants.MIN_CONFIDENCE, Double.parseDouble(args[init.getSysArgNum() + 2]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job arbatchJob = init.createJob("arbatchJob", ARDaalLauncher.class, ARDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = arbatchJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        arbatchJob.killJob();
        System.out.println("ArBatchJob Job failed");
    }

    return 0;
}

From source file:edu.iu.daal_brownboost.BROWNBOOSTDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FEATURE_DIM, Integer.parseInt(args[init.getSysArgNum()]));
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum() + 1]));
    conf.set(HarpDAALConstants.TEST_FILE_PATH, args[init.getSysArgNum() + 2]);

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job brownboostJob = init.createJob("brownboostJob", BROWNBOOSTDaalLauncher.class,
            BROWNBOOSTDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = brownboostJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        brownboostJob.killJob();
        System.out.println("brownboostJob failed");
    }

    return 0;
}

From source file:edu.iu.daal_cholesky.CLYDaalLauncher.java

License:Apache License

/**
 * Launches all the tasks in order.
 */
@Override
public int run(String[] args) throws Exception {

    /* Put shared libraries into the distributed cache */
    Configuration conf = this.getConf();

    Initialize init = new Initialize(conf, args);

    /* Put shared libraries into the distributed cache */
    init.loadDistributedLibs();

    // load args
    init.loadSysArgs();

    //load app args
    conf.setInt(HarpDAALConstants.FILE_DIM, Integer.parseInt(args[init.getSysArgNum()]));

    // launch job
    System.out.println("Starting Job");
    long perJobSubmitTime = System.currentTimeMillis();
    System.out.println(
            "Start Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));

    Job clyJob = init.createJob("clyJob", CLYDaalLauncher.class, CLYDaalCollectiveMapper.class);

    // finish job
    boolean jobSuccess = clyJob.waitForCompletion(true);
    System.out.println(
            "End Job#" + " " + new SimpleDateFormat("HH:mm:ss.SSS").format(Calendar.getInstance().getTime()));
    System.out.println(
            "| Job#" + " Finished in " + (System.currentTimeMillis() - perJobSubmitTime) + " miliseconds |");
    if (!jobSuccess) {
        clyJob.killJob();
        System.out.println("clyJob failed");
    }

    return 0;
}