Example usage for org.apache.hadoop.mapreduce Job getJobName

Introduction

On this page you can find example usages of org.apache.hadoop.mapreduce.Job#getJobName, drawn from open-source projects.

Prototype

public String getJobName() 

Document

Returns the user-specified job name.
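
Below is a minimal sketch of where this name comes from (the class name and job names are illustrative, not taken from the examples that follow): the name is whatever the user supplies when creating the Job or via setJobName(String), and getJobName() simply reads it back.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class JobNameExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // The name can be supplied when the Job is created...
        Job job = Job.getInstance(conf, "word count");
        System.out.println(job.getJobName()); // prints "word count"

        // ...or set (and re-set) any time before the job is submitted.
        job.setJobName("word count v2");
        System.out.println(job.getJobName()); // prints "word count v2"
    }
}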

Usage

From source file:org.springframework.data.hadoop.batch.mapreduce.JobTasklet.java

License:Apache License
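
In this tasklet helper, getJobName() supplies the "Job Status::Name" entry when job metadata and counters are copied into the Spring Batch step execution context: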

private static void saveJobStats(Job job, StepExecution stepExecution) {
    if (stepExecution == null) {
        return;
    }
    ExecutionContext executionContext = stepExecution.getExecutionContext();
    String statusPrefix = "Job Status::";
    executionContext.put(statusPrefix + "ID", JobUtils.getJobId(job).toString());
    executionContext.put(statusPrefix + "Name", job.getJobName());
    executionContext.put(statusPrefix + "Tracking URL", job.getTrackingURL());
    executionContext.put(statusPrefix + "State", JobUtils.getStatus(job).toString());
    try {
        for (String cgName : job.getCounters().getGroupNames()) {
            CounterGroup group = job.getCounters().getGroup(cgName);
            Iterator<Counter> ci = group.iterator();
            while (ci.hasNext()) {
                Counter c = ci.next();
                executionContext.put(group.getDisplayName().trim() + "::" + c.getDisplayName().trim(),
                        c.getValue());
            }
        }
    } catch (Exception ignore) {
    }
}

From source file:org.springframework.data.hadoop.mapreduce.JobExecutor.java

License:Apache License
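
Here getJobName() labels the log messages emitted while running jobs are killed asynchronously: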

/**
 * Stops the running jobs.
 *
 * @param listener job listener
 * @return list of stopped jobs.
 */
protected Collection<Job> stopJobs(final JobListener listener) {
    shuttingDown = true;

    final Collection<Job> jbs = findJobs();
    final List<Job> killedJobs = new ArrayList<Job>();

    taskExecutor.execute(new Runnable() {
        @Override
        public void run() {

            Object listenerInit = null;
            if (listener != null) {
                listenerInit = listener.beforeAction();
            }

            try {
                for (final Job job : jbs) {
                    try {
                        if (JobUtils.getStatus(job).isRunning()) {
                            synchronized (killedJobs) {
                                killedJobs.add(job);
                            }
                            log.info("Killing job [" + job.getJobName() + "]");
                            job.killJob();
                            if (listener != null) {
                                listener.jobKilled(job);
                            }
                        }
                    } catch (Exception ex) {
                        log.warn("Cannot kill job [" + job.getJobName() + "]", ex);
                        if (RuntimeException.class.isAssignableFrom(ex.getClass())) {
                            throw (RuntimeException) ex;
                        } else {
                            throw new IllegalStateException(ex);
                        }
                    }
                }
            } finally {
                if (listener != null) {
                    listener.afterAction(listenerInit);
                }
            }
        }
    });

    return jbs;
}

From source file:org.springframework.data.hadoop.mapreduce.JobExecutor.java

License:Apache License
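
In startJobs(), getJobName() identifies each job in the log output as it is skipped, submitted, completed, or reported as failed: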

protected Collection<Job> startJobs(final JobListener listener) {
    final Collection<Job> jbs = findJobs();

    final List<Job> started = new ArrayList<Job>();

    taskExecutor.execute(new Runnable() {
        @Override
        public void run() {

            Object listenerInit = null;
            if (listener != null) {
                listenerInit = listener.beforeAction();
            }

            try {

                for (final Job job : jbs) {
                    boolean success = false;
                    try {
                        // job is already running - skip it
                        if (JobUtils.getStatus(job).isStarted()) {
                            log.info("Job [" + job.getJobName() + "] already started; skipping it...");
                            continue;
                        }
                        }

                        log.info("Starting job [" + job.getJobName() + "]");
                        synchronized (started) {
                            started.add(job);
                        }
                        if (!waitForCompletion) {
                            success = true;
                            job.submit();
                        } else {
                            success = job.waitForCompletion(verbose);
                            log.info("Completed job [" + job.getJobName() + "]");
                            if (listener != null) {
                                listener.jobFinished(job);
                            }

                        }
                    } catch (InterruptedException ex) {
                        log.warn("Job [" + job.getJobName() + "] killed");
                        throw new IllegalStateException(ex);
                    } catch (Exception ex) {
                        log.warn("Cannot start job [" + job.getJobName() + "]", ex);
                        throw new IllegalStateException(ex);
                    }

                    if (!success) {
                        if (!shuttingDown) {
                            JobStatus status = JobUtils.getStatus(job);
                            if (JobStatus.KILLED == status) {
                                throw new IllegalStateException("Job " + job.getJobName() + "] killed");
                            } else {
                                throw new IllegalStateException(
                                        "Job [" + job.getJobName() + "] failed to start; status=" + status);
                            }
                        } else {
                            log.info("Job [" + job.getJobName() + "] killed by shutdown");
                        }
                    }
                }
            } finally {
                if (listener != null) {
                    listener.afterAction(listenerInit);
                }
            }
        }
    });

    return started;
}

From source file:org.springframework.hadoop.context.DefaultContextLoader.java

License:Apache License
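
This context loader propagates an existing job name from the Job bean into its configuration under SPRING_JOB_NAME: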

private Job getJobInternal(ApplicationContextReference reference, String jobName) {

    Job job = null;
    if (jobName == null) {
        job = reference.getContext().getBean(Job.class);
    } else {
        job = reference.getContext().getBean(jobName, Job.class);
    }

    if (job != null) {
        Configuration configuration = job.getConfiguration();
        configuration.set(SPRING_CONFIG_BOOTSTRAP,
                PropertiesUtils.propertiesToString(reference.getBootstrap()));
        configuration.set(SPRING_CONFIG_LOCATION, reference.getConfigLocation());
        reference.increment();
        if (job.getJobName() != null) {
            jobName = job.getJobName();
            configuration.set(SPRING_JOB_NAME, jobName);
        }
    }

    return job;

}

From source file:org.springframework.hadoop.JobTemplate.java

License:Apache License
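
JobTemplate reuses the template's name when constructing a fresh Job, so the original template can remain a singleton: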

private boolean runFromTemplate(Job template, Properties bootstrap) {

    try {

        // Construct the new complete configuration *before* the Job is
        // initialized...
        Configuration configuration = template.getConfiguration();
        mergeExtraConfiguration(configuration);
        // Leave the original Job intact (so it can be a singleton).
        Job job = new Job(configuration, template.getJobName());

        if (configuration.get(SPRING_INPUT_PATHS) != null) {
            for (String path : StringUtils
                    .commaDelimitedListToStringArray(configuration.get(SPRING_INPUT_PATHS))) {
                FileInputFormat.addInputPath(job, new Path(path));
            }
        }
        if (configuration.get(SPRING_OUTPUT_PATH) != null) {
            String path = configuration.get(SPRING_OUTPUT_PATH);
            FileOutputFormat.setOutputPath(job, new Path(path));
        }

        return job.waitForCompletion(verbose);

    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        throw new HadoopException("Cannot execute Job", e);
    } finally {
        HadoopApplicationContextUtils.releaseJob(template);
    }

}

From source file:org.trend.hgraph.mapreduce.lib.input.Driver.java

License:Apache License
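
This driver reads the job name after completion to report whether the input-split job succeeded or failed: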

@Override
public int run(String[] args) throws Exception {
    if (null == args || args.length == 0) {
        System.err.println("no any option specified !!");
        printUsage();//from  w w  w. j av  a  2 s .  c o  m
        return -1;
    }

    int mustStartIdx = -1;
    String cmd = null;
    for (int idx = 0; idx < args.length; idx++) {
        cmd = args[idx];
        if (cmd.startsWith("-")) {
            if (mustStartIdx > -1) {
                System.err.println("option order is incorrect !!");
                printUsage();
                return -1;
            }

            if (cmd.equals("-h") || cmd.equals("--help")) {
                printUsage();
                return 0;
            } else if (cmd.equals("-m") || cmd.equals("--mappers-for-one-region")) {
                idx++;
                cmd = args[idx];
                try {
                    mappersForOneRegion = Integer.parseInt(cmd);
                } catch (NumberFormatException e) {
                    System.err.println("-m shall be a numeric value, m:" + cmd);
                    printUsage();
                    return -1;
                }

            } else if (cmd.equals("-b") || cmd.equals("--bypass-rowkeys")) {
                idx++;
                cmd = args[idx];
                try {
                    bypassRowKeys = Integer.parseInt(cmd);
                } catch (NumberFormatException e) {
                    System.err.println("-b shall be a numeric value, b:" + cmd);
                    printUsage();
                    return -1;
                }

            } else {
                System.err.println("Not a defined option:" + cmd);
                printUsage();
                return 1;
            }
        } else {
            if (mustStartIdx < 0) {
                mustStartIdx = idx;
            }
        }
    }

    if (mustStartIdx + 2 != args.length) {
        System.err.println("The must options not satisfied !!");
        printUsage();
        return 1;
    }

    LOGGER.info("start to run " + this.getClass().getName() + " with options:" + Arrays.toString(args));

    String tableName = args[mustStartIdx];
    String outputPath = args[mustStartIdx + 1];
    Configuration conf = this.getConf();

    LOGGER.info(HBaseGraphConstants.HBASE_GRAPH_TABLE_VERTEX_NAME_KEY + "=" + tableName);
    conf.set(HBaseGraphConstants.HBASE_GRAPH_TABLE_VERTEX_NAME_KEY, tableName);

    LOGGER.info("outputPath=" + outputPath);

    conf.set(CalculateInputSplitMapper.Mapper.BY_PASS_KEYS, bypassRowKeys + "");
    conf.set(CalculateInputSplitReducer.MAPPERS_FOR_ONE_REGION, mappersForOneRegion + "");

    Job job = createInputSplitJob(conf, outputPath);
    boolean jobSucceed = job.waitForCompletion(true);

    String jobName = job.getJobName();
    if (jobSucceed) {
        LOGGER.info("job:" + jobName + " ran successfully !!");
    } else {
        LOGGER.error("job:" + jobName + " ran failed !!");
        return -1;
    }
    return 0;
}

From source file:org.trend.hgraph.mapreduce.pagerank.Driver.java

License:Apache License
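
The iterative PageRank driver uses getJobName() to identify the failing job when an iteration or the final import step does not complete successfully: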

@Override
public int run(String[] args) throws Exception {
    if (null == args || args.length == 0) {
        System.err.println("args is 0");
        printUsage();//from   w  w  w  .  j a  v a 2 s  . co m
        return 1;
    }

    String arg = null;
    int startMustIdx = -1;
    for (int a = 0; a < args.length; a++) {
        arg = args[a];
        if (arg.startsWith("-")) {
            if (startMustIdx >= 0) {
                System.err.println("options order incorrect !!");
                printUsage();
                return 1;
            }

            // optional
            if ("-c".equals(arg) || "--vertices-total-count".equals(arg)) {
                includeVeticesTotalCount = true;
            } else if ("-i".equals(arg) || "--import-result".equals(arg)) {
                importResults = true;
            } else if ("-h".equals(arg) || "--help".equals(arg)) {
                printUsage();
                return 0;
            } else if ("-t".equals(arg) || "--threshold".equals(arg)) {
                a++;
                String tmpArg = args[a];
                try {
                    pageRankThreshold = Long.parseLong(tmpArg);
                } catch (NumberFormatException e) {
                    System.err.println("parsing pageRank threshold failed, value:" + tmpArg);
                    printUsage();
                    return 1;
                }
            } else if ("-e".equals(arg) || "".equals("--iteration")) {
                a++;
                String tmpArg = args[a];
                try {
                    pageRankIterations = Long.parseLong(tmpArg);
                } catch (NumberFormatException e) {
                    System.err.println("parsing pageRank threshold failed, value:" + tmpArg);
                    printUsage();
                    return 1;
                }
            } else if ("-g".equals(arg) || "--give-vertices-total-count".equals(arg)) {
                a++;
                String tmpArg = args[a];
                try {
                    verticesTotalCount = Long.parseLong(tmpArg);
                } catch (NumberFormatException e) {
                    System.err.println("parsing pageRank threshold failed, value:" + tmpArg);
                    printUsage();
                    return 1;
                }
            } else if ("-p".equals(arg) || "--input-splits-path".equals(arg)) {
                a++;
                inputSplitsPath = args[a];
            } else {
                System.err.println("Not a defined option:" + arg);
                printUsage();
                return 1;
            }
        } else {
            // must
            if (startMustIdx < 0)
                startMustIdx = a;
        }
    }

    if (startMustIdx + 3 != args.length) {
        System.err.println("The must options not satisfied !!");
        printUsage();
        return 1;
    }

    LOGGER.info("start to run " + this.getClass().getName() + " with options:" + Arrays.toString(args));

    Configuration conf = getConf();
    Class<? extends TableInputFormat> tableInputFormat = TableInputFormat.class;
    String vertexTableName = args[startMustIdx];
    String edgeTableName = args[startMustIdx + 1];
    String outputBasePath = args[startMustIdx + 2];

    LOGGER.info(HBaseGraphConstants.HBASE_GRAPH_TABLE_VERTEX_NAME_KEY + "=" + vertexTableName);
    conf.set(HBaseGraphConstants.HBASE_GRAPH_TABLE_VERTEX_NAME_KEY, vertexTableName);

    LOGGER.info(HBaseGraphConstants.HBASE_GRAPH_TABLE_EDGE_NAME_KEY + "=" + edgeTableName);
    conf.set(HBaseGraphConstants.HBASE_GRAPH_TABLE_EDGE_NAME_KEY, edgeTableName);

    LOGGER.info("outputBasePath=" + outputBasePath);

    // collect total vertices count
    if (includeVeticesTotalCount && verticesTotalCount == -1L) {
        LOGGER.info("start to collect vertices total count");
        int retCode = 0;
        retCode = collectVeticesTotalCount(conf, vertexTableName);
        if (retCode != 0) {
            System.err.println("run vertices total count job failed, with retCode:" + retCode);
            return retCode;
        }
    } else if (includeVeticesTotalCount && verticesTotalCount != -1L) {
        System.err.println("can not use two options '-c' and '-g' in the same time");
        printUsage();
        return 1;
    }

    // the user gave the total count manually
    if (verticesTotalCount > 0) {
        conf.set(Constants.PAGE_RANK_VERTICES_TOTAL_COUNT_KEY, verticesTotalCount + "");
        LOGGER.info(Constants.PAGE_RANK_VERTICES_TOTAL_COUNT_KEY + "=" + verticesTotalCount);
    }

    // the user gave an inputSplitsPath for the customized TableInputFormat
    if (null != inputSplitsPath && !"".equals(inputSplitsPath)) {
        tableInputFormat = org.trend.hgraph.mapreduce.lib.input.TableInputFormat.class;
        conf.set(org.trend.hgraph.mapreduce.lib.input.TableInputFormat.INPUT_SPLIT_PAIRS_INPUT_PATH,
                inputSplitsPath);
    }

    // run pageRank
    boolean exit = false;
    boolean firstRun = true;
    Job job = null;
    boolean jobSucceed = false;
    long pageRankChangedCount = 0L;
    String inputPath = null;
    long iterations = 0L;
    while (!exit) {
        iterations++;
        LOGGER.info("start to run interation:" + iterations);
        if (firstRun) {
            firstRun = false;
            job = createInitialPageRankJob(conf, outputBasePath, tableInputFormat);
            inputPath = job.getConfiguration().get("mapred.output.dir");
        } else {
            job = createInterMediatePageRankJob(conf, inputPath, outputBasePath);
            inputPath = job.getConfiguration().get("mapred.output.dir");
        }
        jobSucceed = job.waitForCompletion(true);
        if (!jobSucceed) {
            LOGGER.error("run job:" + job.getJobName() + " failed at iteration(s):" + iterations);
            return 1;
        }
        pageRankChangedCount = getPageRankChangedCount(job);
        if (pageRankChangedCount <= pageRankThreshold) {
            exit = true;
            LOGGER.info("threshold reached, pageRankThreshold:" + pageRankThreshold + ", pageRankChangedCount:"
                    + pageRankChangedCount + ", iteration(s):" + iterations);
        }

        if (pageRankIterations == iterations) {
            exit = true;
            LOGGER.info("iterations reached, iteration(s):" + iterations + ", pageRankChangedCount:"
                    + pageRankChangedCount);
        }
    }
    // for test usage
    this.finalOutputPath = inputPath;

    if (importResults) {
        job = ImportPageRanks.createSubmittableJob(conf, inputPath);
        jobSucceed = job.waitForCompletion(true);
        if (!jobSucceed) {
            LOGGER.error("run job:" + job.getJobName() + " failed !!");
            return 1;
        }
    }
    // all jobs ran successfully !!
    return 0;
}

From source file:org.trend.hgraph.mapreduce.pagerank.Driver.java

License:Apache License
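
Here the job name is captured before the row-counting job runs so that any exception can be logged with the name of the failed job: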

private static int collectVeticesTotalCount(Configuration conf, String vertexTableName)
        throws IOException, InterruptedException, ClassNotFoundException {
    long totalCount = 1L;
    boolean success = false;
    Counter counter = null;
    String jobName = null;
    try {
        Job job = RowCounter.createSubmittableJob(conf, new String[] { vertexTableName });
        if (job == null) {
            System.err.println("job is null");
            return 1;
        }

        jobName = job.getJobName();
        success = job.waitForCompletion(true);
        counter = job.getCounters()
                .findCounter("org.apache.hadoop.hbase.mapreduce.RowCounter$RowCounterMapper$Counters", "ROWS");
        if (null != counter) {
            totalCount = counter.getValue();
            conf.set(Constants.PAGE_RANK_VERTICES_TOTAL_COUNT_KEY, totalCount + "");
        }
        LOGGER.info(Constants.PAGE_RANK_VERTICES_TOTAL_COUNT_KEY + "=" + totalCount);

    } catch (IOException e) {
        LOGGER.error("run " + jobName + " failed", e);
        throw e;
    } catch (InterruptedException e) {
        LOGGER.error("run " + jobName + " failed", e);
        throw e;
    } catch (ClassNotFoundException e) {
        LOGGER.error("run " + jobName + " failed", e);
        throw e;
    }

    return success ? 0 : -1;
}

From source file:org.trend.hgraph.mapreduce.pagerank.GetNoColumnsRows.java

License:Apache License
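
On failure, getJobName() is included in the error message printed to stderr: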

@Override
public int run(String[] args) throws Exception {
    if (null == args || args.length == 0) {
        System.err.println("no any option given !!");
        printUsage();/*from  w w w.ja v a  2 s. c  o  m*/
        return -1;
    }

    System.out.println("options:" + Arrays.toString(args));
    boolean and = true;
    String cmd = null;
    int mustStartIdx = -1;
    for (int a = 0; a < args.length; a++) {
        cmd = args[a];
        if (cmd.startsWith("-")) {
            if (mustStartIdx > -1) {
                System.err.println("option order is incorrect !!");
                printUsage();
                return -1;
            }

            if ("-a".equals(cmd)) {
                and = true;
            } else if ("-o".equals(cmd)) {
                and = false;
            } else {
                System.err.println("option is not defined !!");
                printUsage();
                return -1;
            }
        } else {
            if (mustStartIdx == -1) {
                mustStartIdx = a;
            }
        }
    }

    String tableName = args[mustStartIdx];
    String outputPath = args[mustStartIdx + 1];
    List<String> columns = new ArrayList<String>();
    for (int a = mustStartIdx + 2; a < args.length; a++) {
        columns.add(args[a]);
    }

    LOGGER.info("tableName=" + tableName);
    LOGGER.info("outputPath=" + outputPath);
    LOGGER.info("columns=" + columns);

    Configuration conf = this.getConf();
    conf.setBoolean(Mapper.AND_OR, and);
    conf.setStrings(Mapper.NO_COLUMNS, columns.toArray(new String[] {}));

    Job job = createSubmittableJob(conf, tableName, outputPath);
    boolean success = job.waitForCompletion(true);
    if (!success) {
        System.err.println("run job:" + job.getJobName() + " failed");
        return -1;
    }

    // for test
    Counter counter = job.getCounters().findCounter(
            "org.trend.hgraph.mapreduce.pagerank.GetNoColumnsRows$Mapper$Counters", "COLLECTED_ROWS");
    if (null != counter) {
        collectedRow = counter.getValue();
    }

    return 0;
}

From source file:org.trend.hgraph.mapreduce.pagerank.ImportPageRanks.java

License:Apache License
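
This driver logs the job name both before starting the job and after it finishes: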

@Override
public int run(String[] args) throws Exception {
    if (null == args || args.length != 2) {
        System.err.println("shall pass only 2 options");
        printUsage();/*from   ww  w .  j av a  2 s . co  m*/
        return 1;
    }

    String inputPath = args[0];
    String vertexTableName = args[1];
    LOGGER.info("pass two options:" + inputPath + ", " + vertexTableName);

    Configuration conf = getConf();
    conf.set(HBaseGraphConstants.HBASE_GRAPH_TABLE_VERTEX_NAME_KEY, vertexTableName);

    Job job = createSubmittableJob(conf, inputPath);
    String jobName = job.getJobName();
    LOGGER.info("start to run job:" + jobName);
    boolean succeed = job.waitForCompletion(true);
    if (!succeed)
        return 1;
    LOGGER.info("run job:" + jobName + " finished");
    return 0;
}