Example usage for java.lang ReflectiveOperationException getLocalizedMessage

Introduction

This page shows example usage of java.lang.ReflectiveOperationException.getLocalizedMessage().

Prototype

public String getLocalizedMessage() 

Document

Creates a localized description of this throwable. Subclasses may override this method to produce a locale-specific message; the default implementation returns the same result as getMessage().
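
Before the full usage example, here is a minimal, self-contained sketch of the call: it deliberately triggers a ClassNotFoundException, which is a subclass of ReflectiveOperationException, and prints the localized message. The class name com.example.NoSuchClass is a placeholder.

public class GetLocalizedMessageDemo {
    public static void main(String[] args) {
        try {
            // ClassNotFoundException extends ReflectiveOperationException
            Class.forName("com.example.NoSuchClass"); // deliberately missing class
        } catch (ReflectiveOperationException e) {
            // by default this returns the same text as getMessage()
            System.out.println(e.getLocalizedMessage());
        }
    }
}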

Usage

From source file: org.apache.kylin.job.common.MapReduceExecutable.java

@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final String mapReduceJobClass = getMapReduceJobClass();
    String params = getMapReduceParams();
    Preconditions.checkNotNull(mapReduceJobClass);
    Preconditions.checkNotNull(params);
    try {
        Job job;
        final Map<String, String> extra = executableManager.getOutput(getId()).getExtra();
        if (extra.containsKey(ExecutableConstants.MR_JOB_ID)) {
            job = new Cluster(new Configuration())
                    .getJob(JobID.forName(extra.get(ExecutableConstants.MR_JOB_ID)));
            logger.info("mr_job_id:" + extra.get(ExecutableConstants.MR_JOB_ID + " resumed"));
        } else {
            final Constructor<? extends AbstractHadoopJob> constructor = ClassUtil
                    .forName(mapReduceJobClass, AbstractHadoopJob.class).getConstructor();
            final AbstractHadoopJob hadoopJob = constructor.newInstance();
            hadoopJob.setAsync(true); // so the ToolRunner.run() returns right away
            logger.info("parameters of the MapReduceExecutable:");
            logger.info(params);
            String[] args = params.trim().split("\\s+");
            try {
                // for an async MR job, ToolRunner.run() just returns 0
                ToolRunner.run(hadoopJob, args);
            } catch (Exception ex) {
                StringBuilder log = new StringBuilder();
                logger.error("error execute " + this.toString(), ex);
                StringWriter stringWriter = new StringWriter();
                ex.printStackTrace(new PrintWriter(stringWriter));
                log.append(stringWriter.toString()).append("\n");
                log.append("result code:").append(2);
                return new ExecuteResult(ExecuteResult.State.ERROR, log.toString());
            }
            job = hadoopJob.getJob();
        }
        final StringBuilder output = new StringBuilder();
        final HadoopCmdOutput hadoopCmdOutput = new HadoopCmdOutput(job, output);

        final String restStatusCheckUrl = getRestStatusCheckUrl(job, context.getConfig());
        if (restStatusCheckUrl == null) {
            logger.error("restStatusCheckUrl is null");
            return new ExecuteResult(ExecuteResult.State.ERROR, "restStatusCheckUrl is null");
        }
        String mrJobId = hadoopCmdOutput.getMrJobId();
        HadoopStatusChecker statusChecker = new HadoopStatusChecker(restStatusCheckUrl, mrJobId, output);
        JobStepStatusEnum status = JobStepStatusEnum.NEW;
        while (!isDiscarded()) {
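            // poll the status-check REST endpoint until the step completes or the executable is discarded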
            JobStepStatusEnum newStatus = statusChecker.checkStatus();
            if (status == JobStepStatusEnum.WAITING && (newStatus == JobStepStatusEnum.FINISHED
                    || newStatus == JobStepStatusEnum.ERROR || newStatus == JobStepStatusEnum.RUNNING)) {
                final long waitTime = System.currentTimeMillis() - getStartTime();
                setMapReduceWaitTime(waitTime);
            }
            status = newStatus;
            executableManager.addJobInfo(getId(), hadoopCmdOutput.getInfo());
            if (status.isComplete()) {
                hadoopCmdOutput.updateJobCounter();
                final Map<String, String> info = hadoopCmdOutput.getInfo();
                info.put(ExecutableConstants.SOURCE_RECORDS_COUNT, hadoopCmdOutput.getMapInputRecords());
                info.put(ExecutableConstants.SOURCE_RECORDS_SIZE, hadoopCmdOutput.getHdfsBytesRead());
                info.put(ExecutableConstants.HDFS_BYTES_WRITTEN, hadoopCmdOutput.getHdfsBytesWritten());
                executableManager.addJobInfo(getId(), info);

                if (status == JobStepStatusEnum.FINISHED) {
                    return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
                } else {
                    return new ExecuteResult(ExecuteResult.State.FAILED, output.toString());
                }
            }
            Thread.sleep(context.getConfig().getYarnStatusCheckIntervalSeconds() * 1000);
        }
        //TODO kill discarded mr job using "hadoop job -kill " + mrJobId

        return new ExecuteResult(ExecuteResult.State.DISCARDED, output.toString());

    } catch (ReflectiveOperationException e) {
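        // ReflectiveOperationException covers ClassNotFoundException, NoSuchMethodException,
        // InstantiationException, IllegalAccessException and InvocationTargetException
        // thrown by the reflective job construction above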
        logger.error("error getMapReduceJobClass, class name:" + getParam(KEY_MR_JOB), e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    } catch (Exception e) {
        logger.error("error execute " + this.toString(), e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
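
The ReflectiveOperationException handler above guards the reflective construction of the Hadoop job (ClassUtil.forName, getConstructor, newInstance) and converts any failure into an ERROR result carrying e.getLocalizedMessage(). Stripped of the Kylin-specific types, the pattern looks roughly like the following sketch; ReflectiveJobFactory, newJob, and com.example.MissingJob are illustrative names, and Runnable stands in for AbstractHadoopJob:

import java.lang.reflect.Constructor;

public class ReflectiveJobFactory {
    // Instantiate a job class by name via its no-arg constructor. Every checked
    // reflective failure (missing class, missing constructor, abstract class,
    // inaccessible constructor, constructor throwing) is a subclass of
    // ReflectiveOperationException, so one catch clause covers them all.
    static Runnable newJob(String className) throws ReflectiveOperationException {
        Class<? extends Runnable> clazz = Class.forName(className).asSubclass(Runnable.class);
        Constructor<? extends Runnable> ctor = clazz.getConstructor();
        return ctor.newInstance();
    }

    public static void main(String[] args) {
        try {
            newJob("com.example.MissingJob").run(); // placeholder class name
        } catch (ReflectiveOperationException e) {
            // mirror the example above: report only the localized message
            System.err.println("cannot build job: " + e.getLocalizedMessage());
        }
    }
}

Note that returning e.getLocalizedMessage() in the ExecuteResult propagates only the exception message to the caller; the full stack trace is preserved separately through the logger.error call.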