Example usage for org.apache.hadoop.mapred JobClient close

List of usage examples for org.apache.hadoop.mapred JobClient close

Introduction

On this page you can find example usage of org.apache.hadoop.mapred JobClient close().

Prototype

@Override
public synchronized void close() throws IOException 

Source Link

Document

Close the JobClient.
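
In practice close() is called in a finally block once the client has been used to submit or inspect jobs, as every example below does. Here is a minimal, self-contained sketch; the class name JobClientCloseExample and the getClusterStatus() call are only illustrative and are not taken from any of the source files listed under Usage.

import java.io.IOException;

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

public class JobClientCloseExample {
    public static void printTaskTrackerCount(JobConf conf) throws IOException {
        // Creating the client opens a connection to the cluster described by conf.
        JobClient jobClient = new JobClient(conf);
        try {
            // Any JobClient usage goes here; getClusterStatus() is just one example call.
            System.out.println("Task trackers: " + jobClient.getClusterStatus().getTaskTrackers());
        } finally {
            // Always close the client so the underlying resources are released.
            jobClient.close();
        }
    }
}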

Usage

From source file:DataJoinJob.java

License:Apache License

/**
 * Submit/run a map/reduce job.
 * 
 * @param job
 * @return true for success
 * @throws IOException
 */
public static boolean runJob(JobConf job) throws IOException {
    JobClient jc = new JobClient(job);
    boolean success = true;
    RunningJob running = null;
    try {
        running = jc.submitJob(job);
        JobID jobId = running.getID();
        System.out.println("Job " + jobId + " is submitted");
        while (!running.isComplete()) {
            System.out.println("Job " + jobId + " is still running.");
            try {
                Thread.sleep(60000);
            } catch (InterruptedException e) {
            }
            running = jc.getJob(jobId);
        }
        success = running.isSuccessful();
    } finally {
        if (!success && (running != null)) {
            running.killJob();
        }
        jc.close();
    }
    return success;
}

From source file:com.cloudera.circus.test.TestXTest.java

License:Open Source License

@Test
@TestHadoop
public void testHadoopMinicluster() throws Exception {
    JobConf conf = getHadoopConf();
    Assert.assertNotNull(conf);
    FileSystem fs = FileSystem.get(conf);
    Assert.assertNotNull(fs);
    Assert.assertEquals(fs.getUri().getScheme(), "hdfs");
    Assert.assertTrue(fs.exists(getHadoopTestDir()));
    fs.close();
    JobClient jobClient = new JobClient(conf);
    Assert.assertNotNull(jobClient);
    jobClient.close();
}

From source file:com.cloudera.circus.test.TestXTest.java

License:Open Source License

@Test
@TestHadoop
public void testHadoopMapReduce() throws Exception {
    JobConf conf = getHadoopConf();
    FileSystem fs = FileSystem.get(conf);
    JobClient jobClient = new JobClient(conf);
    try {
        Path inputDir = new Path(getHadoopTestDir(), "input");
        Path outputDir = new Path(getHadoopTestDir(), "output");

        fs.mkdirs(inputDir);
        Writer writer = new OutputStreamWriter(fs.create(new Path(inputDir, "data.txt")));
        writer.write("a\n");
        writer.write("b\n");
        writer.write("c\n");
        writer.close();

        JobConf jobConf = getHadoopConf();
        jobConf.setInt("mapred.map.tasks", 1);
        jobConf.setInt("mapred.map.max.attempts", 1);
        jobConf.setInt("mapred.reduce.max.attempts", 1);
        jobConf.set("mapred.input.dir", inputDir.toString());
        jobConf.set("mapred.output.dir", outputDir.toString());
        final RunningJob runningJob = jobClient.submitJob(jobConf);
        waitFor(60 * 1000, true, new Predicate() {
            @Override
            public boolean evaluate() throws Exception {
                return runningJob.isComplete();
            }
        });
        Assert.assertTrue(runningJob.isSuccessful());
        Assert.assertTrue(fs.exists(new Path(outputDir, "part-00000")));
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(fs.open(new Path(outputDir, "part-00000"))));
        Assert.assertTrue(reader.readLine().trim().endsWith("a"));
        Assert.assertTrue(reader.readLine().trim().endsWith("b"));
        Assert.assertTrue(reader.readLine().trim().endsWith("c"));
        Assert.assertNull(reader.readLine());
        reader.close();
    } finally {
        fs.close();
        jobClient.close();
    }
}

From source file:com.cloudera.lib.service.hadoop.HadoopService.java

License:Open Source License

protected void closeJobClient(JobClient jobClient) throws IOException {
    jobClient.close();
}

From source file:com.zhangyue.zeus.controller.TaskController.java

License:Open Source License

/**
 * Kill a running map-reduce job and update the task's status.
 *
 * @param id    the task record ID
 * @param jobId the map-reduce job ID, e.g. job_201312021648_95803
 *              (jtIdentifier = 201312021648, job number = 95803)
 * @return redirect view for the task
 */
@RequestMapping(value = "/stop/{id}/{jobId}")
public ModelAndView killJob(@PathVariable("id") Integer id, @PathVariable("jobId") String jobId) {
    int res = 0;
    String[] args = { "-kill", jobId };
    String msg = Constants.BLANK;
    String msgType = Constants.BLANK;
    // look up the submitted task record
    QueriesEntity queriesEntity = taskManageService.findSubmitTaskById(id);
    try {
        JobClient jobClient = new JobClient();
        res = ToolRunner.run(jobClient, args);
        jobClient.close();
    } catch (Exception e) {
        LOG.error("Hdoop  job -kill  jobId  exception", e);
    }
    if (res == 0) {
        // mark the task as killed and persist the status change
        queriesEntity.setStatus(QueriesEntity.RunningStatus.KILLED.getTypeName());
        taskManageService.setQuery(queriesEntity);
        taskManageService.updateQueryTask();
        msg = "????";
        msgType = "success";
    } else {
        msg = "??,??";
        msgType = "error";
    }
    request.setAttribute("msg", msg);
    request.setAttribute("msgType", msgType);
    return new ModelAndView(new RedirectView(String.valueOf(id)));
}

From source file:org.apache.ambari.servicemonitor.utils.MonitorUtils.java

License:Apache License

public static void closeJobClient(JobClient jobClient) throws IOException {
    if (jobClient != null) {
        jobClient.close();
    }
}

From source file:org.apache.oozie.action.hadoop.JavaActionExecutor.java

License:Apache License

public void submitLauncher(FileSystem actionFs, Context context, WorkflowAction action)
        throws ActionExecutorException {
    JobClient jobClient = null;
    boolean exception = false;
    try {
        Path appPathRoot = new Path(context.getWorkflow().getAppPath());

        // app path could be a file
        if (actionFs.isFile(appPathRoot)) {
            appPathRoot = appPathRoot.getParent();
        }

        Element actionXml = XmlUtils.parseXml(action.getConf());

        // action job configuration
        Configuration actionConf = loadHadoopDefaultResources(context, actionXml);
        setupActionConf(actionConf, context, actionXml, appPathRoot);
        LOG.debug("Setting LibFilesArchives ");
        setLibFilesArchives(context, actionXml, appPathRoot, actionConf);

        String jobName = actionConf.get(HADOOP_JOB_NAME);
        if (jobName == null || jobName.isEmpty()) {
            jobName = XLog.format("oozie:action:T={0}:W={1}:A={2}:ID={3}", getType(),
                    context.getWorkflow().getAppName(), action.getName(), context.getWorkflow().getId());
            actionConf.set(HADOOP_JOB_NAME, jobName);
        }

        injectActionCallback(context, actionConf);

        if (actionConf.get(ACL_MODIFY_JOB) == null || actionConf.get(ACL_MODIFY_JOB).trim().equals("")) {
            // ONLY in the case where user has not given the
            // modify-job ACL specifically
            if (context.getWorkflow().getAcl() != null) {
                // setting the group owning the Oozie job to allow anybody in that
                // group to modify the jobs.
                actionConf.set(ACL_MODIFY_JOB, context.getWorkflow().getAcl());
            }
        }

        // Setting the credential properties in launcher conf
        JobConf credentialsConf = null;
        HashMap<String, CredentialsProperties> credentialsProperties = setCredentialPropertyToActionConf(
                context, action, actionConf);
        if (credentialsProperties != null) {

            // Adding if action need to set more credential tokens
            credentialsConf = new JobConf(false);
            XConfiguration.copy(actionConf, credentialsConf);
            setCredentialTokens(credentialsConf, context, action, credentialsProperties);

            // insert conf to action conf from credentialsConf
            for (Entry<String, String> entry : credentialsConf) {
                if (actionConf.get(entry.getKey()) == null) {
                    actionConf.set(entry.getKey(), entry.getValue());
                }
            }
        }

        JobConf launcherJobConf = createLauncherConf(actionFs, context, action, actionXml, actionConf);

        LOG.debug("Creating Job Client for action " + action.getId());
        jobClient = createJobClient(context, launcherJobConf);
        String launcherId = LauncherMapperHelper.getRecoveryId(launcherJobConf, context.getActionDir(),
                context.getRecoveryId());
        boolean alreadyRunning = launcherId != null;
        RunningJob runningJob;

        // if user-retry is on, always submit new launcher
        boolean isUserRetry = ((WorkflowActionBean) action).isUserRetry();

        if (alreadyRunning && !isUserRetry) {
            runningJob = jobClient.getJob(JobID.forName(launcherId));
            if (runningJob == null) {
                String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
                throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
                        "unknown job [{0}@{1}], cannot recover", launcherId, jobTracker);
            }
        } else {
            LOG.debug("Submitting the job through Job Client for action " + action.getId());

            // setting up propagation of the delegation token.
            HadoopAccessorService has = Services.get().get(HadoopAccessorService.class);
            Token<DelegationTokenIdentifier> mrdt = jobClient
                    .getDelegationToken(has.getMRDelegationTokenRenewer(launcherJobConf));
            launcherJobConf.getCredentials().addToken(HadoopAccessorService.MR_TOKEN_ALIAS, mrdt);

            // insert credentials tokens to launcher job conf if needed
            if (needInjectCredentials() && credentialsConf != null) {
                for (Token<? extends TokenIdentifier> tk : credentialsConf.getCredentials().getAllTokens()) {
                    Text fauxAlias = new Text(tk.getKind() + "_" + tk.getService());
                    LOG.debug("ADDING TOKEN: " + fauxAlias);
                    launcherJobConf.getCredentials().addToken(fauxAlias, tk);
                }
            } else {
                LOG.info("No need to inject credentials.");
            }
            runningJob = jobClient.submitJob(launcherJobConf);
            if (runningJob == null) {
                throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "JA017",
                        "Error submitting launcher for action [{0}]", action.getId());
            }
            launcherId = runningJob.getID().toString();
            LOG.debug("After submission get the launcherId " + launcherId);
        }

        String jobTracker = launcherJobConf.get(HADOOP_JOB_TRACKER);
        String consoleUrl = runningJob.getTrackingURL();
        context.setStartData(launcherId, jobTracker, consoleUrl);
    } catch (Exception ex) {
        exception = true;
        throw convertException(ex);
    } finally {
        if (jobClient != null) {
            try {
                jobClient.close();
            } catch (Exception e) {
                if (exception) {
                    LOG.error("JobClient error: ", e);
                } else {
                    throw convertException(e);
                }
            }
        }
    }
}

From source file:org.apache.oozie.action.hadoop.JavaActionExecutor.java

License:Apache License

@Override
public void check(Context context, WorkflowAction action) throws ActionExecutorException {
    JobClient jobClient = null;
    boolean exception = false;
    LogUtils.setLogInfo(action);
    try {
        Element actionXml = XmlUtils.parseXml(action.getConf());
        FileSystem actionFs = context.getAppFileSystem();
        JobConf jobConf = createBaseHadoopConf(context, actionXml);
        jobClient = createJobClient(context, jobConf);
        RunningJob runningJob = getRunningJob(context, action, jobClient);
        if (runningJob == null) {
            context.setExecutionData(FAILED, null);
            throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
                    "Could not lookup launched hadoop Job ID [{0}] which was associated with "
                            + " action [{1}].  Failing this action!",
                    getActualExternalId(action), action.getId());
        }
        if (runningJob.isComplete()) {
            Path actionDir = context.getActionDir();
            String newId = null;
            // load sequence file into object
            Map<String, String> actionData = LauncherMapperHelper.getActionData(actionFs, actionDir, jobConf);
            if (actionData.containsKey(LauncherMapper.ACTION_DATA_NEW_ID)) {
                newId = actionData.get(LauncherMapper.ACTION_DATA_NEW_ID);
                String launcherId = action.getExternalId();
                runningJob = jobClient.getJob(JobID.forName(newId));
                if (runningJob == null) {
                    context.setExternalStatus(FAILED);
                    throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "JA017",
                            "Unknown hadoop job [{0}] associated with action [{1}].  Failing this action!",
                            newId, action.getId());
                }
                context.setExternalChildIDs(newId);
                LOG.info(XLog.STD, "External ID swap, old ID [{0}] new ID [{1}]", launcherId, newId);
            } else {
                String externalIDs = actionData.get(LauncherMapper.ACTION_DATA_EXTERNAL_CHILD_IDS);
                if (externalIDs != null) {
                    context.setExternalChildIDs(externalIDs);
                    LOG.info(XLog.STD, "Hadoop Jobs launched : [{0}]", externalIDs);
                }
            }
            if (runningJob.isComplete()) {
                // fetching action output and stats for the Map-Reduce action.
                if (newId != null) {
                    actionData = LauncherMapperHelper.getActionData(actionFs, context.getActionDir(), jobConf);
                }
                LOG.info(XLog.STD, "action completed, external ID [{0}]", action.getExternalId());
                if (LauncherMapperHelper.isMainSuccessful(runningJob)) {
                    if (getCaptureOutput(action) && LauncherMapperHelper.hasOutputData(actionData)) {
                        context.setExecutionData(SUCCEEDED, PropertiesUtils
                                .stringToProperties(actionData.get(LauncherMapper.ACTION_DATA_OUTPUT_PROPS)));
                        LOG.info(XLog.STD, "action produced output");
                    } else {
                        context.setExecutionData(SUCCEEDED, null);
                    }
                    if (LauncherMapperHelper.hasStatsData(actionData)) {
                        context.setExecutionStats(actionData.get(LauncherMapper.ACTION_DATA_STATS));
                        LOG.info(XLog.STD, "action produced stats");
                    }
                    getActionData(actionFs, runningJob, action, context);
                } else {
                    String errorReason;
                    if (actionData.containsKey(LauncherMapper.ACTION_DATA_ERROR_PROPS)) {
                        Properties props = PropertiesUtils
                                .stringToProperties(actionData.get(LauncherMapper.ACTION_DATA_ERROR_PROPS));
                        String errorCode = props.getProperty("error.code");
                        if ("0".equals(errorCode)) {
                            errorCode = "JA018";
                        }
                        if ("-1".equals(errorCode)) {
                            errorCode = "JA019";
                        }
                        errorReason = props.getProperty("error.reason");
                        LOG.warn("Launcher ERROR, reason: {0}", errorReason);
                        String exMsg = props.getProperty("exception.message");
                        String errorInfo = (exMsg != null) ? exMsg : errorReason;
                        context.setErrorInfo(errorCode, errorInfo);
                        String exStackTrace = props.getProperty("exception.stacktrace");
                        if (exMsg != null) {
                            LOG.warn("Launcher exception: {0}{E}{1}", exMsg, exStackTrace);
                        }
                    } else {
                        errorReason = XLog.format("LauncherMapper died, check Hadoop LOG for job [{0}:{1}]",
                                action.getTrackerUri(), action.getExternalId());
                        LOG.warn(errorReason);
                    }
                    context.setExecutionData(FAILED_KILLED, null);
                }
            } else {
                context.setExternalStatus("RUNNING");
                LOG.info(XLog.STD, "checking action, hadoop job ID [{0}] status [RUNNING]", runningJob.getID());
            }
        } else {
            context.setExternalStatus("RUNNING");
            LOG.info(XLog.STD, "checking action, hadoop job ID [{0}] status [RUNNING]", runningJob.getID());
        }
    } catch (Exception ex) {
        LOG.warn("Exception in check(). Message[{0}]", ex.getMessage(), ex);
        exception = true;
        throw convertException(ex);
    } finally {
        if (jobClient != null) {
            try {
                jobClient.close();
            } catch (Exception e) {
                if (exception) {
                    LOG.error("JobClient error: ", e);
                } else {
                    throw convertException(e);
                }
            }
        }
    }
}

From source file:org.apache.oozie.action.hadoop.JavaActionExecutor.java

License:Apache License

@Override
public void kill(Context context, WorkflowAction action) throws ActionExecutorException {
    JobClient jobClient = null;
    boolean exception = false;
    try {
        Element actionXml = XmlUtils.parseXml(action.getConf());
        JobConf jobConf = createBaseHadoopConf(context, actionXml);
        jobClient = createJobClient(context, jobConf);
        RunningJob runningJob = getRunningJob(context, action, jobClient);
        if (runningJob != null) {
            runningJob.killJob();
        }
        context.setExternalStatus(KILLED);
        context.setExecutionData(KILLED, null);
    } catch (Exception ex) {
        exception = true;
        throw convertException(ex);
    } finally {
        try {
            FileSystem actionFs = context.getAppFileSystem();
            cleanUpActionDir(actionFs, context);
            if (jobClient != null) {
                jobClient.close();
            }
        } catch (Exception ex) {
            if (exception) {
                LOG.error("Error: ", ex);
            } else {
                throw convertException(ex);
            }
        }
    }
}

From source file:org.apache.oozie.action.hadoop.MapReduceActionExecutor.java

License:Apache License

@Override
public void end(Context context, WorkflowAction action) throws ActionExecutorException {
    super.end(context, action);
    JobClient jobClient = null;
    boolean exception = false;
    try {
        if (action.getStatus() == WorkflowAction.Status.OK) {
            Element actionXml = XmlUtils.parseXml(action.getConf());
            JobConf jobConf = createBaseHadoopConf(context, actionXml);
            jobClient = createJobClient(context, jobConf);
            RunningJob runningJob = jobClient.getJob(JobID.forName(action.getExternalChildIDs()));
            if (runningJob == null) {
                throw new ActionExecutorException(ActionExecutorException.ErrorType.FAILED, "MR002",
                        "Unknown hadoop job [{0}] associated with action [{1}].  Failing this action!",
                        action.getExternalChildIDs(), action.getId());
            }

            Counters counters = runningJob.getCounters();
            if (counters != null) {
                ActionStats stats = new MRStats(counters);
                String statsJsonString = stats.toJSON();
                context.setVar(HADOOP_COUNTERS, statsJsonString);

                // If action stats write property is set to false by user or
                // size of stats is greater than the maximum allowed size,
                // do not store the action stats
                if (Boolean.parseBoolean(
                        evaluateConfigurationProperty(actionXml, OOZIE_ACTION_EXTERNAL_STATS_WRITE, "false"))
                        && (statsJsonString.getBytes().length <= getMaxExternalStatsSize())) {
                    context.setExecutionStats(statsJsonString);
                    log.debug("Printing stats for Map-Reduce action as a JSON string : [{0}]", statsJsonString);
                }
            } else {
                context.setVar(HADOOP_COUNTERS, "");
                XLog.getLog(getClass()).warn("Could not find Hadoop Counters for: [{0}]",
                        action.getExternalChildIDs());
            }
        }
    } catch (Exception ex) {
        exception = true;
        throw convertException(ex);
    } finally {
        if (jobClient != null) {
            try {
                jobClient.close();
            } catch (Exception e) {
                if (exception) {
                    log.error("JobClient error: ", e);
                } else {
                    throw convertException(e);
                }
            }
        }
    }
}