Example usage for org.apache.hadoop.mapreduce Job getJobName

Introduction

On this page you can find example usages of the org.apache.hadoop.mapreduce.Job method getJobName, collected from open-source projects.

Prototype

public String getJobName() 

Document

Returns the user-specified job name.
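
Before the project excerpts below, here is a minimal, self-contained sketch of the method: it configures a Job with a name and reads it back with getJobName(). The class name, the job name "example-job", and the choice of the Job.getInstance(Configuration, String) factory are illustrative assumptions, not taken from the examples that follow.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class GetJobNameExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // The second argument sets the user-specified job name up front;
        // calling job.setJobName(String) later would have the same effect.
        Job job = Job.getInstance(conf, "example-job");
        // getJobName() returns the name set above; it only identifies
        // the job to the user, e.g. in logs and web UIs.
        System.out.println("Job name: " + job.getJobName());
    }
}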

Usage

From source file: org.apache.jena.hadoop.rdf.stats.RdfStats.java

License: Apache License

private boolean runJob(Job job) throws Throwable {
    System.out.println("Submitting Job " + job.getJobName());
    long start = System.nanoTime();
    try {
        job.submit();
        if (job.monitorAndPrintJob()) {
            System.out.println("Job " + job.getJobName() + " succeeded");
            return true;
        } else {
            System.out.println("Job " + job.getJobName() + " failed");
            return false;
        }
    } catch (Throwable e) {
        System.out.println("Unexpected failure in Job " + job.getJobName());
        throw e;
    } finally {
        long end = System.nanoTime();
        System.out.println("Job " + job.getJobName() + " finished after "
                + String.format("%,d milliseconds", TimeUnit.NANOSECONDS.toMillis(end - start)));
        System.out.println();
    }
}

From source file: org.apache.jena.hadoop.rdf.stats.RdfStats.java

License: Apache License

private void runJobSequence(Job[] jobs, boolean continueOnFailure, boolean continueOnError) {
    for (int i = 0; i < jobs.length; i++) {
        Job job = jobs[i];
        try {
            boolean success = this.runJob(job);
            if (!success && !continueOnFailure)
                throw new IllegalStateException(
                        "Unable to complete job sequence because Job " + job.getJobName() + " failed");
        } catch (IllegalStateException e) {
            throw e;
        } catch (Throwable e) {
            if (!continueOnError)
                throw new IllegalStateException(
                        "Unable to complete job sequence because job " + job.getJobName() + " errorred", e);
        }
    }
}

From source file: org.apache.jena.tdbloader4.Utils.java

License: Apache License

public static void setReducers(Job job, Configuration configuration, Logger log) {
    boolean runLocal = configuration.getBoolean(Constants.OPTION_RUN_LOCAL, Constants.OPTION_RUN_LOCAL_DEFAULT);
    int num_reducers = configuration.getInt(Constants.OPTION_NUM_REDUCERS,
            Constants.OPTION_NUM_REDUCERS_DEFAULT);

    // TODO: should we comment this out and let Hadoop decide the number of reducers?
    if (runLocal) {
        if (log != null)
            log.debug("Setting number of reducers to {}", 1);
        job.setNumReduceTasks(1);
    } else {
        if (Constants.NAME_FOURTH.equals(job.getJobName())) {
            job.setPartitionerClass(TotalOrderPartitioner.class);
            num_reducers = 9 * num_reducers;
        }
        job.setNumReduceTasks(num_reducers);
        if (log != null)
            log.debug("Setting number of reducers to {}", num_reducers);
    }
}

From source file: org.apache.kylin.engine.mr.common.AbstractHadoopJob.java

License: Apache License

protected int waitForCompletion(Job job) throws IOException, InterruptedException, ClassNotFoundException {
    int retVal = 0;
    long start = System.nanoTime();
    if (isAsync) {
        job.submit();
    } else {
        job.waitForCompletion(true);
        retVal = job.isSuccessful() ? 0 : 1;
        logger.debug("Job '" + job.getJobName() + "' finished "
                + (job.isSuccessful() ? "successfully in " : "with failures.  Time taken ")
                + formatTime((System.nanoTime() - start) / 1000000L));
    }
    return retVal;
}

From source file: org.apache.metron.dataloads.bulk.LeastRecentlyUsedPruner.java

License: Apache License

public static Job createJob(Configuration conf, String table, String cf, String accessTrackerTable,
        String accessTrackerColumnFamily, Long ts) throws IOException {
    Job job = new Job(conf);
    job.setJobName("LeastRecentlyUsedPruner: Pruning " + table + ":" + cf + " since "
            + new SimpleDateFormat().format(new Date(ts)));
    System.out.println("Configuring " + job.getJobName());
    job.setJarByClass(LeastRecentlyUsedPruner.class);
    job.getConfiguration().setLong(PrunerMapper.TIMESTAMP_CONF, ts);
    job.getConfiguration().set(PrunerMapper.ACCESS_TRACKER_NAME_CONF, table);
    job.getConfiguration().set(PrunerMapper.ACCESS_TRACKER_CF_CONF, accessTrackerColumnFamily);
    job.getConfiguration().set(PrunerMapper.ACCESS_TRACKER_TABLE_CONF, accessTrackerTable);
    setupHBaseJob(job, table, cf);
    job.setNumReduceTasks(0);
    return job;
}

From source file: org.apache.metron.dataloads.bulk.ThreatIntelBulkLoader.java

License: Apache License

public static Job createJob(Configuration conf, String input, String table, String cf,
        String extractorConfigContents, long ts, HbaseConverter converter) throws IOException {
    Job job = new Job(conf);
    job.setJobName("ThreatIntelBulkLoader: " + input + " => " + table + ":" + cf);
    System.out.println("Configuring " + job.getJobName());
    job.setJarByClass(ThreatIntelBulkLoader.class);
    job.setMapperClass(org.apache.metron.dataloads.hbase.mr.BulkLoadMapper.class);
    job.setOutputFormatClass(TableOutputFormat.class);
    job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, table);
    job.getConfiguration().set(BulkLoadMapper.COLUMN_FAMILY_KEY, cf);
    job.getConfiguration().set(BulkLoadMapper.CONFIG_KEY, extractorConfigContents);
    job.getConfiguration().set(BulkLoadMapper.LAST_SEEN_KEY, "" + ts);
    job.getConfiguration().set(BulkLoadMapper.CONVERTER_KEY, converter.getClass().getName());
    job.setOutputKeyClass(ImmutableBytesWritable.class);
    job.setOutputValueClass(Put.class);
    job.setNumReduceTasks(0);
    ExtractorHandler handler = ExtractorHandler.load(extractorConfigContents);
    handler.getInputFormatHandler().set(job, new Path(input), handler.getConfig());
    return job;
}

From source file: org.apache.metron.dataloads.nonbulk.flatfile.importer.MapReduceImporter.java

License: Apache License

@Override
public void importData(EnumMap<LoadOptions, Optional<Object>> config, ExtractorHandler handler,
        Configuration hadoopConfig) throws IOException {
    String table = (String) config.get(LoadOptions.HBASE_TABLE).get();
    String cf = (String) config.get(LoadOptions.HBASE_CF).get();
    String extractorConfigContents = (String) config.get(LoadOptions.EXTRACTOR_CONFIG).get();
    Job job = Job.getInstance(hadoopConfig);
    List<String> inputs = (List<String>) config.get(LoadOptions.INPUT).get();
    job.setJobName("MapReduceImporter: " + inputs.stream().collect(Collectors.joining(",")) + " => " + table
            + ":" + cf);
    LOG.info("Configuring " + job.getJobName());
    job.setJarByClass(MapReduceImporter.class);
    job.setMapperClass(org.apache.metron.dataloads.hbase.mr.BulkLoadMapper.class);
    job.setOutputFormatClass(TableOutputFormat.class);
    job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, table);
    job.getConfiguration().set(BulkLoadMapper.COLUMN_FAMILY_KEY, cf);
    job.getConfiguration().set(BulkLoadMapper.CONFIG_KEY, extractorConfigContents);
    job.getConfiguration().set(BulkLoadMapper.CONVERTER_KEY, EnrichmentConverter.class.getName());
    job.setOutputKeyClass(ImmutableBytesWritable.class);
    job.setOutputValueClass(Put.class);
    job.setNumReduceTasks(0);
    List<Path> paths = inputs.stream().map(p -> new Path(p)).collect(Collectors.toList());
    handler.getInputFormat().set(job, paths, handler.getConfig());
    TableMapReduceUtil.initCredentials(job);
    try {
        job.waitForCompletion(true);
    } catch (Exception e) {
        throw new IllegalStateException("Unable to complete job: " + e.getMessage(), e);
    }
}

From source file: org.apache.nutch.mapreduce.NutchUtil.java

License: Apache License

public static Map<String, Object> getJobState(Job job, String... groups) {
    Map<String, Object> jobState = Maps.newHashMap();
    if (job == null) {
        return jobState;
    }

    try {
        if (job.getStatus() == null || job.isRetired()) {
            return jobState;
        }
    } catch (IOException | InterruptedException e) {
        return jobState;
    }

    jobState.put("jobName", job.getJobName());
    jobState.put("jobID", job.getJobID());

    jobState.put(Nutch.STAT_COUNTERS, getJobCounters(job, groups));

    return jobState;
}

From source file: org.apache.nutch.util.ToolUtil.java

License: Apache License

@SuppressWarnings("unchecked")
public static final void recordJobStatus(String label, Job job, Map<String, Object> results) {
    Map<String, Object> jobs = (Map<String, Object>) results.get(Nutch.STAT_JOBS);
    if (jobs == null) {
        jobs = new LinkedHashMap<String, Object>();
        results.put(Nutch.STAT_JOBS, jobs);
    }
    Map<String, Object> stats = new HashMap<String, Object>();
    Map<String, Object> countStats = new HashMap<String, Object>();
    try {
        Counters counters = job.getCounters();
        for (CounterGroup cg : counters) {
            Map<String, Object> cnts = new HashMap<String, Object>();
            countStats.put(cg.getDisplayName(), cnts);
            for (Counter c : cg) {
                cnts.put(c.getName(), c.getValue());
            }
        }
    } catch (Exception e) {
        countStats.put("error", e.toString());
    }
    stats.put(Nutch.STAT_COUNTERS, countStats);
    stats.put("jobName", job.getJobName());
    stats.put("jobID", job.getJobID());
    if (label == null) {
        label = job.getJobName();
        if (job.getJobID() != null) {
            label = label + "-" + job.getJobID();
        }
    }
    jobs.put(label, stats);
}

From source file: org.apache.parquet.avro.TestInputOutputFormat.java

License: Apache License

private void waitForJob(Job job) throws Exception {
    job.submit();
    while (!job.isComplete()) {
        LOG.debug("waiting for job " + job.getJobName());
        sleep(100);
    }
    LOG.info("status for job " + job.getJobName() + ": " + (job.isSuccessful() ? "SUCCESS" : "FAILURE"));
    if (!job.isSuccessful()) {
        throw new RuntimeException("job failed " + job.getJobName());
    }
}