Example usage for org.apache.hadoop.mapreduce Job getJobName

List of usage examples for org.apache.hadoop.mapreduce Job getJobName

Introduction

On this page you can find example usage for org.apache.hadoop.mapreduce Job getJobName.

Prototype

public String getJobName() 

Source Link

Document

The user-specified job name.
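
A minimal sketch of typical usage is shown below (the class name JobNameExample and the job name "job-name-example" are placeholders, not from the sources on this page); getJobName() simply returns the name supplied when the Job was created, or set later via setJobName:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;

public class JobNameExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // The second argument to Job.getInstance() becomes the user-specified job name.
        Job job = Job.getInstance(conf, "job-name-example");
        // getJobName() returns that name; it is commonly used in log and error
        // messages, as the usage examples below illustrate.
        System.out.println("Job name: " + job.getJobName());
    }
}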

Usage

From source file:org.trend.hgraph.mapreduce.pagerank.Utils.java

License:Apache License

/**
 * Set token if authentication enabled.
 * @param job the job whose configuration receives the authentication token
 * @param logger logger used to report failures while obtaining the token
 */
static void setAuthenticationToken(Job job, Logger logger) {
    if (User.isSecurityEnabled()) {
        try {
            User.getCurrent().obtainAuthTokenForJob(job.getConfiguration(), job);
        } catch (IOException e) {
            String msg = job.getJobName() + ": Failed to obtain current user.";
            logger.error(msg);
            throw new IllegalStateException(msg, e);
        } catch (InterruptedException ie) {
            logger.warn(job.getJobName() + ": Interrupted obtaining user authentication token");
        }
    }
}

From source file:org.trend.hgraph.util.test.GetRandomRowsByRegions.java

License:Apache License

@Override
public int run(String[] args) throws Exception {
    if (null == args || args.length < 2) {
        System.err.println("options shall not be empty or not equal to 2");
        printUsage();
        return -1;
    }
    String cmd = null;
    int mustStartIdx = -1;
    int bypassRows = Mapper.BYPASS_ROW_SIZE_DEFAULT;
    int tarSampleSize = Mapper.TARGET_SAMPLE_SIZE_DEFAULT;
    for (int a = 0; a < args.length; a++) {
        cmd = args[a];
        if (cmd.startsWith("-")) {
            if (mustStartIdx > -1) {
                System.err.println("The option order is incorrect !!");
                printUsage();
                return -1;
            }

            if ("-b".equals(cmd) || "-bypass-row-size".equals(cmd)) {
                a++;
                cmd = args[a];
                try {
                    bypassRows = Integer.parseInt(cmd);
                } catch (NumberFormatException e) {
                    System.err.println("parsing bypass rows failed, value:" + cmd);
                    printUsage();
                    return 1;
                }
            } else if ("-t".equals(cmd) || "-target-sample-size".equals(cmd)) {
                a++;
                cmd = args[a];
                try {
                    tarSampleSize = Integer.parseInt(cmd);
                } catch (NumberFormatException e) {
                    System.err.println("parsing target sample size failed, value:" + tarSampleSize);
                    printUsage();
                    return 1;
                }
            } else {
                System.err.println("option:" + cmd + " is undefined !!");
                printUsage();
                return -1;
            }

        } else {
            if (mustStartIdx == -1) {
                mustStartIdx = a;
            }
        }
    }

    String tableName = args[mustStartIdx];
    String outputPath = args[mustStartIdx + 1];

    LOGGER.info("tableName=" + tableName);
    LOGGER.info("outputPath=" + outputPath);

    Configuration conf = this.getConf();
    conf.setInt(Mapper.BYPASS_ROW_SIZE_NAME, bypassRows);
    conf.setInt(Mapper.TARGET_SAMPLE_SIZE_NAME, tarSampleSize);
    Job job = createSubmittableJob(conf, tableName, outputPath);
    boolean success = job.waitForCompletion(true);
    if (!success) {
        System.err.println("run job:" + job.getJobName() + " failed");
        return -1;
    }
    return 0;
}

From source file:reconcile.hbase.mapreduce.JobConfig.java

License:Open Source License

/**
 * Method to properly initialize the Mapper/Reducer jobs based on options
 *
 * @param LOG logger used to report the configured scan settings
 * @param job the job whose configuration and mapper/reducer are set up
 * @param scan the HBase scan to configure; must not be null
 * @param mapClass the DocMapper implementation to use for the map phase
 * @throws IOException if the table mapper cannot be initialized
 */
public void initTableMapperNoReducer(Log LOG, Job job, Scan scan, Class<? extends DocMapper<?>> mapClass)
        throws IOException {
    Preconditions.checkNotNull(scan);
    job.setJobName(job.getJobName() + "(" + argString + ")");
    if (source != null) {
        job.getConfiguration().set(SOURCE_CONF, source);
    }
    if (table != null) {
        job.getConfiguration().set(TABLE_CONF, table);
    }
    if (keyListFile != null) {
        job.getConfiguration().set(KEY_LIST_CONF, keyListFile);
    }
    if (startRow != null) {
        job.getConfiguration().set(START_ROW_CONF, startRow);
        if (scan != null) {
            scan.setStartRow(Bytes.toBytes(startRow));
        }
    }
    if (stopRow != null) {
        job.getConfiguration().set(STOP_ROW_CONF, stopRow);
        if (scan != null) {
            scan.setStopRow(Bytes.toBytes(stopRow));
        }
    }
    if (timeStamp != null) {
        job.getConfiguration().set(TIME_STAMP_CONF, timeStamp.toString());
        if (scan != null)
            scan.setTimeStamp(timeStamp.longValue());
    }
    if (startTime != null && stopTime != null) {
        LOG.info("Setting startTime(" + startTime + ") stopTime(" + stopTime + ")");
        job.getConfiguration().set(START_TIME_CONF, startTime.toString());
        job.getConfiguration().set(STOP_TIME_CONF, stopTime.toString());
        if (scan != null)
            scan.setTimeRange(startTime.longValue(), stopTime.longValue());
    }

    if (keyListFile == null) {
        LOG.info("Setting scan for source name(" + source + ") table(" + table + ")");
        initTableMapperForSource(job, scan, mapClass);
    } else {
        LOG.info("Operating solely on keys in HDFS file (" + keyListFile + ") source(" + source + ") table("
                + table + ")");
        initTableMapperForKeyList(job, mapClass);
    }

    StringBuilder families = new StringBuilder();
    if (scan.getFamilies() != null) {
        for (byte[] family : scan.getFamilies()) {
            families.append(Bytes.toString(family) + " ");
        }
    }
    StringBuilder other = new StringBuilder();
    if (scan.getFamilyMap() != null) {
        Map<byte[], NavigableSet<byte[]>> map = scan.getFamilyMap();
        if (map != null) {
            for (byte[] family : map.keySet()) {
                if (family == null) {
                    continue;
                }
                other.append(Bytes.toString(family) + "[");
                if (map.get(family) != null) {
                    for (byte[] qual : map.get(family)) {
                        other.append(Bytes.toString(qual) + ",");
                    }
                }
                other.append("]; ");
            }
        }
    }

    String familiesValue = families.toString();
    LOG.info("Setting scan retrieve families to (" + familiesValue + ")");
    LOG.info("Setting scan retrieve families/quals to (" + other.toString() + ")");
    job.getConfiguration().set(KeyListInputFormat.SCAN_FAMILIES, familiesValue);

    TableMapReduceUtil.initTableReducerJob(getTableName(), IdentityTableReducer.class, job);
    job.setNumReduceTasks(0);
}