Example usage for org.apache.hadoop.mapred JobContext getConfiguration

Introduction

On this page you can find example usage for org.apache.hadoop.mapred JobContext getConfiguration.

Prototype

public Configuration getConfiguration();

Document

Return the configuration for the job.
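
As a minimal sketch of the call itself (the class name and property key below are invented for illustration and are not part of Hadoop):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobContext;

public final class JobContextConfigExample {

    private JobContextConfigExample() {
    }

    // Reads an illustrative property from the job's configuration;
    // "example.output.path" is a made-up key, not a Hadoop constant.
    public static String resolveOutputPath(JobContext context) {
        Configuration conf = context.getConfiguration();
        return conf.get("example.output.path", "/tmp/example-output");
    }
}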

Usage

From source file: com.asakusafw.runtime.stage.output.LegacyBridgeOutputCommitter.java

License: Apache License

private org.apache.hadoop.mapreduce.TaskAttemptContext toTaskAttemptContext(JobContext jobContext) {
    assert jobContext != null;
    // NOTE: only in Hadoop 2.x can JobContext be an instance of TaskAttemptContext.
    if (TaskAttemptContext.class.isInstance(jobContext)) {
        return TaskAttemptContext.class.cast(jobContext);
    }
    Progressable progressable = jobContext.getProgressible();
    if (progressable == null) {
        LOG.warn(MessageFormat.format("JobContext has no progressable object: {0}",
                jobContext.getClass().getName()));
    }
    if (LOG.isDebugEnabled()) {
        LOG.debug(MessageFormat.format("Progressable object is found (jobId={0}, object={1})", //$NON-NLS-1$
                jobContext.getJobID(), progressable));
    }
    TaskAttemptID id = JobCompatibility.newTaskAttemptId(JobCompatibility.newTaskId(jobContext.getJobID()));
    return JobCompatibility.newTaskAttemptContext(jobContext.getConfiguration(), id, progressable);
}
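
This snippet is a compatibility shim: on Hadoop 2.x the incoming JobContext may already be a TaskAttemptContext, in which case it is cast and returned directly; otherwise a fresh TaskAttemptID is derived from the job ID and getConfiguration() supplies the Configuration for the newly built context.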

From source file: com.vertica.hivestoragehandler.VerticaOutputFormat.java

License: Apache License

/** {@inheritDoc} */
public void checkOutputSpecs(JobContext context) throws IOException {
    checkOutputSpecs(new VerticaConfiguration(context.getConfiguration()));
}
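
The one-liner above shows a common pattern: the raw Configuration returned by getConfiguration() is wrapped in a storage-specific view with typed getters. A rough sketch of such a wrapper (the class and key names here are invented for illustration, not Vertica's actual API):

import org.apache.hadoop.conf.Configuration;

// Hypothetical typed view over a job Configuration, in the
// spirit of the VerticaConfiguration wrapper used above.
public class ExampleStoreConfiguration {

    private final Configuration conf;

    public ExampleStoreConfiguration(Configuration conf) {
        this.conf = conf;
    }

    // Illustrative keys with defaults; not real Vertica properties.
    public String getHost() {
        return conf.get("example.store.host", "localhost");
    }

    public int getPort() {
        return conf.getInt("example.store.port", 5433);
    }
}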

From source file: org.apache.blur.hive.BlurHiveMRLoaderOutputCommitter.java

License: Apache License

private void finishBulkJob(JobContext context, final boolean apply) throws IOException {
    final Configuration configuration = context.getConfiguration();
    PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            String workingPathStr = configuration.get(BlurConstants.BLUR_BULK_UPDATE_WORKING_PATH);
            Path workingPath = new Path(workingPathStr);
            Path tmpDir = new Path(workingPath, "tmp");
            FileSystem fileSystem = tmpDir.getFileSystem(configuration);
            String loadId = configuration.get(BlurSerDe.BLUR_MR_LOAD_ID);
            Path loadPath = new Path(tmpDir, loadId);

            if (apply) {
                Path newDataPath = new Path(workingPath, "new");
                Path dst = new Path(newDataPath, loadId);
                if (!fileSystem.rename(loadPath, dst)) {
                    LOG.error("Could not move data from src [" + loadPath + "] to dst [" + dst + "]");
                    throw new IOException(
                            "Could not move data from src [" + loadPath + "] to dst [" + dst + "]");
                }

                TableDescriptor tableDescriptor = BlurOutputFormat.getTableDescriptor(configuration);
                String connectionStr = configuration.get(BlurSerDe.BLUR_CONTROLLER_CONNECTION_STR);
                BulkTableUpdateCommand bulkTableUpdateCommand = new BulkTableUpdateCommand();
                bulkTableUpdateCommand.setAutoLoad(true);
                bulkTableUpdateCommand.setTable(tableDescriptor.getName());
                bulkTableUpdateCommand.setWaitForDataBeVisible(true);

                Configuration config = new Configuration(false);
                config.addResource(HDFS_SITE_XML);
                config.addResource(YARN_SITE_XML);
                config.addResource(MAPRED_SITE_XML);

                bulkTableUpdateCommand.addExtraConfig(config);
                if (bulkTableUpdateCommand.run(BlurClient.getClient(connectionStr)) != 0) {
                    throw new IOException("Unknown error occurred during load.");
                }
            } else {
                fileSystem.delete(loadPath, true);
            }
            return null;
        }
    };
    UserGroupInformation userGroupInformation = BlurHiveOutputFormat.getUGI(configuration);
    try {
        userGroupInformation.doAs(action);
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
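
Note how the committer reads getConfiguration() once, then performs all filesystem work inside UserGroupInformation.doAs(...). A self-contained sketch of that pattern (the helper class is hypothetical, and getCurrentUser() stands in for however the UGI is actually obtained):

import java.io.IOException;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public final class DoAsDeleteExample {

    private DoAsDeleteExample() {
    }

    public static void deleteAsUser(final Configuration conf, final Path path)
            throws IOException, InterruptedException {
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                // Calls made inside run() execute as the wrapped user.
                FileSystem fs = path.getFileSystem(conf);
                fs.delete(path, true);
                return null;
            }
        });
    }
}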

From source file: org.apache.blur.hive.BlurHiveOutputCommitter.java

License: Apache License

private void finishBulkJob(JobContext context, boolean apply) throws IOException {
    Configuration configuration = context.getConfiguration();
    String connectionStr = configuration.get(BLUR_CONTROLLER_CONNECTION_STR);
    boolean blocking = configuration.getBoolean(BLUR_BLOCKING_APPLY, false);
    Iface client = BlurClient.getClient(connectionStr);
    String bulkId = BlurHiveOutputFormat.getBulkId(configuration);
    try {
        client.bulkMutateFinish(bulkId, apply, blocking);
    } catch (BlurException e) {
        throw new IOException(e);
    } catch (TException e) {
        throw new IOException(e);
    }
}
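
Here getConfiguration() acts as the hand-off between job setup and commit time: the controller connection string, the blocking flag, and the bulk id were stashed in the Configuration when the job was configured and are read back when the bulk mutation is finished.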

From source file: org.apache.hive.hcatalog.mapreduce.HCatMapRedUtil.java

License: Apache License

public static org.apache.hadoop.mapred.JobContext createJobContext(
        org.apache.hadoop.mapreduce.JobContext context) {
    return createJobContext((JobConf) context.getConfiguration(), context.getJobID(), Reporter.NULL);
}
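
The cast of getConfiguration() to JobConf relies on the context having been created from a JobConf, as it is on the mapred (old-API) code paths; the old-API JobContext also exposes the same object directly via getJobConf().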