Example usage for org.apache.hadoop.security UserGroupInformation getCurrentUser

List of usage examples for org.apache.hadoop.security UserGroupInformation getCurrentUser

Introduction

On this page you can find example usage of org.apache.hadoop.security UserGroupInformation getCurrentUser.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getCurrentUser() throws IOException 

Source Link

Document

Return the current user, including any doAs in the current stack.
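
Before the examples below, here is a minimal, self-contained sketch of a direct call (the class name is illustrative, not taken from any of the projects listed here), printing the resolved user:

import java.io.IOException;
import org.apache.hadoop.security.UserGroupInformation;

public class CurrentUserExample {
    public static void main(String[] args) throws IOException {
        // Resolves the effective user, honoring any enclosing doAs() on the call stack.
        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
        System.out.println("Current user : " + ugi.getUserName());
        System.out.println("Short name   : " + ugi.getShortUserName());
        System.out.println("Secure mode  : " + UserGroupInformation.isSecurityEnabled());
    }
}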

Usage

From source file:org.apache.falcon.catalog.HiveCatalogService.java

License:Apache License

/**
 * This is used from within an Oozie job.
 *
 * @param conf conf object
 * @param metastoreUrl metastore uri
 * @return hive metastore client handle
 * @throws FalconException
 */
private static HiveMetaStoreClient createClient(Configuration conf, String metastoreUrl)
        throws FalconException {
    try {
        LOG.info("Creating HCatalog client object for metastore {} using conf {}", metastoreUrl,
                conf.toString());
        final Credentials credentials = getCredentials(conf);
        Configuration jobConf = credentials != null ? copyCredentialsToConf(conf, credentials) : conf;
        HiveConf hcatConf = createHiveConf(jobConf, metastoreUrl);

        if (UserGroupInformation.isSecurityEnabled()) {
            hcatConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname,
                    conf.get(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname));
            hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true");

            UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
            ugi.addCredentials(credentials); // credentials cannot be null
        }

        return new HiveMetaStoreClient(hcatConf);
    } catch (Exception e) {
        throw new FalconException("Exception creating HiveMetaStoreClient: " + e.getMessage(), e);
    }
}

From source file:org.apache.falcon.hive.util.EventUtils.java

License:Apache License

public void setupConnection() throws Exception {
    Class.forName(DRIVER_NAME);
    DriverManager.setLoginTimeout(TIMEOUT_IN_SECS);
    String authTokenString = ";auth=delegationToken";
    //To bypass findbugs check, need to store empty password in Properties.
    Properties password = new Properties();
    password.put("password", "");
    String user = "";

    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    if (currentUser != null) {
        user = currentUser.getShortUserName();
    }

    if (conf.get(HiveDRArgs.EXECUTION_STAGE.getName())
            .equalsIgnoreCase(HiveDRUtils.ExecutionStage.EXPORT.name())) {
        String authString = null;
        if (StringUtils.isNotEmpty(conf.get(HiveDRArgs.SOURCE_HIVE2_KERBEROS_PRINCIPAL.getName()))) {
            authString = authTokenString;
        }

        String connString = getSourceHS2ConnectionUrl(authString);
        sourceConnection = DriverManager.getConnection(connString, user, password.getProperty("password"));
        sourceStatement = sourceConnection.createStatement();
    } else {
        String authString = null;
        if (StringUtils.isNotEmpty(conf.get(HiveDRArgs.TARGET_HIVE2_KERBEROS_PRINCIPAL.getName()))) {
            authString = authTokenString;
        }
        String connString = getTargetHS2ConnectionUrl(authString);
        targetConnection = DriverManager.getConnection(connString, user, password.getProperty("password"));
        targetStatement = targetConnection.createStatement();
    }
}

From source file:org.apache.falcon.hive.util.HiveMetastoreUtils.java

License:Apache License

private static HiveConf createHiveConf(Configuration conf, String metastoreUrl, String metastorePrincipal,
        String hive2Principal) throws IOException {
    JobConf jobConf = new JobConf(conf);
    String delegationToken = HiveDRUtils.getFilePathFromEnv("HADOOP_TOKEN_FILE_LOCATION");
    if (delegationToken != null) {
        Credentials credentials = Credentials.readTokenStorageFile(new File(delegationToken), conf);
        jobConf.setCredentials(credentials);
        UserGroupInformation.getCurrentUser().addCredentials(credentials);
    }

    HiveConf hcatConf = new HiveConf(jobConf, HiveConf.class);

    hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, metastoreUrl);
    hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, 3);
    hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
    hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");

    hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
    hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
    hcatConf.set(HiveConf.ConfVars.HIVE_REPL_TASK_FACTORY.varname, EximReplicationTaskFactory.class.getName());
    if (StringUtils.isNotEmpty(metastorePrincipal)) {
        hcatConf.set(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname, metastorePrincipal);
        hcatConf.set(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname, "true");
        hcatConf.set(HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI.varname, "true");
        hcatConf.set("hadoop.rpc.protection", "authentication");
    }
    if (StringUtils.isNotEmpty(hive2Principal)) {
        hcatConf.set(HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname, hive2Principal);
        hcatConf.set(HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname, "kerberos");
    }

    return hcatConf;
}

From source file:org.apache.falcon.resource.channel.HTTPChannel.java

License:Apache License

@SuppressWarnings("unchecked")
@Override
public <T> T invoke(String methodName, Object... args) throws FalconException {
    HttpServletRequest incomingRequest = null;
    try {
        Method method = getMethod(service, methodName, args);
        String urlPrefix = getFalconEndPoint();
        final String url = urlPrefix + "/" + pathValue(method, args);
        LOG.debug("Executing {}", url);

        incomingRequest = getIncomingRequest(args);
        incomingRequest.getInputStream().reset();
        String httpMethod = getHttpMethod(method);
        String mimeType = getConsumes(method);
        String accept = MediaType.WILDCARD;
        final String user = CurrentUser.getUser();

        String doAsUser = incomingRequest.getParameter(DO_AS_PARAM);
        WebResource resource = getClient().resource(UriBuilder.fromUri(url).build().normalize())
                .queryParam("user.name", user);
        if (doAsUser != null) {
            resource = resource.queryParam("doAs", doAsUser);
        }

        AuthenticatedURL.Token authenticationToken = null;
        if (SecurityUtil.isSecurityEnabled()) {
            UserGroupInformation ugiLoginUser = UserGroupInformation.getCurrentUser();
            LOG.debug("Security is enabled. Using DoAs : " + ugiLoginUser.getUserName());
            authenticationToken = ugiLoginUser.doAs(new PrivilegedExceptionAction<AuthenticatedURL.Token>() {
                @Override
                public AuthenticatedURL.Token run() throws Exception {
                    return getToken(url + PseudoAuthenticator.USER_NAME + "=" + user, getClient());
                }
            });
        }

        ClientResponse response = resource.header("Cookie", AUTH_COOKIE_EQ + authenticationToken).accept(accept)
                .type(mimeType).method(httpMethod, ClientResponse.class,
                        (isPost(httpMethod) ? incomingRequest.getInputStream() : null));
        incomingRequest.getInputStream().reset();

        Family status = response.getClientResponseStatus().getFamily();
        if (status == Family.INFORMATIONAL || status == Family.SUCCESSFUL) {
            return (T) response.getEntity(method.getReturnType());
        } else if (response.getClientResponseStatus().getStatusCode() == Response.Status.BAD_REQUEST
                .getStatusCode()) {
            LOG.error("Request failed: {}", response.getClientResponseStatus().getStatusCode());
            throw FalconWebException.newAPIException(response.getEntity(APIResult.class).getMessage());
        } else {
            LOG.error("Request failed: {}", response.getClientResponseStatus().getStatusCode());
            throw new FalconException(response.getEntity(String.class));
        }
    } catch (FalconWebException falconWebException) {
        LOG.error("Request failed", falconWebException);
        throw falconWebException;
    } catch (Throwable e) {
        LOG.error("Request failed", e);
        throw new FalconException(e);
    } finally {
        try {
            if (incomingRequest != null) {
                incomingRequest.getInputStream().reset();
            }
        } catch (IOException e) {
            LOG.error("Error in HTTPChannel", e);
        }
    }
}

From source file:org.apache.falcon.security.CurrentUser.java

License:Apache License

/**
 * Dole out a UGI object for the current authenticated user if authenticated
 * else return current user.
 *
 * @return UGI object
 * @throws java.io.IOException
 */
public static UserGroupInformation getAuthenticatedUGI() throws IOException {
    return CurrentUser.isAuthenticated() ? createProxyUGI(getAuthenticatedUser())
            : UserGroupInformation.getCurrentUser();
}

From source file:org.apache.falcon.security.CurrentUser.java

License:Apache License

/**
 * Dole out a proxy UGI object for the current authenticated user if authenticated
 * else return current user.
 *
 * @return UGI object
 * @throws java.io.IOException
 */
public static UserGroupInformation getProxyUGI() throws IOException {
    return CurrentUser.isAuthenticated() ? createProxyUGI(getUser()) : UserGroupInformation.getCurrentUser();
}

From source file:org.apache.flink.api.java.hadoop.mapred.HadoopInputFormatBase.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    super.read(in);

    String hadoopInputFormatClassName = in.readUTF();
    String keyClassName = in.readUTF();
    String valueClassName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.mapredInputFormat = (org.apache.hadoop.mapred.InputFormat<K, V>) Class
                .forName(hadoopInputFormatClassName, true, Thread.currentThread().getContextClassLoader())
                .newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop input format", e);
    }
    try {
        this.keyClass = (Class<K>) Class.forName(keyClassName, true,
                Thread.currentThread().getContextClassLoader());
    } catch (Exception e) {
        throw new RuntimeException("Unable to find key class.", e);
    }
    try {
        this.valueClass = (Class<V>) Class.forName(valueClassName, true,
                Thread.currentThread().getContextClassLoader());
    } catch (Exception e) {
        throw new RuntimeException("Unable to find value class.", e);
    }
    ReflectionUtils.setConf(mapredInputFormat, jobConf);

    jobConf.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobConf.getCredentials().addAll(currentUserCreds);
    }
}
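
The Flink formats in this and the following examples merge the current user's credentials into the job configuration. A minimal sketch of that underlying pattern using only public Hadoop APIs, assuming plain ugi.getCredentials() is an acceptable stand-in for Flink's internal getCredentialsFromUGI helper (the class and method names are illustrative):

import java.io.IOException;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public class MergeUserCredentialsExample {
    // Copies any tokens and secrets held by the current user into the job configuration.
    static void addCurrentUserCredentials(JobConf jobConf) throws IOException {
        Credentials currentUserCreds = UserGroupInformation.getCurrentUser().getCredentials();
        if (currentUserCreds != null) {
            jobConf.getCredentials().addAll(currentUserCreds);
        }
    }
}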

From source file:org.apache.flink.api.java.hadoop.mapred.HadoopOutputFormatBase.java

License:Apache License

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    super.read(in);
    String hadoopOutputFormatName = in.readUTF();
    if (jobConf == null) {
        jobConf = new JobConf();
    }
    jobConf.readFields(in);
    try {
        this.mapredOutputFormat = (org.apache.hadoop.mapred.OutputFormat<K, V>) Class
                .forName(hadoopOutputFormatName, true, Thread.currentThread().getContextClassLoader())
                .newInstance();
    } catch (Exception e) {
        throw new RuntimeException("Unable to instantiate the hadoop output format", e);
    }
    ReflectionUtils.setConf(mapredOutputFormat, jobConf);

    jobConf.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobConf.getCredentials().addAll(currentUserCreds);
    }
}

From source file:org.apache.flink.api.java.hadoop.mapreduce.HadoopInputFormatBase.java

License:Apache License

@Override
public HadoopInputSplit[] createInputSplits(int minNumSplits) throws IOException {
    configuration.setInt("mapreduce.input.fileinputformat.split.minsize", minNumSplits);

    JobContext jobContext;
    try {
        jobContext = HadoopUtils.instantiateJobContext(configuration, new JobID());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    jobContext.getCredentials().addAll(this.credentials);
    Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
    if (currentUserCreds != null) {
        jobContext.getCredentials().addAll(currentUserCreds);
    }

    List<org.apache.hadoop.mapreduce.InputSplit> splits;
    try {
        splits = this.mapreduceInputFormat.getSplits(jobContext);
    } catch (InterruptedException e) {
        throw new IOException("Could not get Splits.", e);
    }
    HadoopInputSplit[] hadoopInputSplits = new HadoopInputSplit[splits.size()];

    for (int i = 0; i < hadoopInputSplits.length; i++) {
        hadoopInputSplits[i] = new HadoopInputSplit(i, splits.get(i), jobContext);
    }
    return hadoopInputSplits;
}

From source file:org.apache.flink.api.java.hadoop.mapreduce.HadoopOutputFormatBase.java

License:Apache License

/**
 * create the temporary output file for hadoop RecordWriter.
 * @param taskNumber The number of the parallel instance.
 * @param numTasks The number of parallel tasks.
 * @throws java.io.IOException
 */
@Override
public void open(int taskNumber, int numTasks) throws IOException {

    // enforce sequential open() calls
    synchronized (OPEN_MUTEX) {
        if (Integer.toString(taskNumber + 1).length() > 6) {
            throw new IOException("Task id too large.");
        }

        this.taskNumber = taskNumber + 1;

        // for hadoop 2.2
        this.configuration.set("mapreduce.output.basename", "tmp");

        TaskAttemptID taskAttemptID = TaskAttemptID.forName("attempt__0000_r_" + String
                .format("%" + (6 - Integer.toString(taskNumber + 1).length()) + "s", " ").replace(" ", "0")
                + Integer.toString(taskNumber + 1) + "_0");

        this.configuration.set("mapred.task.id", taskAttemptID.toString());
        this.configuration.setInt("mapred.task.partition", taskNumber + 1);
        // for hadoop 2.2
        this.configuration.set("mapreduce.task.attempt.id", taskAttemptID.toString());
        this.configuration.setInt("mapreduce.task.partition", taskNumber + 1);

        try {
            this.context = HadoopUtils.instantiateTaskAttemptContext(this.configuration, taskAttemptID);
            this.outputCommitter = this.mapreduceOutputFormat.getOutputCommitter(this.context);
            this.outputCommitter.setupJob(HadoopUtils.instantiateJobContext(this.configuration, new JobID()));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        this.context.getCredentials().addAll(this.credentials);
        Credentials currentUserCreds = getCredentialsFromUGI(UserGroupInformation.getCurrentUser());
        if (currentUserCreds != null) {
            this.context.getCredentials().addAll(currentUserCreds);
        }

        // compatible for hadoop 2.2.0, the temporary output directory is different from hadoop 1.2.1
        if (outputCommitter instanceof FileOutputCommitter) {
            this.configuration.set("mapreduce.task.output.dir",
                    ((FileOutputCommitter) this.outputCommitter).getWorkPath().toString());
        }

        try {
            this.recordWriter = this.mapreduceOutputFormat.getRecordWriter(this.context);
        } catch (InterruptedException e) {
            throw new IOException("Could not create RecordWriter.", e);
        }
    }
}