Example usage for org.apache.hadoop.security UserGroupInformation createProxyUser

Introduction

This page collects example usages of org.apache.hadoop.security.UserGroupInformation.createProxyUser from open-source projects.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation createProxyUser(String user, UserGroupInformation realUser) 

Document

Create a proxy user using the username of the effective user and the UGI of the real user.
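
Before the individual examples, here is a minimal sketch of the usual pattern, assuming a Kerberized cluster: the privileged (real) user logs in first, createProxyUser wraps the effective username around that UGI, and the file system calls run inside doAs. The principal, keytab path, NameNode URI, and HDFS path below are placeholders, and impersonation must also be allowed on the server side via hadoop.proxyuser.<superuser>.hosts and .groups in core-site.xml.

import java.net.URI;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class CreateProxyUserSketch {
    public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        UserGroupInformation.setConfiguration(conf);

        // Log in the real (privileged) user; principal and keytab path are placeholders.
        UserGroupInformation.loginUserFromKeytab("super@EXAMPLE.COM", "/etc/security/keytabs/super.keytab");

        // "alice" is the effective user; the current login user is the real user doing the impersonation.
        UserGroupInformation proxyUgi =
                UserGroupInformation.createProxyUser("alice", UserGroupInformation.getLoginUser());

        // Everything inside doAs runs as "alice".
        proxyUgi.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020"), conf);
                System.out.println(fs.exists(new Path("/user/alice")));
                return null;
            }
        });
    }
}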

Usage

From source file:gobblin.util.ProxiedFileSystemUtils.java

License:Apache License

private static UserGroupInformation loginAndProxyAsUser(@NonNull String userNameToProxyAs,
        @NonNull String superUserName, Path superUserKeytabLocation) throws IOException {

    if (!UserGroupInformation.getLoginUser().getUserName().equals(superUserName)) {
        Preconditions.checkNotNull(superUserKeytabLocation);
        UserGroupInformation.loginUserFromKeytab(superUserName, superUserKeytabLocation.toString());
    }
    return UserGroupInformation.createProxyUser(userNameToProxyAs, UserGroupInformation.getLoginUser());
}

From source file:gobblin.util.ProxiedFileSystemWrapper.java

License:Apache License

/**
 * Getter for proxiedFs, using the passed parameters to create an instance of a proxiedFs.
 * @param properties state containing the proxy user name and related settings.
 * @param authType is either TOKEN or KEYTAB.
 * @param authPath is the KEYTAB location if the authType is KEYTAB; otherwise, it is the token file.
 * @param uri File system URI.
 * @param conf Hadoop configuration used to open the FileSystem.
 * @throws IOException
 * @throws InterruptedException
 * @throws URISyntaxException
 * @return proxiedFs
 */
public FileSystem getProxiedFileSystem(State properties, AuthType authType, String authPath, String uri,
        final Configuration conf) throws IOException, InterruptedException, URISyntaxException {
    Preconditions.checkArgument(
            StringUtils.isNotBlank(properties.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME)),
            "State does not contain a proper proxy user name");
    String proxyUserName = properties.getProp(ConfigurationKeys.FS_PROXY_AS_USER_NAME);
    UserGroupInformation proxyUser;
    switch (authType) {
    case KEYTAB: // If the authentication type is KEYTAB, log in a super user first before creating a proxy user.
        Preconditions.checkArgument(
                StringUtils
                        .isNotBlank(properties.getProp(ConfigurationKeys.SUPER_USER_NAME_TO_PROXY_AS_OTHERS)),
                "State does not contain a proper proxy token file name");
        String superUser = properties.getProp(ConfigurationKeys.SUPER_USER_NAME_TO_PROXY_AS_OTHERS);
        UserGroupInformation.loginUserFromKeytab(superUser, authPath);
        proxyUser = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser());
        break;
    case TOKEN: // If the authentication type is TOKEN, create a proxy user and then add the token to the user.
        proxyUser = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser());
        Optional<Token<?>> proxyToken = getTokenFromSeqFile(authPath, proxyUserName);
        if (proxyToken.isPresent()) {
            proxyUser.addToken(proxyToken.get());
        } else {
            LOG.warn("No delegation token found for the current proxy user.");
        }
        break;
    default:
        LOG.warn(
                "Creating a proxy user without authentication, which could not perform File system operations.");
        proxyUser = UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser());
        break;
    }

    final URI fsURI = URI.create(uri);
    proxyUser.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws IOException {
            LOG.debug("Now performing file system operations as :" + UserGroupInformation.getCurrentUser());
            proxiedFs = FileSystem.get(fsURI, conf);
            return null;
        }
    });
    return this.proxiedFs;
}
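
The TOKEN branch above pulls the delegation token out of a Gobblin-specific sequence file via getTokenFromSeqFile. When tokens are stored in Hadoop's standard token-storage format instead (written with Credentials.writeTokenStorageFile), a hedged sketch of the same idea could attach everything in the file to the proxy user at once; the helper name and token file path below are placeholders.

// Sketch only; requires org.apache.hadoop.security.Credentials and org.apache.hadoop.fs.Path.
private UserGroupInformation createProxyUserWithTokens(String proxyUserName, Configuration conf)
        throws IOException {
    UserGroupInformation proxyUser =
            UserGroupInformation.createProxyUser(proxyUserName, UserGroupInformation.getLoginUser());
    // Read a token file previously written with Credentials#writeTokenStorageFile (placeholder path)
    // and add every token (and secret key) in it to the proxy UGI.
    Credentials credentials =
            Credentials.readTokenStorageFile(new Path("/tmp/" + proxyUserName + ".token"), conf);
    proxyUser.addCredentials(credentials);
    return proxyUser;
}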

From source file:joshelser.TUGIAssumingProcessor.java

License:Apache License

@Override
public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException {
    TTransport trans = inProt.getTransport();
    if (!(trans instanceof TSaslServerTransport)) {
        throw new TException("Unexpected non-SASL transport " + trans.getClass());
    }
    TSaslServerTransport saslTrans = (TSaslServerTransport) trans;
    SaslServer saslServer = saslTrans.getSaslServer();
    String authId = saslServer.getAuthorizationID();
    String endUser = authId;

    UserGroupInformation clientUgi = null;
    try {
        clientUgi = UserGroupInformation.createProxyUser(endUser, UserGroupInformation.getLoginUser());
        final String remoteUser = clientUgi.getShortUserName();
        log.debug("Executing action as {}", remoteUser);
        return clientUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
            @Override
            public Boolean run() {
                try {
                    return wrapped.process(inProt, outProt);
                } catch (TException te) {
                    throw new RuntimeException(te);
                }
            }
        });
    } catch (RuntimeException rte) {
        if (rte.getCause() instanceof TException) {
            log.error("Failed to invoke wrapped processor", rte.getCause());
            throw (TException) rte.getCause();
        }
        throw rte;
    } catch (InterruptedException | IOException e) {
        log.error("Failed to invoke wrapped processor", e);
        throw new RuntimeException(e);
    } finally {
        if (clientUgi != null) {
            try {
                FileSystem.closeAllForUGI(clientUgi);
            } catch (IOException exception) {
                log.error("Could not clean up file-system handles for UGI: {}", clientUgi, exception);
            }
        }
    }
}
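
The finally block above matters because FileSystem.get() caches one instance per (scheme, authority, UGI): a long-lived server that builds a fresh proxy UGI for every request would otherwise keep accumulating cached FileSystem handles. A minimal sketch of that per-request pattern in isolation (the endUser value is a placeholder):

// Sketch: per-request proxy UGI with explicit cleanup of the FileSystem cache.
private boolean processAsProxy(final String endUser) throws IOException, InterruptedException {
    UserGroupInformation requestUgi =
            UserGroupInformation.createProxyUser(endUser, UserGroupInformation.getLoginUser());
    try {
        return requestUgi.doAs(new PrivilegedExceptionAction<Boolean>() {
            @Override
            public Boolean run() {
                // ... handle the request; any FileSystem.get() here is cached against this UGI ...
                return true;
            }
        });
    } finally {
        // Release the FileSystem instances that were cached for this UGI.
        FileSystem.closeAllForUGI(requestUgi);
    }
}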

From source file:org.apache.accumulo.server.rpc.UGIAssumingProcessor.java

License:Apache License

@Override
public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException {
    TTransport trans = inProt.getTransport();
    if (!(trans instanceof TSaslServerTransport)) {
        throw new TException("Unexpected non-SASL transport " + trans.getClass() + ": " + trans);
    }
    TSaslServerTransport saslTrans = (TSaslServerTransport) trans;
    SaslServer saslServer = saslTrans.getSaslServer();
    String authId = saslServer.getAuthorizationID();
    String endUser = authId;

    SaslMechanism mechanism;
    try {
        mechanism = SaslMechanism.get(saslServer.getMechanismName());
    } catch (Exception e) {
        log.error("Failed to process RPC with SASL mechanism {}", saslServer.getMechanismName());
        throw e;
    }

    switch (mechanism) {
    case GSSAPI:
        UserGroupInformation clientUgi = UserGroupInformation.createProxyUser(endUser, loginUser);
        final String remoteUser = clientUgi.getUserName();

        try {
            // Set the principal in the ThreadLocal for access to get authorizations
            rpcPrincipal.set(remoteUser);

            return wrapped.process(inProt, outProt);
        } finally {
            // Unset the principal after we're done using it just to be sure that it's not incorrectly
            // used in the same thread down the line.
            rpcPrincipal.set(null);
        }
    case DIGEST_MD5:
        // The CallbackHandler, after deserializing the TokenIdentifier in the name, has already updated
        // the rpcPrincipal for us. We don't need to do it again here.
        try {
            rpcMechanism.set(mechanism);
            return wrapped.process(inProt, outProt);
        } finally {
            // Unset the mechanism after we're done using it just to be sure that it's not incorrectly
            // used in the same thread down the line.
            rpcMechanism.set(null);
        }
    default:
        throw new IllegalArgumentException("Cannot process SASL mechanism " + mechanism);
    }
}

From source file:org.apache.ambari.view.filebrowser.HdfsApi.java

License:Apache License

/**
 * Constructor
 * @param defaultFs hdfs uri
 * @param username user.name
 * @param params map of parameters
 * @throws IOException
 * @throws InterruptedException
 */
public HdfsApi(final String defaultFs, String username, Map<String, String> params)
        throws IOException, InterruptedException {
    this.params = params;
    conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
    conf.set("fs.webhdfs.impl", "org.apache.hadoop.hdfs.web.WebHdfsFileSystem");
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");

    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());

    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
            return FileSystem.get(URI.create(defaultFs), conf);
        }
    });
}
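
The Ambari HdfsApi constructors on this page call a getProxyUser() helper that is not included in the listing. Purely as a hypothetical sketch of what such a helper typically resolves to (the parameter keys below are illustrative, not the actual Ambari ones): in a simple-auth setup it is the current login user, and in a Kerberized setup a UGI logged in from the view's keytab.

// Hypothetical helper, not the actual Ambari code: pick the "real" UGI that will impersonate
// the end user. The parameter keys are illustrative placeholders.
private UserGroupInformation getProxyUser() throws IOException {
    String principal = params.get("proxyuser.principal");
    String keytab = params.get("proxyuser.keytab");
    if (principal != null && keytab != null) {
        // Kerberos: log the service principal in from its keytab and use that UGI as the real user.
        return UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
    }
    // Simple auth: fall back to whoever is currently logged in to this JVM.
    return UserGroupInformation.getLoginUser();
}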

From source file:org.apache.ambari.view.hive.utils.HdfsApi.java

License:Apache License

/**
 * Constructor
 * @param defaultFs hdfs uri
 * @param username user.name
 * @throws java.io.IOException
 * @throws InterruptedException
 */
public HdfsApi(final String defaultFs, String username, Map<String, String> params)
        throws IOException, InterruptedException {
    this.params = params;
    Thread.currentThread().setContextClassLoader(null);
    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
            return FileSystem.get(URI.create(defaultFs), conf);
        }
    });
}

From source file:org.apache.ambari.view.pig.utils.HdfsApi.java

License:Apache License

/**
 * Constructor
 * @param defaultFs hdfs uri
 * @param username user.name
 * @param params map of parameters
 * @throws IOException
 * @throws InterruptedException
 */
public HdfsApi(final String defaultFs, String username, Map<String, String> params)
        throws IOException, InterruptedException {
    this.params = params;

    Thread.currentThread().setContextClassLoader(null);
    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
    conf.set("fs.webhdfs.impl", WebHdfsFileSystem.class.getName());
    conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());
    fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
            return FileSystem.get(URI.create(defaultFs), conf);
        }
    });
}

From source file:org.apache.ambari.view.slider.SliderAppsViewControllerImpl.java

License:Apache License

private <T> T invokeHDFSClientRunnable(final HDFSClientRunnable<T> runnable,
        final Map<String, String> hadoopConfigs) throws IOException, InterruptedException {
    ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
    try {
        boolean securityEnabled = Boolean.valueOf(hadoopConfigs.get("security_enabled"));
        final HdfsConfiguration hdfsConfiguration = new HdfsConfiguration();
        for (Entry<String, String> entry : hadoopConfigs.entrySet()) {
            hdfsConfiguration.set(entry.getKey(), entry.getValue());
        }
        UserGroupInformation.setConfiguration(hdfsConfiguration);
        UserGroupInformation sliderUser;
        String loggedInUser = getUserToRunAs(hadoopConfigs);
        if (securityEnabled) {
            String viewPrincipal = getViewParameterValue(PARAM_VIEW_PRINCIPAL);
            String viewPrincipalKeytab = getViewParameterValue(PARAM_VIEW_PRINCIPAL_KEYTAB);
            UserGroupInformation ambariUser = UserGroupInformation
                    .loginUserFromKeytabAndReturnUGI(viewPrincipal, viewPrincipalKeytab);
            if (loggedInUser.equals(ambariUser.getShortUserName())) {
                // HDFS throws an exception when the caller tries to impersonate themselves, e.g.:
                // User: admin@EXAMPLE.COM is not allowed to impersonate admin
                sliderUser = ambariUser;
            } else {
                sliderUser = UserGroupInformation.createProxyUser(loggedInUser, ambariUser);
            }
        } else {
            sliderUser = UserGroupInformation.getBestUGI(null, loggedInUser);
        }
        try {
            T value = sliderUser.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    String fsPath = hadoopConfigs.get("fs.defaultFS");
                    FileSystem fs = FileSystem.get(URI.create(fsPath), hdfsConfiguration);
                    try {
                        return runnable.run(fs);
                    } finally {
                        fs.close();
                    }
                }
            });
            return value;
        } catch (UndeclaredThrowableException e) {
            throw e;
        }
    } finally {
        Thread.currentThread().setContextClassLoader(currentClassLoader);
    }
}

From source file:org.apache.ambari.view.slider.SliderAppsViewControllerImpl.java

License:Apache License

private <T> T invokeSliderClientRunnable(final SliderClientContextRunnable<T> runnable)
        throws IOException, InterruptedException, YarnException {
    ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();
    Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
    try {
        boolean securityEnabled = Boolean.valueOf(getHadoopConfigs().get("security_enabled"));
        UserGroupInformation.setConfiguration(getSliderClientConfiguration());
        UserGroupInformation sliderUser;
        String loggedInUser = getUserToRunAs();
        if (securityEnabled) {
            String viewPrincipal = getViewParameterValue(PARAM_VIEW_PRINCIPAL);
            String viewPrincipalKeytab = getViewParameterValue(PARAM_VIEW_PRINCIPAL_KEYTAB);
            UserGroupInformation ambariUser = UserGroupInformation
                    .loginUserFromKeytabAndReturnUGI(viewPrincipal, viewPrincipalKeytab);
            if (loggedInUser.equals(ambariUser.getShortUserName())) {
                // HDFS throws an exception when the caller tries to impersonate themselves, e.g.:
                // User: admin@EXAMPLE.COM is not allowed to impersonate admin
                sliderUser = ambariUser;
            } else {
                sliderUser = UserGroupInformation.createProxyUser(loggedInUser, ambariUser);
            }
        } else {
            sliderUser = UserGroupInformation.getBestUGI(null, loggedInUser);
        }
        try {
            T value = sliderUser.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    final SliderClient sliderClient = createSliderClient();
                    try {
                        return runnable.run(sliderClient);
                    } finally {
                        destroySliderClient(sliderClient);
                    }
                }
            });
            return value;
        } catch (UndeclaredThrowableException e) {
            Throwable cause = e.getCause();
            if (cause instanceof YarnException) {
                YarnException ye = (YarnException) cause;
                throw ye;
            }
            throw e;
        }
    } finally {
        Thread.currentThread().setContextClassLoader(currentClassLoader);
    }
}

From source file:org.apache.ambari.view.utils.hdfs.HdfsApi.java

License:Apache License

/**
 * Constructor
 * @param configurationBuilder hdfs configuration builder
 * @throws IOException
 * @throws InterruptedException
 */
public HdfsApi(ConfigurationBuilder configurationBuilder, String username)
        throws IOException, InterruptedException, HdfsApiException {
    this.authParams = configurationBuilder.buildAuthenticationConfig();
    conf = configurationBuilder.buildConfig();
    ugi = UserGroupInformation.createProxyUser(username, getProxyUser());

    fs = execute(new PrivilegedExceptionAction<FileSystem>() {
        public FileSystem run() throws IOException {
            return FileSystem.get(conf);
        }
    });
}
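
Finally, for unit tests that need a proxy UGI without a Kerberos login or a real group-mapping service, UserGroupInformation also provides createProxyUserForTesting. A minimal sketch, with placeholder user and group names:

// Sketch: build a proxy UGI with a fixed group list for tests; no Kerberos login required.
// Requires org.apache.hadoop.security.UserGroupInformation.
UserGroupInformation realUser = UserGroupInformation.createRemoteUser("super");
UserGroupInformation proxyUgi =
        UserGroupInformation.createProxyUserForTesting("alice", realUser, new String[] { "users" });

System.out.println(proxyUgi.getShortUserName());          // alice
System.out.println(proxyUgi.getRealUser().getUserName()); // super
System.out.println(proxyUgi.getAuthenticationMethod());   // PROXY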