Example usage for org.apache.hadoop.security UserGroupInformation doAs

Introduction

On this page you can find example usage for org.apache.hadoop.security UserGroupInformation doAs.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException 
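
Note that UserGroupInformation also provides a sibling overload that takes a PrivilegedAction and declares no checked exceptions; the ClientCache and ApplicationMaster examples below use it:

public <T> T doAs(PrivilegedAction<T> action)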

Document

Run the given action as the user, potentially throwing an exception.
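
The pattern common to all of the examples below is: obtain a UserGroupInformation instance (from a keytab login, a proxy user, or a remote user), then wrap the Hadoop calls in a PrivilegedExceptionAction passed to doAs. A minimal, self-contained sketch follows; the principal and keytab path are hypothetical placeholders, not values from the examples:

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class DoAsExample {
    public static void main(String[] args) throws Exception {
        final Configuration conf = new Configuration();
        UserGroupInformation.setConfiguration(conf);
        // Hypothetical principal and keytab path; substitute your own
        final UserGroupInformation ugi = UserGroupInformation
                .loginUserFromKeytabAndReturnUGI("hdfs@EXAMPLE.COM", "/etc/security/keytabs/hdfs.keytab");
        // Everything inside run() executes with the Kerberos credentials of ugi
        FileStatus[] statuses = ugi.doAs(new PrivilegedExceptionAction<FileStatus[]>() {
            @Override
            public FileStatus[] run() throws Exception {
                return FileSystem.get(conf).listStatus(new Path("/user"));
            }
        });
        for (FileStatus status : statuses) {
            System.out.println(status.getPath());
        }
    }
}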

Usage

From source file:com.thinkbiganalytics.datalake.authorization.SentryAuthorizationService.java

License:Apache License

@Override
public void deleteHdfsPolicy(String categoryName, String feedName, List<String> hdfsPaths) {

    /*
     * Delete ACL from list of HDFS Paths
     */
    if (this.sentryConnection.getKerberosTicketConfiguration().isKerberosEnabled()) {
        try {

            UserGroupInformation ugi = authenticatePolicyCreatorWithKerberos();
            // Proceed only if Kerberos authentication succeeded
            if (ugi != null) {
                ugi.doAs(new PrivilegedExceptionAction<Void>() {
                    @Override
                    public Void run() throws Exception {
                        String allPathForAclDeletion = convertListToString(hdfsPaths, ",");
                        try {
                            sentryClientObject.flushACL(sentryConnection.getHadoopConfiguration(),
                                    allPathForAclDeletion);
                        } catch (Exception e) {
                            log.error("Unable to remove ACL from HDFS Paths" + e.getMessage());
                            throw new RuntimeException(e);
                        }

                        return null;
                    }

                });
            }
        } catch (Exception e) {
            log.error("Failed to clear HDFS ACL policy with Kerberos" + e.getMessage());
            throw new RuntimeException(e);
        }
    } else {
        String allPathForAclDeletion = convertListToString(hdfsPaths, ",");
        try {
            sentryClientObject.flushACL(sentryConnection.getHadoopConfiguration(), allPathForAclDeletion);
        } catch (Exception e) {
            log.error("Unable to remove ACL from HDFS Paths" + e.getMessage());
            throw new RuntimeException(e);
        }

    }
}

From source file:com.thinkbiganalytics.kerberos.KerberosUtil.java

License:Apache License

/**
 * Executes the specified action using the optional Kerberos ticket.
 */
public static <T> T runWithOrWithoutKerberos(@Nonnull final Callable<T> action,
        @Nonnull final KerberosTicketConfiguration kerberosTicketConfiguration) {
    try {
        if (kerberosTicketConfiguration.isKerberosEnabled()) {
            log.debug("Running action with Kerberos ticket");
            final KerberosTicketGenerator kerberosTicketGenerator = new KerberosTicketGenerator();
            final UserGroupInformation userGroupInformation = kerberosTicketGenerator
                    .generateKerberosTicket(kerberosTicketConfiguration);
            return userGroupInformation.doAs(new PrivilegedExceptionAction<T>() {
                @Override
                public T run() throws Exception {
                    return action.call();
                }
            });
        } else {
            log.debug("Running action without Kerberos");
            return action.call();
        }
    } catch (final Exception e) {
        throw Throwables.propagate(e);
    }
}
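
A hypothetical call site for the helper above might look like this; fetchDatabaseNames is a placeholder for the actual work and is not part of the original source:

public List<String> listDatabases(final KerberosTicketConfiguration kerberosConfig) {
    return KerberosUtil.runWithOrWithoutKerberos(new Callable<List<String>>() {
        @Override
        public List<String> call() throws Exception {
            return fetchDatabaseNames(); // hypothetical helper doing the real Hive/HDFS work
        }
    }, kerberosConfig);
}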

From source file:com.thinkbiganalytics.kerberos.TestKerberosKinit.java

License:Apache License

private void testHdfsWithUserImpersonation(final String configResources, final String keytab,
        final String principal, String proxyUser, final String environment, final String hdfsUrl) {
    final String path = "/user";
    try {
        final Configuration configuration = TestKerberosKinit.createConfigurationFromList(configResources);
        UserGroupInformation realugi = TestKerberosKinit.generateKerberosTicket(configuration, keytab,
                principal);
        System.out.println(" ");
        System.out.println("Sucessfully got a kerberos ticket in the JVM");
        System.out.println("current user is: " + realugi.getUserName());

        UserGroupInformation ugiProxy = UserGroupInformation.createProxyUser(proxyUser, realugi);
        System.out.println("proxy user is: " + ugiProxy.getUserName());
        ugiProxy.doAs(new PrivilegedExceptionAction<Object>() {
            public Object run() {
                try {
                    searchHDFS(configuration, environment, path, hdfsUrl);
                } catch (Exception e) {
                    throw new RuntimeException("Error testing HDFS with Kerberos Hive Impersonation", e);
                }
                return null;
            }
        });

    } catch (Exception e) {
        System.out.println("Error testing HDFS\n\n");
        e.printStackTrace();
    }
}

From source file:com.thinkbiganalytics.kerberos.TestKerberosKinit.java

License:Apache License

private void testHdfsAsKerberosUser(final String configResources, final String keytab, final String principal,
        final String environment, final String hdfsUrl) {
    final String path = "/user";
    try {
        final Configuration configuration = TestKerberosKinit.createConfigurationFromList(configResources);
        UserGroupInformation realugi = TestKerberosKinit.generateKerberosTicket(configuration, keytab,
                principal);
        System.out.println(" ");
        System.out.println("Sucessfully got a kerberos ticket in the JVM");
        System.out.println("current user is: " + realugi.getUserName());

        realugi.doAs(new PrivilegedExceptionAction<Object>() {
            public Object run() {
                try {
                    searchHDFS(configuration, environment, path, hdfsUrl);
                } catch (Exception e) {
                    throw new RuntimeException("Error testing HDFS with Kerberos", e);
                }
                return null;
            }
        });

    } catch (Exception e) {
        System.out.println("Error testing HDFS\n\n");
        e.printStackTrace();
    }
}

From source file:com.thinkbiganalytics.kerberos.TestKerberosKinit.java

License:Apache License

private void testHiveJdbcConnection(final String configResources, final String keytab,
        final String realUserPrincipal, final String proxyUser, final String hiveHostName) throws Exception {

    final Configuration configuration = TestKerberosKinit.createConfigurationFromList(configResources);
    UserGroupInformation realugi = TestKerberosKinit.generateKerberosTicket(configuration, keytab,
            realUserPrincipal);

    System.out.println(" ");
    System.out.println("Sucessfully got a kerberos ticket in the JVM");

    HiveConnection realUserConnection = (HiveConnection) realugi
            .doAs(new PrivilegedExceptionAction<Connection>() {
                public Connection run() {
                    Connection connection = null;
                    Statement stmt = null;
                    ResultSet res = null;
                    try {
                        Class.forName(DRIVER_NAME);
                        String url = hiveHostName;
                        if (proxyUser != null) {
                            url = url + ";hive.server2.proxy.user=" + proxyUser;
                        }
                        System.out.println("Hive URL: " + url);
                        connection = DriverManager.getConnection(url);

                        System.out.println("creating statement");
                        stmt = connection.createStatement();

                        String sql = "show databases";
                        res = stmt.executeQuery(sql);
                        System.out.println(" \n");
                        System.out.println("Executing the Hive Query:");
                        System.out.println(" ");

                        System.out.println("List of Databases");
                        while (res.next()) {
                            System.out.println(res.getString(1));
                        }

                    } catch (Exception e) {
                        throw new RuntimeException("Error creating connection with proxy user", e);
                    } finally {
                        JdbcUtils.closeResultSet(res);
                        JdbcUtils.closeStatement(stmt);
                        JdbcUtils.closeConnection(connection);

                    }
                    return connection;
                }
            });

}

From source file:com.thinkbiganalytics.nifi.security.ApplySecurityPolicy.java

License:Apache License

protected FileSystem getFileSystemAsUser(final Configuration config, UserGroupInformation ugi)
        throws IOException {
    try {
        return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            @Override
            public FileSystem run() throws Exception {
                return FileSystem.get(config);
            }
        });
    } catch (InterruptedException e) {
        throw new IOException("Unable to create file system: " + e.getMessage());
    }
}

From source file:com.trendmicro.hdfs.webdav.test.MiniClusterTestUtil.java

License:Apache License

public void startHDFSWebDAVServlet(UserGroupInformation gatewayUser) throws Exception {
    gatewayPort = getConfiguration().getInt("hadoop.webdav.port", DEFAULT_GATEWAY_PORT);
    while (true)
        try {
            gatewayUser.doAs(new PrivilegedExceptionAction<Void>() {
                public Void run() throws Exception {
                    startServletServer(gatewayPort);
                    return null;
                }
            });
            break;
        } catch (Exception e) {
            LOG.info("Unable to start Jetty on port " + gatewayPort, e);
            gatewayPort++;
        }
    getConfiguration().setInt("hadoop.webdav.port", gatewayPort);
}

From source file:com.twitter.hraven.hadoopJobMonitor.rpc.ClientCache.java

License:Apache License

protected MRClientProtocol instantiateHistoryProxy() throws IOException {
    final String serviceAddr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS);
    if (StringUtils.isEmpty(serviceAddr)) {
        return null;
    }
    LOG.debug("Connecting to HistoryServer at: " + serviceAddr);
    final YarnRPC rpc = YarnRPC.create(conf);
    LOG.debug("Connected to HistoryServer at: " + serviceAddr);
    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    return currentUser.doAs(new PrivilegedAction<MRClientProtocol>() {
        @Override
        public MRClientProtocol run() {
            return (MRClientProtocol) rpc.getProxy(HSClientProtocol.class,
                    NetUtils.createSocketAddr(serviceAddr), conf);
        }
    });
}

From source file:edu.umn.cs.spatialHadoop.visualization.HadoopvizServer.java

License:Open Source License

/**
 * Visualizes a dataset.
 * @param request
 * @param response
 */
private void handleVisualize(HttpServletRequest request, HttpServletResponse response) {
    try {
        String pathStr = request.getParameter("path");
        final Path path = new Path(pathStr);
        FileSystem fs = path.getFileSystem(commonParams);
        // Check if the input is already visualized
        final Path imagePath = new Path(path, "_data.png");
        if (fs.exists(imagePath)) {
            // Image is already visualized
            response.setStatus(HttpServletResponse.SC_MOVED_PERMANENTLY);
            response.setHeader("Location", "/hdfs" + imagePath);
        } else {
            // This dataset has never been visualized before
            String shapeName = request.getParameter("shape");
            final OperationsParams vizParams = new OperationsParams(commonParams);
            vizParams.set("shape", shapeName);
            vizParams.setBoolean("background", true);
            vizParams.setInt("width", 2000);
            vizParams.setInt("height", 2000);

            // Retrieve the owner of the data directory
            String owner = fs.getFileStatus(path).getOwner();
            UserGroupInformation ugi = UserGroupInformation.createRemoteUser(owner);
            Job vizJob = ugi.doAs(new PrivilegedExceptionAction<Job>() {
                public Job run() throws Exception {
                    return GeometricPlot.plot(new Path[] { path }, imagePath, vizParams);
                }
            });

            // Write the response
            response.setStatus(HttpServletResponse.SC_OK);
            response.setContentType("application/json;charset=utf-8");
            PrintWriter out = response.getWriter();
            out.printf("{\"JobID\":\"%s\", \"TrackURL\": \"%s\"}", vizJob.getJobID().toString(),
                    vizJob.getTrackingURL());
            out.close();
        }
    } catch (Exception e) {
        System.out.println("error happened");
        e.printStackTrace();
        try {
            e.printStackTrace(response.getWriter());
        } catch (IOException ioe) {
            ioe.printStackTrace();
            e.printStackTrace();
        }
        response.setContentType("text/plain;charset=utf-8");
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
    }
}

From source file:eu.stratosphere.yarn.ApplicationMaster.java

License:Apache License

public static void main(String[] args) throws Exception {
    final String yarnClientUsername = System.getenv(Client.ENV_CLIENT_USERNAME);
    LOG.info("YARN daemon runs as '" + UserGroupInformation.getCurrentUser().getShortUserName() + "' setting"
            + " user to execute Stratosphere ApplicationMaster/JobManager to '" + yarnClientUsername + "'");
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(yarnClientUsername);
    for (Token<? extends TokenIdentifier> token : UserGroupInformation.getCurrentUser().getTokens()) {
        ugi.addToken(token);
    }
    ugi.doAs(new PrivilegedAction<Object>() {
        @Override
        public Object run() {
            try {
                new ApplicationMaster().run();
            } catch (Exception e) {
                e.printStackTrace();
            }
            return null;
        }
    });
}