Example usage for org.apache.hadoop.security UserGroupInformation doAs

List of usage examples for org.apache.hadoop.security UserGroupInformation doAs

Introduction

On this page you can find example usage of org.apache.hadoop.security.UserGroupInformation.doAs.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public <T> T doAs(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException 

Source Link

Document

Run the given action as the user, potentially throwing an exception.

Usage

From source file:org.apache.sentry.service.thrift.HiveSimpleConnectionFactory.java

License:Apache License

/**
 * Connect to HMS in unsecure mode or in Kerberos mode according to config.
 *
 * @return HMS connection/*from w  w w  .jav a  2  s .c  om*/
 * @throws IOException          if could not establish connection
 * @throws InterruptedException if connection was interrupted
 * @throws MetaException        if other errors happened
 */
public HMSClient connect() throws IOException, InterruptedException, MetaException {
    UserGroupInformation clientUGI;

    if (insecure) {
        clientUGI = UserGroupInformation.getCurrentUser();
    } else {
        clientUGI = UserGroupInformation.getUGIFromSubject(kerberosContext.getSubject());
    }
    return new HMSClient(clientUGI.doAs(new PrivilegedExceptionAction<HiveMetaStoreClient>() {
        @Override
        public HiveMetaStoreClient run() throws MetaException {
            return new HiveMetaStoreClient(hiveConf);
        }
    }));
}

From source file:org.apache.sentry.tests.e2e.hdfs.TestDbHdfsBase.java

License:Apache License

/**
 * Obtains a {@link FileSystem} handle while running as the supplied user.
 *
 * @param ugi the identity under which the file system is resolved
 * @return the default file system for a freshly created {@link Configuration}
 * @throws Exception if the privileged action fails
 */
private FileSystem getFS(UserGroupInformation ugi) throws Exception {
    PrivilegedExceptionAction<FileSystem> openFileSystem = new PrivilegedExceptionAction<FileSystem>() {
        @Override
        public FileSystem run() throws Exception {
            return FileSystem.get(new Configuration());
        }
    };
    return ugi.doAs(openFileSystem);
}

From source file:org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegration.java

License:Apache License

/**
 * Verifies that HDFS ACL synchronization tracks Sentry grants: the "hbase"
 * user must be denied read access to table p1's files until role p1_admin
 * is granted SELECT, after which reads of the table's files must succeed.
 *
 * @param stmt a statement connected as a user allowed to issue GRANTs
 * @throws Throwable on any verification failure
 */
private void verifyHDFSandMR(Statement stmt) throws Throwable {
    // hbase user should not be allowed to read...
    UserGroupInformation hbaseUgi = UserGroupInformation.createUserForTesting("hbase",
            new String[] { "hbase" });
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            try {
                miniDFS.getFileSystem().open(new Path("/user/hive/warehouse/p1/month=1/day=1/f1.txt"));
                Assert.fail("Should not be allowed !!");
            } catch (Exception e) {
                // Expect an authorization failure that names the hbase user.
                Assert.assertTrue("Wrong Error : " + e.getMessage(),
                        e.getMessage().contains("Permission denied: user=hbase"));
            }
            return null;
        }
    });

    // WordCount should fail..
    // runWordCount(new JobConf(miniMR.getConfig()), "/user/hive/warehouse/p1/month=1/day=1", "/tmp/wc_out");

    stmt.execute("grant select on table p1 to role p1_admin");

    verifyOnAllSubDirs("/user/hive/warehouse/p1", FsAction.READ_EXECUTE, "hbase", true);
    // hbase user should now be allowed to read...
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            Path p = new Path("/user/hive/warehouse/p1/month=2/day=2/f2.txt");
            List<String> lines = new ArrayList<String>();
            // try-with-resources: the original leaked the reader when the
            // size assertion failed before reaching close().
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(miniDFS.getFileSystem().open(p)))) {
                String line;
                while ((line = in.readLine()) != null) {
                    lines.add(line);
                }
            }
            Assert.assertEquals(3, lines.size());
            return null;
        }
    });

}

From source file:org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase.java

License:Apache License

/**
 * Verifies that HDFS ACL synchronization tracks Sentry grants: the "hbase"
 * user must be denied read access to table p1's files until role p1_admin
 * is granted SELECT, after which reads of the table's files must succeed.
 *
 * @param stmt a statement connected as a user allowed to issue GRANTs
 * @throws Throwable on any verification failure
 */
protected void verifyHDFSandMR(Statement stmt) throws Throwable {
    // hbase user should not be allowed to read...
    UserGroupInformation hbaseUgi = UserGroupInformation.createUserForTesting("hbase",
            new String[] { "hbase" });
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            try {
                miniDFS.getFileSystem().open(new Path("/user/hive/warehouse/p1/month=1/day=1/f1.txt"));
                Assert.fail("Should not be allowed !!");
            } catch (Exception e) {
                // Expect an authorization failure that names the hbase user.
                Assert.assertTrue("Wrong Error : " + e.getMessage(),
                        e.getMessage().contains("Permission denied: user=hbase"));
            }
            return null;
        }
    });

    // WordCount should fail..
    // runWordCount(new JobConf(miniMR.getConfig()), "/user/hive/warehouse/p1/month=1/day=1", "/tmp/wc_out");

    stmt.execute("grant select on table p1 to role p1_admin");

    verifyGroupPermOnAllSubDirs("/user/hive/warehouse/p1", FsAction.READ_EXECUTE, "hbase", true);
    // hbase user should now be allowed to read...
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            Path p = new Path("/user/hive/warehouse/p1/month=2/day=2/f2.txt");
            List<String> lines = new ArrayList<String>();
            // try-with-resources: the original leaked the reader when the
            // size assertion failed before reaching close().
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(miniDFS.getFileSystem().open(p)))) {
                String line;
                while ((line = in.readLine()) != null) {
                    lines.add(line);
                }
            }
            Assert.assertEquals(3, lines.size());
            return null;
        }
    });

}

From source file:org.apache.sentry.tests.e2e.hive.Context.java

License:Apache License

/**
 * Creates a metastore client connected as the given user, retrying once
 * after a 10 second pause in case the metastore is still initializing.
 *
 * @param userName the user to impersonate for the connection
 * @return a connected {@link HiveMetaStoreClient}
 * @throws Exception if the second connection attempt also fails
 */
public HiveMetaStoreClient getMetaStoreClient(String userName) throws Exception {
    UserGroupInformation clientUgi = UserGroupInformation.createRemoteUser(userName);
    // Single definition of the privileged connect action; the original
    // duplicated this whole anonymous class on the retry path.
    final PrivilegedExceptionAction<HiveMetaStoreClient> connectAction =
            new PrivilegedExceptionAction<HiveMetaStoreClient>() {
                @Override
                public HiveMetaStoreClient run() throws Exception {
                    return new HiveMetaStoreClient(new HiveConf());
                }
            };
    try {
        return clientUgi.doAs(connectAction);
    } catch (Throwable e) {
        // The metastore may not have finished initializing yet; wait 10s
        // and retry once before giving up.
        Thread.sleep(10 * 1000);
        return clientUgi.doAs(connectAction);
    }
}

From source file:org.apache.sentry.tests.e2e.hive.Context.java

License:Apache License

/**
 * Creates a {@link PigServer} running as the given user.
 *
 * @param userName the user to impersonate
 * @param exType   the Pig execution mode (local or MapReduce)
 * @return the new PigServer
 * @throws Exception if the privileged action fails
 */
public PigServer getPigServer(String userName, final ExecType exType) throws Exception {
    UserGroupInformation clientUgi = UserGroupInformation.createRemoteUser(userName);
    // Typed action removes the original's PrivilegedExceptionAction<Object>
    // declaration and the redundant (PigServer) cast it forced.
    return clientUgi.doAs(new PrivilegedExceptionAction<PigServer>() {
        @Override
        public PigServer run() throws Exception {
            return new PigServer(exType, new HiveConf());
        }
    });
}

From source file:org.apache.sentry.tests.e2e.hive.fs.TestFSBase.java

License:Apache License

/**
 * Obtains a {@link FileSystem} handle while running as the supplied user,
 * with the test's authentication type applied to the configuration.
 *
 * @param ugi the identity under which the file system is resolved
 * @return the default file system for the configured authentication mode
 * @throws Exception if the privileged action fails
 */
protected FileSystem getFS(UserGroupInformation ugi) throws Exception {
    PrivilegedExceptionAction<FileSystem> openFileSystem = new PrivilegedExceptionAction<FileSystem>() {
        @Override
        public FileSystem run() throws Exception {
            Configuration conf = new Configuration();
            // Propagate the suite's security mode into the client config.
            conf.set(TestFSContants.SENTRY_E2E_TEST_SECURITY_AUTH, authenticationType);
            return FileSystem.get(conf);
        }
    };
    return ugi.doAs(openFileSystem);
}

From source file:org.apache.sentry.tests.e2e.metastore.AbstractMetastoreTestWithStaticConfiguration.java

License:Apache License

/**
 * Executes a Hive SQL statement as the given user, with extra configuration
 * entries layered over a fresh {@link HiveConf}.
 *
 * @param sqlStmt  the SQL text to execute
 * @param userName the user to impersonate
 * @param overLay  config key/values applied on top of the base HiveConf
 * @throws Exception if the statement fails or the driver reports an error
 */
public void execHiveSQLwithOverlay(final String sqlStmt, final String userName, Map<String, String> overLay)
        throws Exception {
    final HiveConf hiveConf = new HiveConf();
    for (Map.Entry<String, String> entry : overLay.entrySet()) {
        hiveConf.set(entry.getKey(), entry.getValue());
    }
    UserGroupInformation clientUgi = UserGroupInformation.createRemoteUser(userName);
    // Declared as <Void> to match run()'s return type; the original
    // declared <Object> and relied on covariant overriding.
    clientUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            Driver driver = new Driver(hiveConf, userName);
            SessionState.start(new CliSessionState(hiveConf));
            try {
                CommandProcessorResponse cpr = driver.run(sqlStmt);
                if (cpr.getResponseCode() != 0) {
                    throw new IOException("Failed to execute \"" + sqlStmt + "\". Driver returned "
                            + cpr.getResponseCode() + " Error: " + cpr.getErrorMessage());
                }
            } finally {
                // Close the driver and session even when the statement
                // fails; the original leaked both on the error path.
                driver.close();
                SessionState.get().close();
            }
            return null;
        }
    });
}

From source file:org.apache.sentry.tests.e2e.metastore.AbstractMetastoreTestWithStaticConfiguration.java

License:Apache License

/**
 * Registers a Pig Latin query on the given server while running as the
 * specified user.
 *
 * @param userName  the user to impersonate
 * @param pigServer the server the query is registered on
 * @param pigLatin  the Pig Latin text to register
 * @throws Exception if the privileged action fails
 */
public void execPigLatin(String userName, final PigServer pigServer, final String pigLatin) throws Exception {
    UserGroupInformation clientUgi = UserGroupInformation.createRemoteUser(userName);
    // <Void> matches run()'s actual return type (the original used <Object>).
    clientUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            pigServer.registerQuery(pigLatin);
            return null;
        }
    });
}

From source file:org.apache.sentry.tests.e2e.metastore.TestMetastoreEndToEnd.java

License:Apache License

/**
 * Verifies that HMS {@code getAllDatabases()} results are filtered by
 * Sentry privilege: the admin and users with server- or db-wide grants see
 * all databases, while users with db- or table-scoped grants see only
 * "default" plus the databases they hold privileges in.
 */
@Test
public void testListDatabases() throws Exception {
    HashSet<String> allDatabaseNames = new HashSet<>(Arrays.asList("default", dbName, dbName2));
    HashSet<String> defaultAndDb1 = new HashSet<>(Arrays.asList("default", dbName));

    // Create databases and verify the admin can list all database names.
    HiveMetaStoreClient adminClient = context.getMetaStoreClient(ADMIN1);
    dropAllMetastoreDBIfExists(adminClient, false);
    createMetastoreDB(adminClient, dbName);
    createMetastoreDB(adminClient, dbName2);
    verifyDatabasesVisibleTo(adminClient, ADMIN1, allDatabaseNames);

    // USER1_1 has ALL at dbName: sees only default + dbName.
    verifyDatabasesVisibleTo(context.getMetaStoreClient(USER1_1), USER1_1, defaultAndDb1);

    // USER2_1 has SELECT at dbName.
    verifyDatabasesVisibleTo(context.getMetaStoreClient(USER2_1), USER2_1, defaultAndDb1);

    // USER3_1 has SELECT at dbName.tabName1 and dbName.tabName2.
    verifyDatabasesVisibleTo(context.getMetaStoreClient(USER3_1), USER3_1, defaultAndDb1);

    // USER4_1 has ALL at dbName.tabName1 and dbName2.
    verifyDatabasesVisibleTo(context.getMetaStoreClient(USER4_1), USER4_1, allDatabaseNames);

    // USER5_1 has CREATE at server scope.
    verifyDatabasesVisibleTo(context.getMetaStoreClient(USER5_1), USER5_1, allDatabaseNames);
}

/**
 * Lists databases through {@code client} while impersonating
 * {@code userName}, asserts the result matches {@code expected}, and
 * always closes the client. Fixes the original's copy-paste bug where
 * {@code client.close()} re-closed the admin client instead of closing the
 * per-user clients (USER2_1 through USER5_1 were leaked).
 *
 * @param client   the metastore client to query and close
 * @param userName the user to impersonate for the listing
 * @param expected the exact set of database names expected back
 */
private void verifyDatabasesVisibleTo(final HiveMetaStoreClient client, String userName,
        HashSet<String> expected) throws Exception {
    UserGroupInformation ugi = UserGroupInformation.createRemoteUser(userName);
    try {
        List<String> dbNames = ugi.doAs(new PrivilegedExceptionAction<List<String>>() {
            @Override
            public List<String> run() throws Exception {
                return client.getAllDatabases();
            }
        });
        assertThat(dbNames).isNotNull();
        verifyReturnedList(expected, dbNames, true);
    } finally {
        client.close();
    }
}