Example usage for org.apache.hadoop.security UserGroupInformation createUserForTesting

Introduction

This page collects example usages of org.apache.hadoop.security.UserGroupInformation.createUserForTesting from open-source projects.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation createUserForTesting(String user, String[] userGroups) 

Document

Create a UGI for testing HDFS and MapReduce.
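
Before the project examples, here is a minimal sketch of the typical pattern, not taken from any of the projects listed under Usage: create a test UGI and run Hadoop filesystem calls under it with doAs. The class name, the user "alice", the group "testers", and the path being checked are placeholders; the only assumption is that a Hadoop client (hadoop-common plus an HDFS client) is on the classpath.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class CreateUserForTestingSketch {
    public static void main(String[] args) throws Exception {
        // Build an in-memory test identity; no Kerberos login or OS account is involved.
        UserGroupInformation alice = UserGroupInformation.createUserForTesting("alice",
                new String[] { "testers" });

        // Code inside doAs runs with "alice" as the current Hadoop user; a FileSystem
        // obtained here carries that identity, which is what (Mini)DFS permission
        // checks evaluate.
        alice.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                FileSystem fs = FileSystem.get(new Configuration());
                System.out.println("/tmp exists: " + fs.exists(new Path("/tmp")));
                return null;
            }
        });
    }
}

The examples below use the same call to impersonate an HDFS superuser, a hive or hbase service user, or a plain unprivileged user inside MiniDFS-based tests.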

Usage

From source file:org.apache.sentry.hdfs.TestSentryINodeAttributesProvider.java

License:Apache License

@Before
public void setUp() throws Exception {
    admin = UserGroupInformation.createUserForTesting(System.getProperty("user.name"),
            new String[] { "supergroup" });
    admin.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA, "target/test/data");
            Configuration conf = new HdfsConfiguration();
            conf.setBoolean("sentry.authorization-provider.include-hdfs-authz-as-acl", true);
            conf.set(DFSConfigKeys.DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY,
                    MockSentryINodeAttributesProvider.class.getName());
            conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
            EditLogFileOutputStream.setShouldSkipFsyncForTesting(true);
            miniDFS = new MiniDFSCluster.Builder(conf).build();
            return null;
        }
    });
}

From source file:org.apache.sentry.tests.e2e.hdfs.TestDbHdfsBase.java

License:Apache License

private static void createGgis() throws Exception {
    if (dfsType.equals(DFSType.MiniDFS.name())) {
        adminUgi = UserGroupInformation.createUserForTesting(System.getProperty("user.name"),
                new String[] { "supergroup" });
        hiveUgi = UserGroupInformation.createUserForTesting("hive", new String[] { "hive" });
    } else if (dfsType.equals(DFSType.ClusterDFS.name())) {
        adminUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI("hdfs",
                KEYTAB_LOCATION + "/hdfs.keytab");
        hiveUgi = UserGroupInformation.loginUserFromKeytabAndReturnUGI("hive",
                KEYTAB_LOCATION + "/hive.keytab");
    }
}

From source file:org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegration.java

License:Apache License

@BeforeClass
public static void setup() throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    baseDir = Files.createTempDir();
    policyFileLocation = new File(baseDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);
    PolicyFile policyFile = PolicyFile.setAdminOnServer1("hive")
            .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    policyFile.write(policyFileLocation);

    adminUgi = UserGroupInformation.createUserForTesting(System.getProperty("user.name"),
            new String[] { "supergroup" });

    hiveUgi = UserGroupInformation.createUserForTesting("hive", new String[] { "hive" });

    // Create SentryService and its internal objects.
    // Set Sentry port
    createSentry();

    // Create hive-site.xml that contains the metastore uri
    // it is used by HMSFollower
    configureHiveAndMetastoreForSentry();

    // Start SentryService after Hive configuration hive-site.xml is available
    // So HMSFollower can contact metastore using its URI
    startSentry();

    // Start HDFS and MR with Sentry Port. Set fsURI
    startDFSandYARN();

    // Configure Hive and Metastore with Sentry Port and fsURI
    // Read src/test/resources/sentry-site.xml.
    // Create hive-site.xml and sentry-site.xml used by Hive.
    HiveConf hiveConf = configureHiveAndMetastore();

    // Start Hive and Metastore after SentryService is started
    startHiveAndMetastore(hiveConf);
}

From source file:org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegration.java

License:Apache License

private void verifyAccessToPath(String user, String group, String path, boolean hasPermission)
        throws Exception {
    Path p = new Path(path);
    UserGroupInformation hadoopUser = UserGroupInformation.createUserForTesting(user, new String[] { group });
    FileSystem fs = DFSTestUtil.getFileSystemAs(hadoopUser, hadoopConf);
    try {
        fs.listFiles(p, true);
        if (!hasPermission) {
            Assert.fail("Expected listing files to fail");
        }
    } catch (Exception e) {
        if (hasPermission) {
            throw e;
        }
    }
}

From source file:org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegration.java

License:Apache License

private void verifyHDFSandMR(Statement stmt) throws Throwable {
    // hbase user should not be allowed to read...
    UserGroupInformation hbaseUgi = UserGroupInformation.createUserForTesting("hbase",
            new String[] { "hbase" });
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            try {
                miniDFS.getFileSystem().open(new Path("/user/hive/warehouse/p1/month=1/day=1/f1.txt"));
                Assert.fail("Should not be allowed !!");
            } catch (Exception e) {
                Assert.assertEquals("Wrong Error : " + e.getMessage(), true,
                        e.getMessage().contains("Permission denied: user=hbase"));
            }
            return null;
        }
    });

    // WordCount should fail..
    // runWordCount(new JobConf(miniMR.getConfig()), "/user/hive/warehouse/p1/month=1/day=1", "/tmp/wc_out");

    stmt.execute("grant select on table p1 to role p1_admin");

    verifyOnAllSubDirs("/user/hive/warehouse/p1", FsAction.READ_EXECUTE, "hbase", true);
    // hbase user should now be allowed to read...
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            Path p = new Path("/user/hive/warehouse/p1/month=2/day=2/f2.txt");
            BufferedReader in = new BufferedReader(new InputStreamReader(miniDFS.getFileSystem().open(p)));
            String line = null;
            List<String> lines = new ArrayList<String>();
            do {
                line = in.readLine();
                if (line != null) {
                    lines.add(line);
                }
            } while (line != null);
            Assert.assertEquals(3, lines.size());
            in.close();
            return null;
        }
    });

}

From source file:org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase.java

License:Apache License

protected void verifyHDFSandMR(Statement stmt) throws Throwable {
    // hbase user should not be allowed to read...
    UserGroupInformation hbaseUgi = UserGroupInformation.createUserForTesting("hbase",
            new String[] { "hbase" });
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            try {
                miniDFS.getFileSystem().open(new Path("/user/hive/warehouse/p1/month=1/day=1/f1.txt"));
                Assert.fail("Should not be allowed !!");
            } catch (Exception e) {
                Assert.assertEquals("Wrong Error : " + e.getMessage(), true,
                        e.getMessage().contains("Permission denied: user=hbase"));
            }
            return null;
        }
    });

    // WordCount should fail..
    // runWordCount(new JobConf(miniMR.getConfig()), "/user/hive/warehouse/p1/month=1/day=1", "/tmp/wc_out");

    stmt.execute("grant select on table p1 to role p1_admin");

    verifyGroupPermOnAllSubDirs("/user/hive/warehouse/p1", FsAction.READ_EXECUTE, "hbase", true);
    // hbase user should now be allowed to read...
    hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            Path p = new Path("/user/hive/warehouse/p1/month=2/day=2/f2.txt");
            BufferedReader in = new BufferedReader(new InputStreamReader(miniDFS.getFileSystem().open(p)));
            String line = null;
            List<String> lines = new ArrayList<String>();
            do {
                line = in.readLine();
                if (line != null) {
                    lines.add(line);
                }
            } while (line != null);
            Assert.assertEquals(3, lines.size());
            in.close();
            return null;
        }
    });

}

From source file:org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase.java

License:Apache License

@BeforeClass
public static void setup() throws Exception {
    Class.forName("org.apache.hive.jdbc.HiveDriver");
    baseDir = Files.createTempDir();
    policyFileLocation = new File(baseDir, HiveServerFactory.AUTHZ_PROVIDER_FILENAME);
    PolicyFile policyFile = PolicyFile.setAdminOnServer1("hive")
            .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    policyFile.write(policyFileLocation);

    adminUgi = UserGroupInformation.createUserForTesting(System.getProperty("user.name"),
            new String[] { "supergroup" });

    hiveUgi = UserGroupInformation.createUserForTesting("hive", new String[] { "hive" });

    sentryUgi = UserGroupInformation.createUserForTesting("sentry", new String[] { "sentry" });

    // Create SentryService and its internal objects.
    // Set Sentry port
    createSentry();

    // Create hive-site.xml that contains the metastore uri
    // it is used by HMSFollower
    configureHiveAndMetastoreForSentry();

    // Start SentryService after Hive configuration hive-site.xml is available
    // So HMSFollower can contact metastore using its URI
    startSentry();

    // Start HDFS and MR with Sentry Port. Set fsURI
    startDFSandYARN();

    // Configure Hive and Metastore with Sentry Port and fsURI
    // Read src/test/resources/sentry-site.xml.
    // Create hive-site.xml and sentry-site.xml used by Hive.
    HiveConf hiveConf = configureHiveAndMetastore();

    // Start Hive and Metastore after SentryService is started
    startHiveAndMetastore(hiveConf);
}

From source file:org.apache.sentry.tests.e2e.solr.db.integration.AbstractSolrSentryTestWithDbProvider.java

License:Apache License

public static void configureWithSolr() throws Exception {
    conf.set(ServerConfig.SECURITY_USE_UGI_TRANSPORT, "true");
    //save configuration to sentry-site.xml
    conf.writeXml(new FileOutputStream(sentrySitePath));
    setSystemProperties();
    extraRequestFilters = new TreeMap<Class, String>(new Comparator<Class>() {
        // There's only one class, make this as simple as possible
        @Override
        public int compare(Class o1, Class o2) {
            return 0;
        }

        @Override
        public boolean equals(Object obj) {
            return true;
        }
    });
    extraRequestFilters.put(ModifiableUserAuthenticationFilter.class, "*");

    // Set "solr" as the login user and add it to the "solr" group
    addGroupsToUser("solr", "solr");
    UserGroupInformation
            .setLoginUser(UserGroupInformation.createUserForTesting("solr", new String[] { "solr" }));
}

From source file:org.apache.sentry.tests.e2e.solr.SolrSentryServiceTestBase.java

License:Apache License

@BeforeClass
public static void setupClass() throws Exception {
    Path testDataPath = createTempDir("solr-integration-db-");

    try {
        sentrySvc = new TestSentryServer(testDataPath, getUserGroupMappings());
        sentrySvc.startSentryService();
        sentryClient = sentrySvc.connectToSentryService();
        log.info("Successfully started Sentry service");
    } catch (Exception ex) {
        log.error("Unexpected exception while starting Sentry service", ex);
        throw ex;
    }

    for (int i = 0; i < 4; i++) {
        sentryClient.createRole(TestSentryServer.ADMIN_USER, "role" + i, COMPONENT_SOLR);
        sentryClient.grantRoleToGroups(TestSentryServer.ADMIN_USER, "role" + i, COMPONENT_SOLR,
                Collections.singleton("group" + i));
    }

    log.info("Successfully created roles in Sentry service");

    sentryClient.createRole(TestSentryServer.ADMIN_USER, ADMIN_ROLE, COMPONENT_SOLR);
    sentryClient.grantRoleToGroups(TestSentryServer.ADMIN_USER, ADMIN_ROLE, COMPONENT_SOLR,
            Collections.singleton(TestSentryServer.ADMIN_GROUP));
    grantAdminPrivileges(TestSentryServer.ADMIN_USER, ADMIN_ROLE, SolrConstants.ALL, SolrConstants.ALL);

    log.info("Successfully granted admin privileges to " + ADMIN_ROLE);

    System.setProperty(SENTRY_SITE_LOC_SYSPROP, sentrySvc.getSentrySitePath().toString());

    // Set "solr" as the login user and add it to the "solr" group.
    // Note - Solr/Sentry unit tests don't use Hadoop authentication framework. Hence the
    // UserGroupInformation is not available when the request is being processed by the Solr server.
    // The document level security search component requires this UserGroupInformation while querying
    // the roles associated with the user. Please refer to implementation of
    // SentryGenericProviderBackend#getRoles(...) method. Hence this is a workaround to satisfy this requirement.
    UserGroupInformation
            .setLoginUser(UserGroupInformation.createUserForTesting("solr", new String[] { "solr" }));

    try {
        configureCluster(NUM_SERVERS).withSecurityJson(TEST_PATH().resolve("security").resolve("security.json"))
                .addConfig("cloud-minimal",
                        TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
                .addConfig("cloud-managed",
                        TEST_PATH().resolve("configsets").resolve("cloud-managed").resolve("conf"))
                .addConfig("cloud-minimal_doc_level_security",
                        TEST_PATH().resolve("configsets").resolve("cloud-minimal_doc_level_security")
                                .resolve("conf"))
                .addConfig("cloud-minimal_subset_match",
                        TEST_PATH().resolve("configsets").resolve("cloud-minimal_subset_match").resolve("conf"))
                .addConfig("cloud-minimal_subset_match_missing_false",
                        TEST_PATH().resolve("configsets").resolve("cloud-minimal_subset_match_missing_false")
                                .resolve("conf"))
                .addConfig("cloud-minimal_abac",
                        TEST_PATH().resolve("configsets").resolve("cloud-minimal_abac").resolve("conf"))
                .configure();
        log.info("Successfully started Solr service");

    } catch (Exception ex) {
        log.error("Unexpected exception while starting SolrCloud", ex);
        throw ex;
    }

    log.info("Successfully setup Solr with Sentry service");
}

From source file:org.apache.tez.common.security.TestACLManager.java

License:Apache License

@Test(timeout = 5000)
public void testCurrentUserACLChecks() {
    UserGroupInformation currentUser = UserGroupInformation.createUserForTesting("currentUser", noGroups);
    UserGroupInformation dagUser = UserGroupInformation.createUserForTesting("dagUser", noGroups);
    UserGroupInformation user1 = UserGroupInformation.createUserForTesting("user1", noGroups);

    ACLManager aclManager = new ACLManager(currentUser.getShortUserName());

    UserGroupInformation user = user1;

    Assert.assertFalse(aclManager.checkAccess(user, ACLType.AM_VIEW_ACL));
    Assert.assertFalse(aclManager.checkAccess(user, ACLType.AM_MODIFY_ACL));

    user = currentUser;
    Assert.assertTrue(aclManager.checkAccess(user, ACLType.AM_VIEW_ACL));
    Assert.assertTrue(aclManager.checkAccess(user, ACLType.AM_MODIFY_ACL));

    aclManager = new ACLManager(currentUser.getShortUserName(), new Configuration(false));

    user = user1;
    Assert.assertFalse(aclManager.checkAccess(user, ACLType.AM_VIEW_ACL));
    Assert.assertFalse(aclManager.checkAccess(user, ACLType.AM_MODIFY_ACL));

    user = currentUser;
    Assert.assertTrue(aclManager.checkAccess(user, ACLType.AM_VIEW_ACL));
    Assert.assertTrue(aclManager.checkAccess(user, ACLType.AM_MODIFY_ACL));

    ACLManager dagAclManager = new ACLManager(aclManager, dagUser.getShortUserName(), new Configuration(false));
    user = dagUser;
    Assert.assertFalse(dagAclManager.checkAccess(user, ACLType.AM_VIEW_ACL));
    Assert.assertFalse(dagAclManager.checkAccess(user, ACLType.AM_MODIFY_ACL));
    Assert.assertTrue(dagAclManager.checkAccess(user, ACLType.DAG_VIEW_ACL));
    Assert.assertTrue(dagAclManager.checkAccess(user, ACLType.DAG_MODIFY_ACL));
    user = user1;
    Assert.assertFalse(dagAclManager.checkAccess(user, ACLType.DAG_VIEW_ACL));
    Assert.assertFalse(dagAclManager.checkAccess(user, ACLType.DAG_MODIFY_ACL));
}