Example usage for org.apache.hadoop.security UserGroupInformation getCurrentUser

Introduction

On this page you can find example usages of org.apache.hadoop.security UserGroupInformation getCurrentUser.

Prototype

@InterfaceAudience.Public
@InterfaceStability.Evolving
public static UserGroupInformation getCurrentUser() throws IOException 

Document

Return the current user, including any doAs in the current stack.
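As a minimal, self-contained sketch of that doAs behavior (the proxy user name "alice" and the class name are hypothetical, not taken from the examples below): outside of doAs, getCurrentUser() returns the login user; inside a doAs block it returns the impersonated user.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

public class GetCurrentUserSketch {
    public static void main(String[] args) throws Exception {
        // Outside any doAs, the current user is simply the login user.
        System.out.println(UserGroupInformation.getCurrentUser().getUserName());

        // Inside doAs, getCurrentUser() picks up the impersonated user instead.
        UserGroupInformation proxy = UserGroupInformation.createProxyUser("alice",
                UserGroupInformation.getLoginUser());
        proxy.doAs(new PrivilegedExceptionAction<Void>() {
            @Override
            public Void run() throws Exception {
                // Prints "alice" rather than the login user.
                System.out.println(UserGroupInformation.getCurrentUser().getUserName());
                return null;
            }
        });
    }
}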

Usage

From source file: org.apache.atlas.security.SecureClientUtils.java

License: Apache License

public static URLConnectionClientHandler getClientConnectionHandler(DefaultClientConfig config,
        org.apache.commons.configuration.Configuration clientConfig, String doAsUser,
        final UserGroupInformation ugi) {
    config.getProperties().put(URLConnectionClientHandler.PROPERTY_HTTP_URL_CONNECTION_SET_METHOD_WORKAROUND,
            true);
    Configuration conf = new Configuration();
    conf.addResource(conf.get(SSLFactory.SSL_CLIENT_CONF_KEY, SecurityProperties.SSL_CLIENT_PROPERTIES));
    UserGroupInformation.setConfiguration(conf);
    final ConnectionConfigurator connConfigurator = newConnConfigurator(conf);
    String authType = "simple";
    if (clientConfig != null) {
        authType = clientConfig.getString("atlas.http.authentication.type", "simple");
    }
    Authenticator authenticator = new PseudoDelegationTokenAuthenticator();
    if (!authType.equals("simple")) {
        authenticator = new KerberosDelegationTokenAuthenticator();
    }
    authenticator.setConnectionConfigurator(connConfigurator);
    final DelegationTokenAuthenticator finalAuthenticator = (DelegationTokenAuthenticator) authenticator;
    final DelegationTokenAuthenticatedURL.Token token = new DelegationTokenAuthenticatedURL.Token();
    HttpURLConnectionFactory httpURLConnectionFactory = null;
    try {
        UserGroupInformation ugiToUse = ugi != null ? ugi : UserGroupInformation.getCurrentUser();
        final UserGroupInformation actualUgi = (ugiToUse
                .getAuthenticationMethod() == UserGroupInformation.AuthenticationMethod.PROXY)
                        ? ugiToUse.getRealUser()
                        : ugiToUse;
        LOG.info("Real User: {}, is from ticket cache? {}", actualUgi, actualUgi.isLoginTicketBased());
        if (StringUtils.isEmpty(doAsUser)) {
            doAsUser = actualUgi.getShortUserName();
        }
        LOG.info("doAsUser: {}", doAsUser);
        final String finalDoAsUser = doAsUser;
        httpURLConnectionFactory = new HttpURLConnectionFactory() {
            @Override
            public HttpURLConnection getHttpURLConnection(final URL url) throws IOException {
                try {
                    return actualUgi.doAs(new PrivilegedExceptionAction<HttpURLConnection>() {
                        @Override
                        public HttpURLConnection run() throws Exception {
                            try {
                                return new DelegationTokenAuthenticatedURL(finalAuthenticator, connConfigurator)
                                        .openConnection(url, token, finalDoAsUser);
                            } catch (Exception e) {
                                throw new IOException(e);
                            }
                        }
                    });
                } catch (Exception e) {
                    if (e instanceof IOException) {
                        throw (IOException) e;
                    } else {
                        throw new IOException(e);
                    }
                }
            }
        };
    } catch (IOException e) {
        LOG.warn("Error obtaining user", e);
    }

    return new URLConnectionClientHandler(httpURLConnectionFactory);
}

From source file: org.apache.atlas.web.listeners.LoginProcessorIT.java

License: Apache License

@Test
public void testDefaultSimpleLogin() throws Exception {
    LoginProcessor processor = new LoginProcessor() {
        @Override
        protected org.apache.commons.configuration.Configuration getApplicationConfiguration() {
            return new PropertiesConfiguration();
        }
    };
    processor.login();

    Assert.assertNotNull(UserGroupInformation.getCurrentUser());
    Assert.assertFalse(UserGroupInformation.isLoginKeytabBased());
    Assert.assertFalse(UserGroupInformation.isSecurityEnabled());
}

From source file: org.apache.atlas.web.listeners.LoginProcessorIT.java

License: Apache License

@Test
public void testKerberosLogin() throws Exception {
    final File keytab = setupKDCAndPrincipals();

    LoginProcessor processor = new LoginProcessor() {
        @Override
        protected org.apache.commons.configuration.Configuration getApplicationConfiguration() {
            PropertiesConfiguration config = new PropertiesConfiguration();
            config.setProperty("atlas.authentication.method", "kerberos");
            config.setProperty("atlas.authentication.principal", "dgi@EXAMPLE.COM");
            config.setProperty("atlas.authentication.keytab", keytab.getAbsolutePath());
            return config;
        }

        @Override
        protected Configuration getHadoopConfiguration() {
            Configuration config = new Configuration(false);
            config.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
            config.setBoolean(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, true);
            config.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTH_TO_LOCAL, kerberosRule);

            return config;
        }

        @Override
        protected boolean isHadoopCluster() {
            return true;
        }
    };
    processor.login();

    Assert.assertTrue(UserGroupInformation.getLoginUser().getShortUserName().endsWith("dgi"));
    Assert.assertNotNull(UserGroupInformation.getCurrentUser());
    Assert.assertTrue(UserGroupInformation.isLoginKeytabBased());
    Assert.assertTrue(UserGroupInformation.isSecurityEnabled());

    kdc.stop();
}

From source file: org.apache.atlas.web.service.CuratorFactory.java

License: Apache License

private String getCurrentUser() {
    try {
        return UserGroupInformation.getCurrentUser().getUserName();
    } catch (IOException ioe) {
        return "unknown";
    }
}

From source file: org.apache.blur.hive.BlurHiveOutputFormat.java

License: Apache License

public static UserGroupInformation getUGI(final Configuration configuration) throws IOException {
    String user = getBlurUser(configuration);
    UserGroupInformation userGroupInformation;
    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
    if (user.equals(currentUser.getUserName())) {
        userGroupInformation = currentUser;
    } else {
        if (BlurHiveOutputFormat.isBlurUserAsProxy(configuration)) {
            userGroupInformation = UserGroupInformation.createProxyUser(user, currentUser);
        } else {
            userGroupInformation = UserGroupInformation.createRemoteUser(user);
        }
    }
    return userGroupInformation;
}

From source file: org.apache.blur.hive.BlurSerDeTest.java

License: Apache License

private int runLoad(boolean disableMrUpdate)
        throws IOException, InterruptedException, ClassNotFoundException, SQLException {

    Configuration configuration = miniCluster.getMRConfiguration();
    writeSiteFiles(configuration);
    HiveConf hiveConf = new HiveConf(configuration, getClass());
    hiveConf.set("hive.server2.thrift.port", "0");
    HiveServer2 hiveServer2 = new HiveServer2();
    hiveServer2.init(hiveConf);
    hiveServer2.start();

    int port = waitForStartupAndGetPort(hiveServer2);

    Class.forName(HiveDriver.class.getName());
    String userName = UserGroupInformation.getCurrentUser().getShortUserName();
    Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:" + port, userName, "");

    UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();

    run(connection, "set blur.user.name=" + currentUser.getUserName());
    run(connection, "set blur.mr.update.disabled=" + disableMrUpdate);
    run(connection, "set hive.metastore.warehouse.dir=" + WAREHOUSE.toURI().toString());
    run(connection, "create database if not exists testdb");
    run(connection, "use testdb");

    run(connection, "CREATE TABLE if not exists testtable ROW FORMAT SERDE 'org.apache.blur.hive.BlurSerDe' "
            + "WITH SERDEPROPERTIES ( 'blur.zookeeper.connection'='" + miniCluster.getZkConnectionString()
            + "', " + "'blur.table'='" + TEST + "', 'blur.family'='" + FAM + "' ) "
            + "STORED BY 'org.apache.blur.hive.BlurHiveStorageHandler'");

    run(connection, "desc testtable");

    String createLoadTable = buildCreateLoadTable(connection);
    run(connection, createLoadTable);
    File dbDir = new File(WAREHOUSE, "testdb.db");
    File tableDir = new File(dbDir, "loadtable");
    int totalRecords = 100;
    generateData(tableDir, totalRecords);

    run(connection, "select * from loadtable");
    run(connection, "set " + BlurSerDe.BLUR_BLOCKING_APPLY + "=true");
    run(connection, "insert into table testtable select * from loadtable");
    connection.close();
    hiveServer2.stop();
    return totalRecords;
}

From source file: org.apache.blur.mapreduce.lib.CsvBlurDriver.java

License: Apache License

public static Job setupJob(Configuration configuration, ControllerPool controllerPool,
        AtomicReference<Callable<Void>> ref, String... otherArgs) throws Exception {
    CommandLine cmd = parse(otherArgs);
    if (cmd == null) {
        return null;
    }

    final String controllerConnectionStr = cmd.getOptionValue("c");
    final String tableName = cmd.getOptionValue("t");

    final Iface client = controllerPool.getClient(controllerConnectionStr);
    TableDescriptor tableDescriptor = client.describe(tableName);

    Job job = Job.getInstance(configuration, "Blur indexer [" + tableName + "]");
    job.setJarByClass(CsvBlurDriver.class);
    job.setMapperClass(CsvBlurMapper.class);

    if (cmd.hasOption("p")) {
        job.getConfiguration().set(MAPRED_COMPRESS_MAP_OUTPUT, "true");
        String codecStr = cmd.getOptionValue("p");
        COMPRESSION compression;
        try {
            compression = COMPRESSION.valueOf(codecStr.trim().toUpperCase());
        } catch (IllegalArgumentException e) {
            compression = null;
        }
        if (compression == null) {
            job.getConfiguration().set(MAPRED_MAP_OUTPUT_COMPRESSION_CODEC, codecStr.trim());
        } else {
            job.getConfiguration().set(MAPRED_MAP_OUTPUT_COMPRESSION_CODEC, compression.getClassName());
        }
    }
    if (cmd.hasOption("a")) {
        CsvBlurMapper.setAutoGenerateRecordIdAsHashOfData(job, true);
    }
    if (cmd.hasOption("A")) {
        CsvBlurMapper.setAutoGenerateRowIdAsHashOfData(job, true);
    }
    if (cmd.hasOption("S")) {
        job.setInputFormatClass(SequenceFileInputFormat.class);
    } else {
        job.setInputFormatClass(TextInputFormat.class);
    }

    if (cmd.hasOption("C")) {
        if (cmd.hasOption("S")) {
            String[] optionValues = cmd.getOptionValues("C");
            job.setInputFormatClass(CsvBlurCombineSequenceFileInputFormat.class);
            CombineFileInputFormat.setMinInputSplitSize(job, Long.parseLong(optionValues[0]));
            CombineFileInputFormat.setMaxInputSplitSize(job, Long.parseLong(optionValues[1]));
        } else {
            System.err.println("'C' can only be used with option 'S'");
            return null;
        }
    }

    if (cmd.hasOption("i")) {
        for (String input : cmd.getOptionValues("i")) {
            Path path = new Path(input);
            Set<Path> pathSet = recurisvelyGetPathesContainingFiles(path, job.getConfiguration());
            if (pathSet.isEmpty()) {
                FileInputFormat.addInputPath(job, path);
            } else {
                for (Path p : pathSet) {
                    FileInputFormat.addInputPath(job, p);
                }
            }
        }
    }
    // processing the 'I' option
    if (cmd.hasOption("I")) {
        if (cmd.hasOption("C")) {
            System.err.println("'I' and 'C' both parameters can not be used together.");
            return null;
        }
        Option[] options = cmd.getOptions();
        for (Option option : options) {
            if (option.getOpt().equals("I")) {
                String[] values = option.getValues();
                if (values.length < 2) {
                    System.err.println("'I' parameter missing minimum args of (family path*)");
                    return null;
                }
                for (String p : getSubArray(values, 1)) {
                    Path path = new Path(p);
                    CsvBlurMapper.addFamilyPath(job, values[0], path);
                    FileInputFormat.addInputPath(job, path);
                }
            }
        }
    }

    if (cmd.hasOption("s")) {
        CsvBlurMapper.setSeparator(job, StringEscapeUtils.unescapeJava(cmd.getOptionValue("s")));
    }
    if (cmd.hasOption("o")) {
        BlurOutputFormat.setOptimizeInFlight(job, false);
    }
    if (cmd.hasOption("l")) {
        BlurOutputFormat.setIndexLocally(job, false);
    }
    if (cmd.hasOption("b")) {
        int maxDocumentBufferSize = Integer.parseInt(cmd.getOptionValue("b"));
        BlurOutputFormat.setMaxDocumentBufferSize(job, maxDocumentBufferSize);
    }
    // processing the 'd' option
    Option[] options = cmd.getOptions();
    for (Option option : options) {
        if (option.getOpt().equals("d")) {
            String[] values = option.getValues();
            if (values.length < 2) {
                System.err.println("'d' parameter missing minimum args of (family columname*)");
                return null;
            }
            CsvBlurMapper.addColumns(job, values[0], getSubArray(values, 1));
        }
    }
    BlurOutputFormat.setupJob(job, tableDescriptor);
    BlurMapReduceUtil.addDependencyJars(job.getConfiguration(), Splitter.class);
    if (cmd.hasOption("r")) {
        int reducerMultiplier = Integer.parseInt(cmd.getOptionValue("r"));
        BlurOutputFormat.setReducerMultiplier(job, reducerMultiplier);
    }
    final Path output;
    if (cmd.hasOption("out")) {
        output = new Path(cmd.getOptionValue("out"));
    } else {
        UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
        String userName = currentUser.getUserName();
        output = new Path("/user/" + userName + "/.blur-" + System.currentTimeMillis());
    }
    BlurOutputFormat.setOutputPath(job, output);
    if (cmd.hasOption("import")) {
        ref.set(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                client.loadData(tableName, output.toUri().toString());
                return null;
            }
        });
    }
    return job;
}

From source file: org.apache.carbondata.common.logging.impl.StandardLogService.java

License: Apache License

/**
 * Logs an audit message.
 *
 * @param msg audit log message
 */
@Override
public void audit(String msg) {
    String hostName = "";

    try {
        hostName = InetAddress.getLocalHost().getHostName();
    } catch (UnknownHostException e) {
        hostName = "localhost";
    }
    String username = "unknown";
    String threadid = "unknown";
    try {
        threadid = Thread.currentThread().getId() + "";
        username = UserGroupInformation.getCurrentUser().getShortUserName();
    } catch (IOException e) {
        username = "unknown";
    }
    logger.log(AuditLevel.AUDIT,
            "[" + hostName + "]" + "[" + username + "]" + "[Thread-" + threadid + "]" + msg);
}

From source file: org.apache.coheigea.bigdata.hdfs.HDFSKerberosTest.java

License: Apache License

@org.junit.Test
public void readTest() throws Exception {
    FileSystem fileSystem = hdfsCluster.getFileSystem();

    // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
    final Path file = new Path("/tmp/tmpdir/data-file2");
    FSDataOutputStream out = fileSystem.create(file);
    for (int i = 0; i < 1024; ++i) {
        out.write(("data" + i + "\n").getBytes("UTF-8"));
        out.flush();
    }
    out.close();

    // Change permissions to read-only
    fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));

    // Now try to read the file as "bob" - this should be allowed (by the policy - user)
    final Configuration conf = new Configuration();
    conf.set("fs.defaultFS", defaultFs);
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);

    String basedir = System.getProperty("basedir");
    if (basedir == null) {
        basedir = new File(".").getCanonicalPath();
    }

    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    ugi.doAs(new PrivilegedExceptionAction<Void>() {

        public Void run() throws Exception {
            FileSystem fs = FileSystem.get(conf);

            // Read the file
            FSDataInputStream in = fs.open(file);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(in, output);
            String content = new String(output.toByteArray());
            Assert.assertTrue(content.startsWith("data0"));

            fs.close();
            return null;
        }
    });
}

From source file: org.apache.crunch.io.hcatalog.HCatSourceITSpec.java

License: Apache License

@Test
public void test_HCatRead_NonNativeTable_HBase() throws Exception {
    HBaseTestingUtility hbaseTestUtil = null;
    try {
        String db = "default";
        String hiveTable = "test";
        Configuration hbaseConf = HBaseConfiguration.create(conf);
        hbaseTestUtil = new HBaseTestingUtility(hbaseConf);
        hbaseTestUtil.startMiniZKCluster();
        hbaseTestUtil.startMiniHBaseCluster(1, 1);

        org.apache.hadoop.hbase.client.Table table = hbaseTestUtil.createTable(TableName.valueOf("test-table"),
                "fam");

        String key1 = "this-is-a-key";
        Put put = new Put(Bytes.toBytes(key1));
        put.addColumn("fam".getBytes(), "foo".getBytes(), "17".getBytes());
        table.put(put);
        String key2 = "this-is-a-key-too";
        Put put2 = new Put(Bytes.toBytes(key2));
        put2.addColumn("fam".getBytes(), "foo".getBytes(), "29".getBytes());
        table.put(put2);
        table.close();

        org.apache.hadoop.hive.ql.metadata.Table tbl = new org.apache.hadoop.hive.ql.metadata.Table(db,
                hiveTable);
        tbl.setOwner(UserGroupInformation.getCurrentUser().getShortUserName());
        tbl.setTableType(TableType.EXTERNAL_TABLE);

        FieldSchema f1 = new FieldSchema();
        f1.setName("foo");
        f1.setType("int");
        FieldSchema f2 = new FieldSchema();
        f2.setName("key");
        f2.setType("string");

        tbl.setProperty("hbase.table.name", "test-table");
        tbl.setProperty("hbase.mapred.output.outputtable", "test-table");
        tbl.setProperty("storage_handler", "org.apache.hadoop.hive.hbase.HBaseStorageHandler");
        tbl.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
        tbl.setFields(ImmutableList.of(f1, f2));
        tbl.setSerdeParam("hbase.columns.mapping", "fam:foo,:key");
        this.client.createTable(tbl.getTTable());

        Pipeline p = new MRPipeline(HCatSourceITSpec.class, hbaseConf);
        HCatSourceTarget src = (HCatSourceTarget) FromHCat.table(hiveTable);

        HCatSchema schema = src.getTableSchema(p.getConfiguration());
        PCollection<HCatRecord> records = p.read(src);
        List<Pair<String, Integer>> mat = Lists
                .newArrayList(records.parallelDo(new HCatTestUtils.Fns.KeyMapPairFn(schema),
                        Avros.tableOf(Avros.strings(), Avros.ints())).materialize());

        p.done();

        assertEquals(ImmutableList.of(Pair.of(key1, 17), Pair.of(key2, 29)), mat);
    } finally {
        if (hbaseTestUtil != null) {
            hbaseTestUtil.shutdownMiniHBaseCluster();
            hbaseTestUtil.shutdownMiniZKCluster();
        }
    }
}