Usage examples for org.apache.hadoop.security.UserGroupInformation#getCurrentUser()
@InterfaceAudience.Public @InterfaceStability.Evolving public static UserGroupInformation getCurrentUser() throws IOException
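getCurrentUser() returns the UGI of the user running the current process, including any doAs in the current call stack, and triggers an implicit login if no user is logged in yet. A minimal, self-contained sketch of the call (the class name UgiExample is illustrative, not taken from any of the examples below):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiExample {
  public static void main(String[] args) throws IOException {
    // A plain default Configuration means simple (non-Kerberos) authentication.
    UserGroupInformation.setConfiguration(new Configuration());
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    System.out.println("user: " + ugi.getUserName()
        + ", short name: " + ugi.getShortUserName()
        + ", auth: " + ugi.getAuthenticationMethod());
  }
}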
From source file:org.apache.crunch.io.hcatalog.HCatTargetITSpec.java
License:Apache License
@Test
public void test_HCatTarget_WriteToNonNativeTable_HBase() throws Exception {
  HBaseTestingUtility hbaseTestUtil = null;
  try {
    String db = "default";
    String sourceHiveTable = "source_table";
    String destinationHiveTable = "dest_table";
    Configuration configuration = HBaseConfiguration.create(conf);
    hbaseTestUtil = new HBaseTestingUtility(configuration);
    hbaseTestUtil.startMiniZKCluster();
    hbaseTestUtil.startMiniHBaseCluster(1, 1);

    org.apache.hadoop.hbase.client.Table sourceTable = hbaseTestUtil
        .createTable(TableName.valueOf(sourceHiveTable), "fam");
    String key1 = "this-is-a-key";
    Put put = new Put(Bytes.toBytes(key1));
    put.addColumn("fam".getBytes(), "foo".getBytes(), "17".getBytes());
    sourceTable.put(put);
    String key2 = "this-is-a-key-too";
    Put put2 = new Put(Bytes.toBytes(key2));
    put2.addColumn("fam".getBytes(), "foo".getBytes(), "29".getBytes());
    sourceTable.put(put2);
    sourceTable.close();

    // create Hive table for the source table
    org.apache.hadoop.hive.ql.metadata.Table tbl =
        new org.apache.hadoop.hive.ql.metadata.Table(db, sourceHiveTable);
    tbl.setOwner(UserGroupInformation.getCurrentUser().getShortUserName());
    tbl.setTableType(TableType.EXTERNAL_TABLE);
    FieldSchema f1 = new FieldSchema();
    f1.setName("foo");
    f1.setType("int");
    FieldSchema f2 = new FieldSchema();
    f2.setName("key");
    f2.setType("string");
    tbl.setProperty("storage_handler", "org.apache.hadoop.hive.hbase.HBaseStorageHandler");
    tbl.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
    tbl.setFields(ImmutableList.of(f1, f2));
    tbl.setSerdeParam("hbase.columns.mapping", "fam:foo,:key");
    this.client.createTable(tbl.getTTable());

    // create the destination table
    hbaseTestUtil.createTable(TableName.valueOf(destinationHiveTable), "fam");
    org.apache.hadoop.hive.ql.metadata.Table destTable =
        new org.apache.hadoop.hive.ql.metadata.Table(db, destinationHiveTable);
    destTable.setOwner(UserGroupInformation.getCurrentUser().getShortUserName());
    destTable.setTableType(TableType.EXTERNAL_TABLE);
    destTable.setProperty("storage_handler", "org.apache.hadoop.hive.hbase.HBaseStorageHandler");
    destTable.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
    destTable.setFields(ImmutableList.of(f1, f2));
    destTable.setSerdeParam("hbase.columns.mapping", "fam:foo,:key");
    this.client.createTable(destTable.getTTable());

    Pipeline p = new MRPipeline(HCatSourceITSpec.class, configuration);
    PCollection<HCatRecord> records = p.read(FromHCat.table(sourceHiveTable));
    p.write(records, ToHCat.table(destinationHiveTable));
    p.done();

    Connection connection = null;
    try {
      Scan scan = new Scan();
      connection = ConnectionFactory.createConnection(configuration);
      org.apache.hadoop.hbase.client.Table table = connection
          .getTable(TableName.valueOf(destinationHiveTable));
      ResultScanner scanner = table.getScanner(scan);
      Result result = null;
      List<Pair<String, Integer>> actual = new ArrayList<>();
      while ((result = scanner.next()) != null) {
        String value = Bytes.toString(result.getValue("fam".getBytes(), "foo".getBytes()));
        actual.add(Pair.of(Bytes.toString(result.getRow()), Integer.parseInt(value)));
      }
      Assert.assertEquals(ImmutableList.of(Pair.of(key1, 17), Pair.of(key2, 29)), actual);
    } finally {
      IOUtils.closeQuietly(connection);
    }
  } finally {
    if (hbaseTestUtil != null) {
      hbaseTestUtil.shutdownMiniHBaseCluster();
      hbaseTestUtil.shutdownMiniZKCluster();
    }
  }
}
From source file:org.apache.crunch.io.hcatalog.HCatTestUtils.java
License:Apache License
public static Table createTable(IMetaStoreClient client, String db, String tableName, TableType type,
    @Nullable Path datalocation, List<FieldSchema> partCols)
    throws IOException, HiveException, TException {
  org.apache.hadoop.hive.ql.metadata.Table tbl =
      new org.apache.hadoop.hive.ql.metadata.Table(db, tableName);
  tbl.setOwner(UserGroupInformation.getCurrentUser().getShortUserName());
  tbl.setTableType(type);
  if (datalocation != null)
    tbl.setDataLocation(datalocation);

  FieldSchema f1 = new FieldSchema();
  f1.setName("foo");
  f1.setType("int");
  FieldSchema f2 = new FieldSchema();
  f2.setName("bar");
  f2.setType("string");
  if (partCols != null && !partCols.isEmpty())
    tbl.setPartCols(partCols);
  tbl.setFields(ImmutableList.of(f1, f2));
  tbl.setSerializationLib("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
  tbl.setSerdeParam("field.delim", ",");
  tbl.setSerdeParam("serialization.format", ",");
  tbl.setInputFormatClass("org.apache.hadoop.mapred.TextInputFormat");
  tbl.setOutputFormatClass("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
  client.createTable(tbl.getTTable());
  return client.getTable(db, tableName);
}
From source file:org.apache.drill.exec.rpc.security.kerberos.KerberosFactory.java
License:Apache License
@Override
public UserGroupInformation createAndLoginUser(final Map<String, ?> properties) throws IOException {
  final Configuration conf = new Configuration();
  conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
      UserGroupInformation.AuthenticationMethod.KERBEROS.toString());
  UserGroupInformation.setConfiguration(conf);

  final String keytab = (String) properties.get(DrillProperties.KEYTAB);
  final boolean assumeSubject = properties.containsKey(DrillProperties.KERBEROS_FROM_SUBJECT)
      && Boolean.parseBoolean((String) properties.get(DrillProperties.KERBEROS_FROM_SUBJECT));
  try {
    final UserGroupInformation ugi;
    if (assumeSubject) {
      ugi = UserGroupInformation.getUGIFromSubject(Subject.getSubject(AccessController.getContext()));
      logger.debug("Assuming subject for {}.", ugi.getShortUserName());
    } else {
      if (keytab != null) {
        ugi = UserGroupInformation
            .loginUserFromKeytabAndReturnUGI((String) properties.get(DrillProperties.USER), keytab);
        logger.debug("Logged in {} using keytab.", ugi.getShortUserName());
      } else {
        // includes Kerberos ticket login
        ugi = UserGroupInformation.getCurrentUser();
        logger.debug("Logged in {} using ticket.", ugi.getShortUserName());
      }
    }
    return ugi;
  } catch (final IOException e) {
    logger.debug("Login failed.", e);
    final Throwable cause = e.getCause();
    if (cause instanceof LoginException) {
      throw new SaslException("Failed to login.", cause);
    }
    throw new SaslException("Unexpected failure trying to login.", cause);
  }
}
From source file:org.apache.drill.exec.rpc.security.plain.PlainFactory.java
License:Apache License
@Override
public UserGroupInformation createAndLoginUser(Map<String, ?> properties) throws IOException {
  final Configuration conf = new Configuration();
  UserGroupInformation.setConfiguration(conf);
  try {
    return UserGroupInformation.getCurrentUser();
  } catch (final IOException e) {
    logger.debug("Login failed.", e);
    final Throwable cause = e.getCause();
    if (cause instanceof LoginException) {
      throw new SaslException("Failed to login.", cause);
    }
    throw new SaslException("Unexpected failure trying to login.", cause);
  }
}
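Note that with a fresh default Configuration the authentication method is simple, so the getCurrentUser() call above typically resolves to the operating-system user (or the HADOOP_USER_NAME environment variable, if set) via an implicit login.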
From source file:org.apache.drill.exec.rpc.user.DrillUser.java
License:Apache License
public DrillUser(String userName) throws IOException {
  this.hadoopUser = UserGroupInformation.createProxyUser(userName, UserGroupInformation.getCurrentUser());
}
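DrillUser wraps the named user in a proxy UGI built on top of the current user. A minimal sketch of how such a proxy interacts with getCurrentUser() — the pattern the Drill examples below rely on; the class name ProxyUserSketch and the user name "alice" are illustrative, not from the Drill source:

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserSketch {
  public static void main(String[] args) throws IOException, InterruptedException {
    UserGroupInformation realUser = UserGroupInformation.getCurrentUser();
    // "alice" is an illustrative proxy-user name
    UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser("alice", realUser);
    String effective = proxyUgi.doAs(new PrivilegedExceptionAction<String>() {
      @Override
      public String run() throws Exception {
        // Inside doAs(), getCurrentUser() reports the proxy user ("alice"),
        // not the process user.
        return UserGroupInformation.getCurrentUser().getShortUserName();
      }
    });
    System.out.println("effective user: " + effective);
  }
}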
From source file:org.apache.drill.exec.store.mapr.db.MapRDBTableCache.java
License:Apache License
/**
 * getTable given primary table path and indexDesc.
 * Returns Table for the corresponding index table if indexDesc is not null.
 * Returns Table for the primary table if indexDesc is null.
 *
 * @param tablePath primary table path
 * @param indexDesc index table descriptor
 */
public Table getTable(final Path tablePath, final IndexDesc indexDesc, final String userName)
    throws DrillRuntimeException {
  final Table dbTableHandle;
  final UserGroupInformation proxyUserUgi = ImpersonationUtil.createProxyUgi(userName);

  try {
    dbTableHandle = proxyUserUgi.doAs(new PrivilegedExceptionAction<Table>() {
      public Table run() throws Exception {
        if (logger.isTraceEnabled()) {
          logger.trace("Getting MaprDB Table handle for proxy user: "
              + UserGroupInformation.getCurrentUser());
        }
        if (tableCachingEnabled) {
          Table table = tableCache.get(new MapRDBTableCache.Key(tablePath, indexDesc));
          logger.trace("time {} get the tablePath {} tableHandle {} index {} userName {} currentUser {}",
              System.nanoTime(),
              tablePath == null ? "null" : tablePath,
              table == null ? "null" : table,
              indexDesc == null ? "null" : indexDesc.getIndexName(),
              userName == null ? "null" : userName,
              UserGroupInformation.getCurrentUser() == null ? "null"
                  : UserGroupInformation.getCurrentUser());
          return table;
        } else {
          return indexDesc == null ? MapRDBImpl.getTable(tablePath)
              : MapRDBImpl.getIndexTable(indexDesc);
        }
      }
    });
  } catch (Exception e) {
    throw new DrillRuntimeException("Error getting table: " + tablePath.toString()
        + (indexDesc == null ? "" : (", IndexDesc: " + indexDesc.toString())), e);
  }
  return dbTableHandle;
}
From source file:org.apache.drill.exec.util.ImpersonationUtil.java
License:Apache License
/** Helper method to create DrillFileSystem */
private static DrillFileSystem createFileSystem(UserGroupInformation proxyUserUgi,
    final Configuration fsConf, final OperatorStats stats) {
  DrillFileSystem fs;
  try {
    fs = proxyUserUgi.doAs(new PrivilegedExceptionAction<DrillFileSystem>() {
      public DrillFileSystem run() throws Exception {
        logger.trace("Creating DrillFileSystem for proxy user: "
            + UserGroupInformation.getCurrentUser());
        return new DrillFileSystem(fsConf, stats);
      }
    });
  } catch (InterruptedException | IOException e) {
    final String errMsg = "Failed to create DrillFileSystem for proxy user: " + e.getMessage();
    logger.error(errMsg, e);
    throw new DrillRuntimeException(errMsg, e);
  }
  return fs;
}
From source file:org.apache.druid.indexer.JobHelper.java
License:Apache License
/**
 * Authenticates against a secured Hadoop cluster.
 * In case of any bug fix, make sure to fix the code at HdfsStorageAuthentication#authenticate as well.
 *
 * @param config containing the principal name and keytab path.
 */
public static void authenticate(HadoopDruidIndexerConfig config) {
  String principal = config.HADOOP_KERBEROS_CONFIG.getPrincipal();
  String keytab = config.HADOOP_KERBEROS_CONFIG.getKeytab();
  if (!Strings.isNullOrEmpty(principal) && !Strings.isNullOrEmpty(keytab)) {
    Configuration conf = new Configuration();
    UserGroupInformation.setConfiguration(conf);
    if (UserGroupInformation.isSecurityEnabled()) {
      try {
        if (!UserGroupInformation.getCurrentUser().hasKerberosCredentials()
            || !UserGroupInformation.getCurrentUser().getUserName().equals(principal)) {
          log.info("trying to authenticate user [%s] with keytab [%s]", principal, keytab);
          UserGroupInformation.loginUserFromKeytab(principal, keytab);
        }
      } catch (IOException e) {
        throw new ISE(e, "Failed to authenticate user principal [%s] with keytab [%s]", principal, keytab);
      }
    }
  }
}
From source file:org.apache.druid.security.kerberos.DruidKerberosUtil.java
License:Apache License
public static void authenticateIfRequired(String internalClientPrincipal, String internalClientKeytab) {
  if (!Strings.isNullOrEmpty(internalClientPrincipal) && !Strings.isNullOrEmpty(internalClientKeytab)) {
    Configuration conf = new Configuration();
    conf.setClassLoader(DruidKerberosModule.class.getClassLoader());
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    try {
      // login for the first time
      if (!UserGroupInformation.getCurrentUser().hasKerberosCredentials()
          || !UserGroupInformation.getCurrentUser().getUserName().equals(internalClientPrincipal)) {
        log.info("trying to authenticate user [%s] with keytab [%s]", internalClientPrincipal,
            internalClientKeytab);
        UserGroupInformation.loginUserFromKeytab(internalClientPrincipal, internalClientKeytab);
        return;
      }
      // try to re-login in case the TGT expired
      if (UserGroupInformation.isLoginKeytabBased()) {
        log.info("Re-Login from key tab [%s] with principal [%s]", internalClientKeytab,
            internalClientPrincipal);
        UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
        return;
      } else if (UserGroupInformation.isLoginTicketBased()) {
        log.info("Re-Login from Ticket cache");
        UserGroupInformation.getLoginUser().reloginFromTicketCache();
        return;
      }
    } catch (IOException e) {
      throw new ISE(e, "Failed to authenticate user principal [%s] with keytab [%s]",
          internalClientPrincipal, internalClientKeytab);
    }
  }
}
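The order of checks above matters: a fresh keytab login happens only when the current user has no Kerberos credentials yet or is logged in as a different principal; otherwise the method merely renews the existing login, via checkTGTAndReloginFromKeytab() for keytab-based logins or reloginFromTicketCache() for ticket-based ones.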
From source file:org.apache.druid.security.kerberos.KerberosHttpClient.java
License:Apache License
private <Intermediate, Final> void inner_go(final Request request,
    final HttpResponseHandler<Intermediate, Final> httpResponseHandler, final Duration duration,
    final SettableFuture<Final> future) {
  try {
    final String host = request.getUrl().getHost();
    final URI uri = request.getUrl().toURI();

    Map<String, List<String>> cookieMap = cookieManager.get(uri, Collections.emptyMap());
    for (Map.Entry<String, List<String>> entry : cookieMap.entrySet()) {
      request.addHeaderValues(entry.getKey(), entry.getValue());
    }

    final boolean should_retry_on_unauthorized_response;
    if (DruidKerberosUtil.needToSendCredentials(cookieManager.getCookieStore(), uri)) {
      // No cookies for the requested URI: authenticate the user and add an authentication header
      log.debug("No Auth Cookie found for URI[%s]. Existing Cookies[%s] Authenticating... ", uri,
          cookieManager.getCookieStore().getCookies());
      DruidKerberosUtil.authenticateIfRequired(internalClientPrincipal, internalClientKeytab);
      UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
      String challenge = currentUser.doAs(new PrivilegedExceptionAction<String>() {
        @Override
        public String run() throws Exception {
          return DruidKerberosUtil.kerberosChallenge(host);
        }
      });
      request.setHeader(HttpHeaders.Names.AUTHORIZATION, "Negotiate " + challenge);
      should_retry_on_unauthorized_response = false;
    } else {
      should_retry_on_unauthorized_response = true;
      log.debug("Found Auth Cookie for URI[%s].", uri);
    }

    ListenableFuture<RetryResponseHolder<Final>> internalFuture = delegate.go(request,
        new RetryIfUnauthorizedResponseHandler<Intermediate, Final>(
            new ResponseCookieHandler(request.getUrl().toURI(), cookieManager, httpResponseHandler)),
        duration);

    Futures.addCallback(internalFuture, new FutureCallback<RetryResponseHolder<Final>>() {
      @Override
      public void onSuccess(RetryResponseHolder<Final> result) {
        if (should_retry_on_unauthorized_response && result.shouldRetry()) {
          log.info("Preparing for Retry");
          // remove Auth cookie
          DruidKerberosUtil.removeAuthCookie(cookieManager.getCookieStore(), uri);
          // clear existing cookie
          request.setHeader("Cookie", "");
          inner_go(request.copy(), httpResponseHandler, duration, future);
        } else {
          log.debug("Not retrying and returning future response");
          future.set(result.getObj());
        }
      }

      @Override
      public void onFailure(Throwable t) {
        future.setException(t);
      }
    }, exec);
  } catch (Throwable e) {
    throw new RuntimeException(e);
  }
}