List of usage examples for org.apache.hadoop.security SecurityUtil getServerPrincipal
@InterfaceAudience.Public @InterfaceStability.Evolving public static String getServerPrincipal(String principalConfig, InetAddress addr) throws IOException
From source file:azkaban.security.HadoopSecurityManager_H_2_0.java
License:Apache License
private Text getMRTokenRenewerInternal(JobConf jobConf) throws IOException { // Taken from Oozie ////w w w. j a v a 2 s. c om // Getting renewer correctly for JT principal also though JT in hadoop // 1.x does not have // support for renewing/cancelling tokens String servicePrincipal = jobConf.get(RM_PRINCIPAL, jobConf.get(JT_PRINCIPAL)); Text renewer; if (servicePrincipal != null) { String target = jobConf.get(HADOOP_YARN_RM, jobConf.get(HADOOP_JOB_TRACKER_2)); if (target == null) { target = jobConf.get(HADOOP_JOB_TRACKER); } String addr = NetUtils.createSocketAddr(target).getHostName(); renewer = new Text(SecurityUtil.getServerPrincipal(servicePrincipal, addr)); } else { // No security renewer = DEFAULT_RENEWER; } return renewer; }
From source file:com.blackberry.bdp.kaboom.Authenticator.java
License:Apache License
/**
 * Performs Kerberos login (when Hadoop security is enabled) and optionally creates a
 * proxy-user UGI for {@code proxyUserName}, caching it in {@code proxyUserMap}.
 *
 * <p>Reads instance fields {@code kerbConfPrincipal} and {@code kerbKeytab} for the
 * Kerberos identity, and {@code staticLogin} for the previously logged-in user.
 *
 * @param proxyUserName user to impersonate; empty string means no impersonation
 * @return true on success (including the no-security case), false on any failure
 */
private boolean authenticate(String proxyUserName) {
    UserGroupInformation proxyTicket;
    // logic for kerberos login
    boolean useSecurity = UserGroupInformation.isSecurityEnabled();
    LOG.info("Hadoop Security enabled: " + useSecurity);
    if (useSecurity) {
        // sanity checking: both a principal and a keytab must be configured
        if (kerbConfPrincipal.isEmpty()) {
            LOG.error("Hadoop running in secure mode, but Flume config doesn't "
                    + "specify a principal to use for Kerberos auth.");
            return false;
        }
        if (kerbKeytab.isEmpty()) {
            LOG.error("Hadoop running in secure mode, but Flume config doesn't "
                    + "specify a keytab to use for Kerberos auth.");
            return false;
        }
        String principal;
        try {
            // resolves _HOST pattern using standard Hadoop search/replace
            // via DNS lookup when 2nd argument is empty
            principal = SecurityUtil.getServerPrincipal(kerbConfPrincipal, "");
        } catch (IOException e) {
            LOG.error("Host lookup error resolving kerberos principal ("
                    + kerbConfPrincipal + "). Exception follows.", e);
            return false;
        }
        Preconditions.checkNotNull(principal, "Principal must not be null");
        KerberosUser prevUser = staticLogin.get();
        KerberosUser newUser = new KerberosUser(principal, kerbKeytab);
        // be cruel and unusual when user tries to login as multiple principals
        // this isn't really valid with a reconfigure but this should be rare
        // enough to warrant a restart of the agent JVM
        // TODO: find a way to interrogate the entire current config state,
        // since we don't have to be unnecessarily protective if they switch all
        // HDFS sinks to use a different principal all at once.
        Preconditions.checkState(prevUser == null || prevUser.equals(newUser),
                "Cannot use multiple kerberos principals in the same agent. "
                        + " Must restart agent to use new principal or keytab. "
                        + "Previous = %s, New = %s", prevUser, newUser);
        // attempt to use cached credential if the user is the same
        // this is polite and should avoid flooding the KDC with auth requests
        UserGroupInformation curUser = null;
        if (prevUser != null && prevUser.equals(newUser)) {
            try {
                LOG.info("Attempting login as {} with cached credentials", prevUser.getPrincipal());
                curUser = UserGroupInformation.getLoginUser();
            } catch (IOException e) {
                LOG.warn("User unexpectedly had no active login. Continuing with "
                        + "authentication", e);
            }
        }
        if (curUser == null || !curUser.getUserName().equals(principal)) {
            try {
                // static login
                curUser = kerberosLogin(this, principal, kerbKeytab);
                LOG.info("Current user obtained from Kerberos login {}", curUser.getUserName());
            } catch (IOException e) {
                LOG.error("Authentication or file read error while attempting to "
                        + "login as kerberos principal (" + principal + ") using "
                        + "keytab (" + kerbKeytab + "). Exception follows.", e);
                return false;
            }
        } else {
            LOG.debug("{}: Using existing principal login: {}", this, curUser);
        }
        // Diagnostic dump when the login is unexpectedly not keytab-based.
        try {
            if (UserGroupInformation.getLoginUser().isFromKeytab() == false) {
                LOG.warn("Using a keytab for authentication is {}",
                        UserGroupInformation.getLoginUser().isFromKeytab());
                LOG.warn("curUser.isFromKeytab(): {}", curUser.isFromKeytab());
                LOG.warn("UserGroupInformation.getCurrentUser().isLoginKeytabBased(): {}",
                        UserGroupInformation.getCurrentUser().isLoginKeytabBased());
                LOG.warn("UserGroupInformation.isLoginKeytabBased(): {}",
                        UserGroupInformation.isLoginKeytabBased());
                LOG.warn("curUser.getAuthenticationMethod(): {}", curUser.getAuthenticationMethod());
                //System.exit(1);
            }
        } catch (IOException e) {
            LOG.error("Failed to get login user.", e);
            System.exit(1);
        }
        // we supposedly got through this unscathed... so store the static user
        staticLogin.set(newUser);
    }
    // hadoop impersonation works with or without kerberos security
    proxyTicket = null;
    if (!proxyUserName.isEmpty()) {
        try {
            proxyTicket = UserGroupInformation.createProxyUser(proxyUserName,
                    UserGroupInformation.getLoginUser());
        } catch (IOException e) {
            LOG.error("Unable to login as proxy user. Exception follows.", e);
            return false;
        }
    }
    UserGroupInformation ugi = null;
    if (proxyTicket != null) {
        ugi = proxyTicket;
    } else if (useSecurity) {
        try {
            ugi = UserGroupInformation.getLoginUser();
        } catch (IOException e) {
            LOG.error("Unexpected error: Unable to get authenticated user after "
                    + "apparent successful login! Exception follows.", e);
            return false;
        }
    }
    if (ugi != null) {
        // dump login information
        AuthenticationMethod authMethod = ugi.getAuthenticationMethod();
        LOG.info("Auth method: {}", authMethod);
        LOG.info(" User name: {}", ugi.getUserName());
        LOG.info(" Using keytab: {}", ugi.isFromKeytab());
        if (authMethod == AuthenticationMethod.PROXY) {
            UserGroupInformation superUser;
            try {
                superUser = UserGroupInformation.getLoginUser();
                LOG.info(" Superuser auth: {}", superUser.getAuthenticationMethod());
                LOG.info(" Superuser name: {}", superUser.getUserName());
                LOG.info(" Superuser using keytab: {}", superUser.isFromKeytab());
            } catch (IOException e) {
                LOG.error("Unexpected error: unknown superuser impersonating proxy.", e);
                return false;
            }
        }
        LOG.info("Logged in as user {}", ugi.getUserName());
        // Cache the proxy UGI (may be null when not impersonating) with a timestamp.
        UGIState state = new UGIState();
        state.ugi = proxyTicket;
        state.lastAuthenticated = System.currentTimeMillis();
        proxyUserMap.put(proxyUserName, state);
        return true;
    }
    return true;
}
From source file:com.cloudera.beeswax.Server.java
License:Apache License
/** * Authenticate using kerberos if configured *///from www. j a va 2s . co m private static void doKerberosAuth() throws IllegalArgumentException { if (keytabFile == null || keytabFile.isEmpty()) { throw new IllegalArgumentException("No keytab specified"); } if (principalConf == null || principalConf.isEmpty()) { throw new IllegalArgumentException("No principal specified"); } // Login from the keytab try { kerberosName = SecurityUtil.getServerPrincipal(principalConf, "0.0.0.0"); UserGroupInformation.loginUserFromKeytab(kerberosName, keytabFile); LOG.info("Logged in using Kerberos ticket for '" + kerberosName + "' from " + keytabFile); bwUgi = UserGroupInformation.getCurrentUser(); // Start a thread to periodically refresh kerberos ticket Thread t = new Thread(new Runnable() { @Override public void run() { while (true) { try { Thread.sleep(refreshInterval); } catch (InterruptedException e) { return; } try { LOG.info("Refreshed Kerberos ticket for '" + kerberosName + "' from " + keytabFile); UserGroupInformation.getLoginUser().reloginFromKeytab(); } catch (IOException eIO) { LOG.error("Error refreshing Kerberos ticket", eIO); } } } }, "KerberosRefresher"); t.start(); } catch (IOException e) { throw new IllegalArgumentException("Couldn't setup Kerberos authentication", e); } }
From source file:com.cloudera.impala.service.ZooKeeperSession.java
License:Apache License
/** * Setup configuration to connect to Zookeeper using kerberos. *///ww w . j a v a 2s . co m private void setupJAASConfig(String principal, String keytab) throws IOException { Preconditions.checkArgument(principal != null && !principal.isEmpty()); if (keytab == null || keytab.trim().isEmpty()) { throw new IOException("Keytab must be set to connect using kerberos."); } LOGGER.debug("Authenticating with principal {} and keytab {}", principal, keytab); System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY, SASL_LOGIN_CONTEXT_NAME); principal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0"); JaasConfiguration jaasConf = new JaasConfiguration(SASL_LOGIN_CONTEXT_NAME, principal, keytab); // Install the Configuration in the runtime. javax.security.auth.login.Configuration.setConfiguration(jaasConf); }
From source file:com.emc.greenplum.gpdb.hdfsconnector.ConnectorUtil.java
License:Open Source License
/**
 * Helper routine to login to secure Hadoop. If it's not configured to use
 * security (in the core-site.xml) then return.
 *
 * Create a LoginContext using config in $GPHOME/lib/hadoop/jaas.conf and search for
 * a valid TGT which matches HADOOP_SECURITY_USERNAME.
 * Check if the TGT needs to be renewed or recreated and use the installed kinit
 * command to manage the credential cache, so future queries can reuse cached TGTs.
 *
 * @param conf the configuration
 * @throws IOException if TGT acquisition via kinit fails
 * @throws InterruptedException if waiting on the kinit process is interrupted
 */
protected static void loginSecureHadoop(Configuration conf) throws IOException, InterruptedException {
    // if security config does not exist then assume no security
    if (conf.get(HADOOP_SECURITY_USERNAME) == null || conf.get(HADOOP_SECURITY_USER_KEYTAB_FILE) == null) {
        return;
    }
    // Resolve _HOST in the configured principal to this machine's canonical name.
    String principal = SecurityUtil.getServerPrincipal(conf.get(HADOOP_SECURITY_USERNAME),
            InetAddress.getLocalHost().getCanonicalHostName());
    String jaasConf = System.getenv("GPHOME") + "/lib/hadoop/jaas.conf";
    System.setProperty("java.security.auth.login.config", jaasConf);
    Boolean kinitDisabled = conf.getBoolean(HADOOP_DISABLE_KINIT, false);
    /* Attempt to find the TGT from the user's ticket cache and check if it is valid.
       If the TGT needs to be renewed or recreated we use the kinit binary so the
       cache can be persisted, allowing future queries to reuse cached TGTs.
       If the user disables kinit we fall back to SecurityUtil.login, which always
       performs an AS_REQ followed by a TGS_REQ to the KDC and sets the global login
       context. The problem with that method: with 300+ GPDB segments, every gphdfs
       query issues 300 AS_REQs to the KDC, which may cause intermittent failures or
       slow queries if the KDC cannot keep up with the demand. */
    try {
        LoginContext login = new LoginContext("gphdfs");
        login.login();
        Subject subject = login.getSubject();
        Set<KerberosTicket> tickets = subject.getPrivateCredentials(KerberosTicket.class);
        // find the TGT that matches the configured principal
        for (KerberosTicket ticket : tickets) {
            if (ticket.getClient().toString().equals(principal)) {
                long start = ticket.getStartTime().getTime();
                long end = ticket.getEndTime().getTime();
                long current = System.currentTimeMillis();
                // compute start time of ticket plus 80% of lifetime to find the refresh window
                Long rtime = start + (long) ((end - start) * .8);
                if (current <= rtime && ticket.isCurrent()) {
                    // Ticket is current so no further action required
                    return;
                } else if (current >= rtime && ticket.isRenewable() && !kinitDisabled) {
                    // Ticket needs to be renewed and is renewable: "kinit -R" renews in place
                    String[] kinitRefresh = { "kinit", "-R" };
                    Process kinitRenew = Runtime.getRuntime().exec(kinitRefresh);
                    int rt = kinitRenew.waitFor();
                    if (rt == 0) {
                        return;
                    }
                }
                // Matching ticket found but unusable: stop scanning, fall through to re-acquire.
                break;
            }
        }
    } catch (LoginException | InterruptedException e) {
        if (kinitDisabled) {
            SecurityUtil.login(conf, HADOOP_SECURITY_USER_KEYTAB_FILE, HADOOP_SECURITY_USERNAME);
            return;
        }
        /* if kinit is not disabled then do nothing because we will request a new TGT and
         * update the ticket cache regardless of whether login or kinit refresh failed initially */
    }
    // fail back to securityutil if kinit is disabled
    if (kinitDisabled) {
        // login from keytab
        SecurityUtil.login(conf, HADOOP_SECURITY_USER_KEYTAB_FILE, HADOOP_SECURITY_USERNAME);
        return;
    }
    // if we made it here then there is no current TGT in the cache that matches the
    // principal, and we need to request a new TGT via kinit with the keytab
    String[] kinitCmd = { "kinit", "-kt", conf.get(HADOOP_SECURITY_USER_KEYTAB_FILE), principal };
    try {
        Process kinit = Runtime.getRuntime().exec(kinitCmd);
        int rt = kinit.waitFor();
        if (rt != 0) {
            // kinit failed: surface its stderr in the exception message
            BufferedReader errOut = new BufferedReader(new InputStreamReader(kinit.getErrorStream()));
            String line;
            String errOutput = "";
            while ((line = errOut.readLine()) != null) {
                errOutput += line;
            }
            throw new IOException(String.format(
                    "Failed to Acquire TGT using command \"kinit -kt\" with configured keytab and principal settings:\n%s",
                    errOutput));
        }
    } catch (InterruptedException e) {
        throw new InterruptedException(String.format(
                "command \"kinit -kt\" with configured keytab and principal settings:\n%s", e.getMessage()));
    }
}
From source file:com.facebook.presto.hive.util.SecurityUtils.java
License:Apache License
/** * kerberos login/*from w w w. j av a 2 s . c o m*/ * * @param principalConf etc. hadoop-data/_HOST@SANKUAI.COM * etc. For UDDP umr-jc***@UCLOUD.CN * @param keytabFile keytab file * @return the login UserGroupInformation * * @throws IOException */ public static UserGroupInformation login(String principalConf, String keytabFile) throws IOException { if (UserGroupInformation.isSecurityEnabled()) { String kerberosName = SecurityUtil.getServerPrincipal(principalConf, "umr-jcjky1"); UserGroupInformation.loginUserFromKeytab("umr-jcjky1@UCLOUD.CN", keytabFile); } return null; }
From source file:com.gemstone.gemfire.cache.hdfs.internal.HDFSStoreImpl.java
License:Apache License
/**
 * Creates (or re-creates) the {@link FileSystem} client for this HDFS store.
 *
 * <p>Optionally layers a client config file on top of {@code hconf}, disables the
 * FS shutdown hook and avro serializations, probes whether the NameNode is actually
 * secured when Kerberos auth is configured, performs a keytab login for the
 * configured principal, and finally builds the FileSystem via the store's factory.
 *
 * @param hconf Hadoop configuration to use (mutated in place)
 * @param configFile optional local path of an hdfs client config file; may be null
 * @param forceNew whether the factory must create a fresh FileSystem instance
 * @return the initialized FileSystem
 * @throws IOException on security misconfiguration or FS creation failure
 */
private FileSystem createFileSystem(Configuration hconf, String configFile, boolean forceNew)
        throws IOException {
    FileSystem filesystem = null;
    // load hdfs client config file if specified. The path is on the local file system
    if (configFile != null) {
        if (logger.isDebugEnabled()) {
            logger.debug("{}Adding resource config file to hdfs configuration:" + configFile, logPrefix);
        }
        hconf.addResource(new Path(configFile));
        if (!new File(configFile).exists()) {
            logger.warn(LocalizedMessage.create(LocalizedStrings.HOPLOG_HDFS_CLIENT_CONFIG_FILE_ABSENT,
                    configFile));
        }
    }
    // This setting disables the shutdown hook for the file system object. The shutdown
    // hook may cause the FS object to close before the cache or store, leading to
    // unpredictable behavior. This setting is provided for GFXD-like server use cases
    // where FS close is managed by a server. This setting is not supported by old
    // versions of hadoop, HADOOP-4829
    hconf.setBoolean("fs.automatic.close", false);
    // Hadoop has a configuration parameter io.serializations that is a list of serialization
    // classes which can be used for obtaining serializers and deserializers. This parameter
    // by default contains avro classes. When a sequence file is created, it calls
    // SerializationFactory.getSerializer(keyclass). This internally creates objects using
    // reflection of all the classes that were part of io.serializations. But since there is
    // no avro class available it throws an exception.
    // Before creating a sequenceFile, override the io.serializations parameter and pass only
    // the classes that are important to us.
    hconf.setStrings("io.serializations",
            new String[] { "org.apache.hadoop.io.serializer.WritableSerialization" });
    // create writer
    SchemaMetrics.configureGlobally(hconf);
    String nameNodeURL = null;
    if ((nameNodeURL = getNameNodeURL()) == null) {
        nameNodeURL = hconf.get("fs.default.name");
    }
    URI namenodeURI = URI.create(nameNodeURL);
    //if (!GemFireCacheImpl.getExisting().isHadoopGfxdLonerMode()) {
    String authType = hconf.get("hadoop.security.authentication");
    //The following code handles Gemfire XD with secure HDFS
    //A static set is used to cache all known secure HDFS NameNode urls.
    UserGroupInformation.setConfiguration(hconf);
    //Compare authentication method ignoring case to make GFXD future version compliant
    //At least version 2.0.2 starts complaining if the string "kerberos" is not in all small case.
    //However it seems the current version of hadoop accepts the authType in any case
    if (authType.equalsIgnoreCase("kerberos")) {
        String principal = hconf.get(HoplogConfig.KERBEROS_PRINCIPAL);
        String keyTab = hconf.get(HoplogConfig.KERBEROS_KEYTAB_FILE);
        if (!PERFORM_SECURE_HDFS_CHECK) {
            if (logger.isDebugEnabled())
                logger.debug("{}Ignore secure hdfs check", logPrefix);
        } else {
            if (!secureNameNodes.contains(nameNodeURL)) {
                if (logger.isDebugEnabled())
                    logger.debug("{}Executing secure hdfs check", logPrefix);
                // Probe: an unauthenticated listing SUCCEEDING means the NameNode is
                // insecure while we are configured for kerberos — a mismatch. An
                // IOException here is the expected (secure) outcome.
                try {
                    filesystem = FileSystem.newInstance(namenodeURI, hconf);
                    //Make sure no IOExceptions are generated when accessing insecure HDFS.
                    filesystem.listFiles(new Path("/"), false);
                    throw new HDFSIOException(
                            "Gemfire XD HDFS client and HDFS cluster security levels do not match. The configured HDFS Namenode is not secured.");
                } catch (IOException ex) {
                    secureNameNodes.add(nameNodeURL);
                } finally {
                    //Close filesystem to avoid resource leak
                    if (filesystem != null) {
                        closeFileSystemIgnoreError(filesystem);
                    }
                }
            }
        }
        // check to ensure the namenode principal is defined
        String nameNodePrincipal = hconf.get("dfs.namenode.kerberos.principal");
        if (nameNodePrincipal == null) {
            throw new IOException(LocalizedStrings.GF_KERBEROS_NAMENODE_PRINCIPAL_UNDEF.toLocalizedString());
        }
        // ok, the user specified a gfxd principal so we will try to login
        if (principal != null) {
            //If NameNode principal is the same as Gemfire XD principal, there is a
            //potential security hole
            String regex = "[/@]";
            if (nameNodePrincipal != null) {
                String HDFSUser = nameNodePrincipal.split(regex)[0];
                String GFXDUser = principal.split(regex)[0];
                if (HDFSUser.equals(GFXDUser)) {
                    logger.warn(
                            LocalizedMessage.create(LocalizedStrings.HDFS_USER_IS_SAME_AS_GF_USER, GFXDUser));
                }
            }
            // a keytab must exist if the user specifies a principal
            if (keyTab == null) {
                throw new IOException(LocalizedStrings.GF_KERBEROS_KEYTAB_UNDEF.toLocalizedString());
            }
            // the keytab must exist as well
            File f = new File(keyTab);
            if (!f.exists()) {
                throw new FileNotFoundException(
                        LocalizedStrings.GF_KERBEROS_KEYTAB_FILE_ABSENT.toLocalizedString(f.getAbsolutePath()));
            }
            //Authenticate Gemfire XD principal to Kerberos KDC using Gemfire XD keytab file
            String principalWithValidHost = SecurityUtil.getServerPrincipal(principal, "");
            UserGroupInformation.loginUserFromKeytab(principalWithValidHost, keyTab);
        } else {
            logger.warn(LocalizedMessage.create(LocalizedStrings.GF_KERBEROS_PRINCIPAL_UNDEF));
        }
    }
    //}
    filesystem = getFileSystemFactory().create(namenodeURI, hconf, forceNew);
    if (logger.isDebugEnabled()) {
        logger.debug("{}Initialized FileSystem linked to " + filesystem.getUri() + " "
                + filesystem.hashCode(), logPrefix);
    }
    return filesystem;
}
From source file:com.huayu.metis.flume.sink.hdfs.HDFSEventSink.java
License:Apache License
/**
 * Performs Kerberos login for this sink (when Hadoop security is enabled) and
 * optionally creates a proxy-user UGI stored in the {@code proxyTicket} field.
 *
 * <p>Reads fields {@code kerbConfPrincipal}, {@code kerbKeytab}, {@code proxyUserName}
 * and {@code staticLogin}.
 *
 * @return true on success (including the no-security case), false on any failure
 */
private boolean authenticate() {
    // logic for kerberos login
    boolean useSecurity = UserGroupInformation.isSecurityEnabled();
    LOG.info("Hadoop Security enabled: " + useSecurity);
    if (useSecurity) {
        // sanity checking: both a principal and a readable keytab must be configured
        if (kerbConfPrincipal.isEmpty()) {
            LOG.error("Hadoop running in secure mode, but Flume config doesn't "
                    + "specify a principal to use for Kerberos auth.");
            return false;
        }
        if (kerbKeytab.isEmpty()) {
            LOG.error("Hadoop running in secure mode, but Flume config doesn't "
                    + "specify a keytab to use for Kerberos auth.");
            return false;
        } else {
            //If keytab is specified, user should want it take effect.
            //HDFSEventSink will halt when keytab file is non-exist or unreadable
            File kfile = new File(kerbKeytab);
            if (!(kfile.isFile() && kfile.canRead())) {
                throw new IllegalArgumentException(
                        "The keyTab file: " + kerbKeytab + " is nonexistent or can't read. "
                                + "Please specify a readable keytab file for Kerberos auth.");
            }
        }
        String principal;
        try {
            // resolves _HOST pattern using standard Hadoop search/replace
            // via DNS lookup when 2nd argument is empty
            principal = SecurityUtil.getServerPrincipal(kerbConfPrincipal, "");
        } catch (IOException e) {
            LOG.error("Host lookup error resolving kerberos principal ("
                    + kerbConfPrincipal + "). Exception follows.", e);
            return false;
        }
        Preconditions.checkNotNull(principal, "Principal must not be null");
        KerberosUser prevUser = staticLogin.get();
        KerberosUser newUser = new KerberosUser(principal, kerbKeytab);
        // be cruel and unusual when user tries to login as multiple principals
        // this isn't really valid with a reconfigure but this should be rare
        // enough to warrant a restart of the agent JVM
        // TODO: find a way to interrogate the entire current config state,
        // since we don't have to be unnecessarily protective if they switch all
        // HDFS sinks to use a different principal all at once.
        Preconditions.checkState(prevUser == null || prevUser.equals(newUser),
                "Cannot use multiple kerberos principals in the same agent. "
                        + " Must restart agent to use new principal or keytab. "
                        + "Previous = %s, New = %s", prevUser, newUser);
        // attempt to use cached credential if the user is the same
        // this is polite and should avoid flooding the KDC with auth requests
        UserGroupInformation curUser = null;
        if (prevUser != null && prevUser.equals(newUser)) {
            try {
                curUser = UserGroupInformation.getLoginUser();
            } catch (IOException e) {
                LOG.warn("User unexpectedly had no active login. Continuing with "
                        + "authentication", e);
            }
        }
        if (curUser == null || !curUser.getUserName().equals(principal)) {
            try {
                // static login
                kerberosLogin(this, principal, kerbKeytab);
            } catch (IOException e) {
                LOG.error("Authentication or file read error while attempting to "
                        + "login as kerberos principal (" + principal + ") using "
                        + "keytab (" + kerbKeytab + "). Exception follows.", e);
                return false;
            }
        } else {
            LOG.debug("{}: Using existing principal login: {}", this, curUser);
        }
        // we supposedly got through this unscathed... so store the static user
        staticLogin.set(newUser);
    }
    // hadoop impersonation works with or without kerberos security
    proxyTicket = null;
    if (!proxyUserName.isEmpty()) {
        try {
            proxyTicket = UserGroupInformation.createProxyUser(proxyUserName,
                    UserGroupInformation.getLoginUser());
        } catch (IOException e) {
            LOG.error("Unable to login as proxy user. Exception follows.", e);
            return false;
        }
    }
    UserGroupInformation ugi = null;
    if (proxyTicket != null) {
        ugi = proxyTicket;
    } else if (useSecurity) {
        try {
            ugi = UserGroupInformation.getLoginUser();
        } catch (IOException e) {
            LOG.error("Unexpected error: Unable to get authenticated user after "
                    + "apparent successful login! Exception follows.", e);
            return false;
        }
    }
    if (ugi != null) {
        // dump login information
        AuthenticationMethod authMethod = ugi.getAuthenticationMethod();
        LOG.info("Auth method: {}", authMethod);
        LOG.info(" User name: {}", ugi.getUserName());
        LOG.info(" Using keytab: {}", ugi.isFromKeytab());
        if (authMethod == AuthenticationMethod.PROXY) {
            UserGroupInformation superUser;
            try {
                superUser = UserGroupInformation.getLoginUser();
                LOG.info(" Superuser auth: {}", superUser.getAuthenticationMethod());
                LOG.info(" Superuser name: {}", superUser.getUserName());
                LOG.info(" Superuser using keytab: {}", superUser.isFromKeytab());
            } catch (IOException e) {
                LOG.error("Unexpected error: unknown superuser impersonating proxy.", e);
                return false;
            }
        }
        LOG.info("Logged in as user {}", ugi.getUserName());
        return true;
    }
    return true;
}
From source file:com.indeed.imhotep.builder.tsv.KerberosUtils.java
License:Apache License
private static void with(String principal, String keytabPath) throws IOException { log.info("Setting keytab file of " + keytabPath + ", and principal to " + principal); checkArgument(!Strings.isNullOrEmpty(principal), "Unable to use a null/empty principal for keytab"); checkArgument(!Strings.isNullOrEmpty(keytabPath), "Unable to use a null/empty keytab path"); // do hostname substitution final String realPrincipal = SecurityUtil.getServerPrincipal(principal, (String) null); // actually login try {/*ww w . j a va 2 s. co m*/ UserGroupInformation.loginUserFromKeytab(realPrincipal, keytabPath); } catch (IOException e) { checkKnownErrors(realPrincipal, e); throw e; } }
From source file:com.redsqirl.workflow.server.connect.ServerMain.java
License:Open Source License
public static void main(String[] arg) throws RemoteException { String userName = System.getProperty("user.name"); int port = 2001; if (arg.length > 0) { try {//www. java 2 s .com port = Integer.valueOf(arg[0]); } catch (Exception e) { port = 2001; } } //Loads preferences WorkflowPrefManager runner = WorkflowPrefManager.getInstance(); if (runner.isInit()) { // Loads in the log settings. BasicConfigurator.configure(); try { if (WorkflowPrefManager.getSysProperty("core.workflow_lib_path") != null) { Logger.getRootLogger().setLevel(Level.DEBUG); } else { Logger.getRootLogger().setLevel(Level.INFO); } Logger.getRootLogger().addAppender( new FileAppender(new PatternLayout("[%d{MMM dd HH:mm:ss}] %-5p (%F:%L) - %m%n"), WorkflowPrefManager.getPathuserpref() + "/redsqirl-workflow.log")); } catch (Exception e) { logger.error("Fail to write log in temporary folder"); } logger = Logger.getLogger(ServerMain.class); //Setup the user home if not setup yet WorkflowPrefManager.setupHome(); WorkflowPrefManager.createUserFooter(); NameNodeVar.set(WorkflowPrefManager.getSysProperty(WorkflowPrefManager.sys_namenode)); NameNodeVar.setJobTracker(WorkflowPrefManager.getSysProperty(WorkflowPrefManager.sys_jobtracker)); logger.debug("sys_namenode Path: " + NameNodeVar.get()); //Login on kerberos if necessary if (WorkflowPrefManager.isSecEnable()) { logger.info("Security enabled"); String hostname = WorkflowPrefManager.getProperty(WorkflowPrefManager.sys_sec_hostname); String keytabTemplate = WorkflowPrefManager .getProperty(WorkflowPrefManager.sys_keytab_pat_template); String realm = WorkflowPrefManager.getProperty(WorkflowPrefManager.sys_kerberos_realm); if (keytabTemplate != null) { try { String keytab = keytabTemplate.replaceAll("_USER", userName); try { //Update Hadoop security configurations NameNodeVar.addToDefaultConf(HADOOP_SECURITY_AUTHENTICATION, KERBEROS.toString()); NameNodeVar.addToDefaultConf(NameNodeVar.SERVER_KEYTAB_KEY, keytab); 
NameNodeVar.addToDefaultConf(NameNodeVar.SERVER_PRINCIPAL_KEY, userName + "/_HOST@" + realm); } catch (Exception e) { logger.error(e, e); } Configuration conf = NameNodeVar.getConf(); logger.info(NameNodeVar.getConfStr(conf)); SecurityUtil.setAuthenticationMethod(KERBEROS, conf); logger.info("Keytab: " + keytab); logger.info("user: " + userName); Process p = Runtime.getRuntime().exec("kinit -k -t " + keytab + " " + SecurityUtil .getServerPrincipal(conf.get(NameNodeVar.SERVER_PRINCIPAL_KEY), hostname)); p.waitFor(); } catch (Exception e) { logger.error("Fail to register to on kerberos: " + e, e); } } } else { logger.info("Security disabled"); } try { logger.debug("start server main"); String nameWorkflow = userName + "@wfm"; String nameHDFS = userName + "@hdfs"; String nameHDFSBrowser = userName + "@hdfsbrowser"; String nameHcat = userName + "@hcat"; String nameJdbc = userName + "@jdbc"; String nameSshArray = userName + "@ssharray"; String nameOozie = userName + "@oozie"; String namePrefs = userName + "@prefs"; String nameSuperActionManager = userName + "@samanager"; try { registry = LocateRegistry.createRegistry(port); logger.debug(" ---------------- create registry"); } catch (Exception e) { registry = LocateRegistry.getRegistry(port); logger.debug(" ---------------- Got registry"); } int i = 0; DataFlowInterface dfi = (DataFlowInterface) WorkflowInterface.getInstance(); while (i < 40) { try { registry.rebind(nameWorkflow, dfi); break; } catch (Exception e) { ++i; Thread.sleep(1000); logger.debug("Sleep " + i); } } logger.debug("nameWorkflow: " + nameWorkflow); registry.rebind(nameJdbc, (DataStore) new JdbcStore()); logger.debug("nameJdbc: " + nameJdbc); registry.rebind(nameHcat, (DataStore) new HCatStore()); logger.debug("nameJdbc: " + nameJdbc); registry.rebind(nameOozie, (JobManager) OozieManager.getInstance()); logger.debug("nameOozie: " + nameOozie); registry.rebind(nameSshArray, (SSHDataStoreArray) SSHInterfaceArray.getInstance()); 
logger.debug("nameSshArray: " + nameSshArray); registry.rebind(nameHDFS, (DataStore) new HDFSInterface()); logger.debug("nameHDFS: " + nameHDFS); registry.rebind(nameHDFSBrowser, (DataStore) new HDFSInterface()); logger.debug("nameHDFSBrowser: " + nameHDFSBrowser); registry.rebind(namePrefs, (PropertiesManager) WorkflowPrefManager.getProps()); logger.debug("namePrefs: " + namePrefs); logger.debug("nameHDFS: " + nameSuperActionManager); registry.rebind(nameSuperActionManager, (ModelManagerInt) new ModelManager()); logger.debug("end server main"); } catch (IOException e) { logger.error(e.getMessage(), e); System.exit(1); } catch (Exception e) { logger.error(e.getMessage(), e); System.exit(1); } } }