List of usage examples for org.apache.hadoop.security.UserGroupInformation#setAuthenticationMethod

public void setAuthenticationMethod(AuthMethod authMethod)

Note that UserGroupInformation also exposes an overload taking UserGroupInformation.AuthenticationMethod; several of the examples below call that variant.
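All of the examples below follow the same basic shape: build a UserGroupInformation with createRemoteUser, tag it with an authentication method, and run Hadoop calls inside doAs. Here is a minimal, self-contained sketch of that shape; the user name "hdfs-reader" and the default Configuration are placeholders, not values taken from any example below.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class SetAuthMethodSketch {
    public static void main(String[] args) throws Exception {
        // Build a UGI for a named user without asking the OS or Kerberos who we are.
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs-reader");

        // Record how this subject is considered authenticated; SIMPLE means the
        // user name is taken on trust (no Kerberos ticket involved).
        ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);

        // Any Hadoop calls made inside doAs run as this UGI.
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                FileSystem fs = FileSystem.get(new Configuration());
                System.out.println("Home dir: " + fs.getHomeDirectory());
                return null;
            }
        });
    }
}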
From source file: org.apache.ambari.view.filebrowser.HdfsApi.java
License: Apache License
private UserGroupInformation getProxyUser() throws IOException {
    UserGroupInformation proxyuser;
    if (params.containsKey("proxyuser")) {
        proxyuser = UserGroupInformation.createRemoteUser(params.get("proxyuser"));
    } else {
        proxyuser = UserGroupInformation.getCurrentUser();
    }
    proxyuser.setAuthenticationMethod(getAuthenticationMethod());
    return proxyuser;
}
From source file: org.apache.ambari.view.utils.hdfs.HdfsApi.java
License: Apache License
private UserGroupInformation getProxyUser() throws IOException {
    UserGroupInformation proxyuser;
    if (authParams.containsKey("proxyuser")) {
        proxyuser = UserGroupInformation.createRemoteUser(authParams.get("proxyuser"));
    } else {
        proxyuser = UserGroupInformation.getCurrentUser();
    }
    proxyuser.setAuthenticationMethod(getAuthenticationMethod());
    return proxyuser;
}
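The two Ambari examples above stamp an authentication method onto a UGI that stands in for another user. When the goal is impersonation, the usual next step is to wrap a real user with UserGroupInformation.createProxyUser. A hedged sketch of that step follows; the names "service-account" and "end-user" are placeholders, and the proxy call only succeeds if the cluster's hadoop.proxyuser.* rules allow it.

import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class ProxyUserSketch {
    public static void main(String[] args) throws Exception {
        // The service identity doing the impersonating ("service-account" is a placeholder).
        UserGroupInformation realUser = UserGroupInformation.createRemoteUser("service-account");
        realUser.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);

        // The end user being impersonated ("end-user" is a placeholder).
        UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser("end-user", realUser);

        // Filesystem operations inside doAs are performed as the proxied end user,
        // subject to the hadoop.proxyuser.* rules configured on the cluster.
        proxyUgi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                FileSystem fs = FileSystem.get(new Configuration());
                System.out.println("/tmp exists: " + fs.exists(new Path("/tmp")));
                return null;
            }
        });
    }
}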
From source file: org.apache.streams.hdfs.WebHdfsPersistReader.java
License: Apache License
private synchronized void connectToWebHDFS() {
    try {
        LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
        ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                Configuration conf = new Configuration();
                conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                LOGGER.info("WebURI : {}", getURI().toString());
                client = FileSystem.get(getURI(), conf);
                LOGGER.info("Connected to WebHDFS");
                /*
                 * This code is an example of how you would work with HDFS if you
                 * weren't going over the webHDFS protocol.
                 *
                 * Smashew: 2013-10-01
                 *
                 * conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
                 * conf.set("namenode.host", "0.0.0.0");
                 * conf.set("hadoop.job.ugi", userName);
                 * conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
                 * fileSystem.createNewFile(new Path("/user/" + userName + "/test"));
                 * FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
                 * for (int i = 0; i < status.length; i++) {
                 *     LOGGER.info("Directory: {}", status[i].getPath());
                 * }
                 */
                return null;
            }
        });
    } catch (Exception e) {
        LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again", e);
    }
}
From source file: org.apache.streams.hdfs.WebHdfsPersistWriter.java
License: Apache License
private synchronized void connectToWebHDFS() {
    try {
        LOGGER.info("User : {}", this.hdfsConfiguration.getUser());
        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(this.hdfsConfiguration.getUser());
        ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);
        ugi.doAs(new PrivilegedExceptionAction<Void>() {
            public Void run() throws Exception {
                Configuration conf = new Configuration();
                conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
                LOGGER.info("WebURI : {}", getURI().toString());
                client = FileSystem.get(getURI(), conf);
                LOGGER.info("Connected to WebHDFS");
                /*
                 * This code is an example of how you would work with HDFS if you
                 * weren't going over the webHDFS protocol.
                 *
                 * Smashew: 2013-10-01
                 *
                 * conf.set("fs.defaultFS", "hdfs://hadoop.mdigitallife.com:8020/user/" + userName);
                 * conf.set("namenode.host", "0.0.0.0");
                 * conf.set("hadoop.job.ugi", userName);
                 * conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "runner");
                 * fileSystem.createNewFile(new Path("/user/" + userName + "/test"));
                 * FileStatus[] status = fs.listStatus(new Path("/user/" + userName));
                 * for (int i = 0; i < status.length; i++) {
                 *     LOGGER.info("Directory: {}", status[i].getPath());
                 * }
                 */
                return null;
            }
        });
    } catch (Exception e) {
        LOGGER.error("There was an error connecting to WebHDFS, please check your settings and try again", e);
        throw new RuntimeException(e);
    }
}
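Both Streams examples mark the UGI as SIMPLE while setting hadoop.security.authentication to "kerberos" in the per-connection Configuration; whether that mismatch is deliberate depends on the target cluster. The sketch below keeps the two settings consistent and trims the pattern to its essentials; the user name, host, and port are placeholders, not values from the examples above.

import java.net.URI;
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class WebHdfsConnectSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder user and endpoint; substitute your own.
        String user = "streams";
        URI uri = URI.create("webhdfs://namenode.example.com:50070/");

        UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
        ugi.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.SIMPLE);

        FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
            public FileSystem run() throws Exception {
                Configuration conf = new Configuration();
                // Keep the wire auth consistent with the SIMPLE UGI above.
                conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
                return FileSystem.get(uri, conf);
            }
        });
        System.out.println("Connected: " + fs.getUri());
    }
}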
From source file: oz.hadoop.yarn.test.cluster.InJvmContainerExecutor.java
License: Apache License
/**
 * Builds a UserGroupInformation for launching the given container, reading the
 * container's tokens from the final container tokens file in its working directory.
 *
 * @param container the container being launched
 * @param containerWorkDir the container's working directory
 * @return a UGI with TOKEN authentication and the container's tokens attached
 */
private UserGroupInformation buildUgiForContainerLaunching(Container container, final Path containerWorkDir) {
    UserGroupInformation ugi;
    try {
        ugi = UserGroupInformation.createRemoteUser(UserGroupInformation.getLoginUser().getUserName());
        ugi.setAuthenticationMethod(AuthMethod.TOKEN);
        String filePath = new Path(containerWorkDir, ContainerLaunch.FINAL_CONTAINER_TOKENS_FILE).toString();
        Credentials credentials = Credentials.readTokenStorageFile(new File(filePath), this.getConf());
        Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
        for (Token<? extends TokenIdentifier> token : tokens) {
            ugi.addToken(token);
        }
    } catch (Exception e) {
        throw new IllegalArgumentException(
                "Failed to build UserGroupInformation to launch container " + container, e);
    }
    return ugi;
}
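The token side of this pattern can be exercised on its own: read a Credentials file, attach its tokens to a TOKEN-authenticated UGI, and confirm they landed via getTokens(). A sketch follows, under the assumption that a token storage file exists at the placeholder path /tmp/container_tokens; in the YARN example above that file is the container tokens file inside the container's working directory.

import java.io.File;
import java.util.Collection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenUgiSketch {
    public static void main(String[] args) throws Exception {
        // Placeholder path; substitute a real token storage file.
        File tokenFile = new File("/tmp/container_tokens");

        UserGroupInformation ugi =
                UserGroupInformation.createRemoteUser(UserGroupInformation.getLoginUser().getUserName());
        // TOKEN marks the UGI as authenticating with delegation/container tokens
        // rather than Kerberos or trusted user names.
        ugi.setAuthenticationMethod(AuthMethod.TOKEN);

        // Read the serialized credentials and attach each token to the UGI.
        Credentials credentials = Credentials.readTokenStorageFile(tokenFile, new Configuration());
        for (Token<? extends TokenIdentifier> token : credentials.getAllTokens()) {
            ugi.addToken(token);
        }

        Collection<Token<? extends TokenIdentifier>> attached = ugi.getTokens();
        System.out.println("Tokens attached: " + attached.size());
    }
}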