List of usage examples for org.apache.hadoop.hdfs HdfsConfiguration set
public void set(String name, String value)
Sets the value of the {@code name} property. From source file: co.cask.cdap.operations.hdfs.HDFSInfo.java
License:Apache License
@Nullable private URL getHAWebURL() throws IOException { String activeNamenode = null; String nameService = getNameService(); HdfsConfiguration hdfsConf = new HdfsConfiguration(conf); String nameNodePrincipal = conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, ""); hdfsConf.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY, nameNodePrincipal); for (String nnId : DFSUtil.getNameNodeIds(conf, nameService)) { HAServiceTarget haServiceTarget = new NNHAServiceTarget(hdfsConf, nameService, nnId); HAServiceProtocol proxy = haServiceTarget.getProxy(hdfsConf, 10000); HAServiceStatus serviceStatus = proxy.getServiceStatus(); if (HAServiceProtocol.HAServiceState.ACTIVE != serviceStatus.getState()) { continue; }/*from w w w .j a v a2s . c o m*/ activeNamenode = DFSUtil.getNamenodeServiceAddr(hdfsConf, nameService, nnId); } if (activeNamenode == null) { throw new IllegalStateException("Could not find an active namenode"); } return rpcToHttpAddress(URI.create(activeNamenode)); }
From source file:io.confluent.connect.hdfs.TestWithSecureMiniDFSCluster.java
License:Apache License
private Configuration createSecureConfig(String dataTransferProtection) throws Exception { HdfsConfiguration conf = new HdfsConfiguration(); SecurityUtil.setAuthenticationMethod(UserGroupInformation.AuthenticationMethod.KERBEROS, conf); conf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal); conf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab); conf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, hdfsPrincipal); conf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab); conf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, spnegoPrincipal); conf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true); conf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, dataTransferProtection); conf.set(DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name()); conf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0"); conf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0"); conf.setInt(IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SASL_KEY, 10); conf.set(DFS_ENCRYPT_DATA_TRANSFER_KEY, "true");//https://issues.apache.org/jira/browse/HDFS-7431 String keystoresDir = baseDir.getAbsolutePath(); String sslConfDir = KeyStoreTestUtil.getClasspathDir(this.getClass()); KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, conf, false); return conf;/*from w ww . j av a 2 s . com*/ }
From source file:org.apache.ambari.view.slider.SliderAppsViewControllerImpl.java
License:Apache License
private <T> T invokeHDFSClientRunnable(final HDFSClientRunnable<T> runnable, final Map<String, String> hadoopConfigs) throws IOException, InterruptedException { ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(getClass().getClassLoader()); try {/*w w w .j a v a 2 s . c o m*/ boolean securityEnabled = Boolean.valueOf(hadoopConfigs.get("security_enabled")); final HdfsConfiguration hdfsConfiguration = new HdfsConfiguration(); for (Entry<String, String> entry : hadoopConfigs.entrySet()) { hdfsConfiguration.set(entry.getKey(), entry.getValue()); } UserGroupInformation.setConfiguration(hdfsConfiguration); UserGroupInformation sliderUser; String loggedInUser = getUserToRunAs(hadoopConfigs); if (securityEnabled) { String viewPrincipal = getViewParameterValue(PARAM_VIEW_PRINCIPAL); String viewPrincipalKeytab = getViewParameterValue(PARAM_VIEW_PRINCIPAL_KEYTAB); UserGroupInformation ambariUser = UserGroupInformation .loginUserFromKeytabAndReturnUGI(viewPrincipal, viewPrincipalKeytab); if (loggedInUser.equals(ambariUser.getShortUserName())) { // HDFS throws exception when caller tries to impresonate themselves. // User: admin@EXAMPLE.COM is not allowed to impersonate admin sliderUser = ambariUser; } else { sliderUser = UserGroupInformation.createProxyUser(loggedInUser, ambariUser); } } else { sliderUser = UserGroupInformation.getBestUGI(null, loggedInUser); } try { T value = sliderUser.doAs(new PrivilegedExceptionAction<T>() { @Override public T run() throws Exception { String fsPath = hadoopConfigs.get("fs.defaultFS"); FileSystem fs = FileSystem.get(URI.create(fsPath), hdfsConfiguration); try { return runnable.run(fs); } finally { fs.close(); } } }); return value; } catch (UndeclaredThrowableException e) { throw e; } } finally { Thread.currentThread().setContextClassLoader(currentClassLoader); } }
From source file:org.testifyproject.resource.hdfs.MiniDFSResource.java
License:Apache License
@Override public HdfsConfiguration configure(TestContext testContext, LocalResource localResource, PropertiesReader configReader) { String testName = testContext.getName(); String hdfsDirectory = fileSystemUtil.createPath("target", "hdfs", testName); HdfsConfiguration configuration = new HdfsConfiguration(); configuration.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDirectory); return configuration; }
From source file:org.testifyproject.resource.hdfs.MiniDFSResource.java
License:Apache License
@Override public LocalResourceInstance<MiniDFSCluster, DistributedFileSystem> start(TestContext testContext, LocalResource localResource, HdfsConfiguration config) throws Exception { String hdfsDirectory = config.get(MiniDFSCluster.HDFS_MINIDFS_BASEDIR); fileSystemUtil.recreateDirectory(hdfsDirectory); config.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsDirectory); MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(config); hdfsCluster = builder.build();//from w w w . j a v a 2 s . c o m fileSystem = hdfsCluster.getFileSystem(); return LocalResourceInstanceBuilder.builder().resource(hdfsCluster).client(fileSystem).build("hdfs", localResource); }