List of usage examples for org.apache.hadoop.hdfs HAUtil isHAEnabled
public static boolean isHAEnabled(Configuration conf, String nsId)
From source file: co.cask.cdap.operations.hdfs.HDFSInfo.java
License: Apache License
@Override public String getWebURL() { try {/*from w w w .j a va 2 s . c o m*/ if (HAUtil.isHAEnabled(conf, getNameService())) { URL haWebURL = getHAWebURL(); if (haWebURL != null) { return haWebURL.toString(); } } else { try (FileSystem fs = FileSystem.get(conf)) { URL webUrl = rpcToHttpAddress(fs.getUri()); if (webUrl != null) { return webUrl.toString(); } } } lastCollectFailed = false; } catch (Exception e) { // TODO: remove once CDAP-7887 is fixed if (!lastCollectFailed) { LOG.warn( "Error in determining HDFS URL. Web URL of HDFS will not be available in HDFS operational stats.", e); } lastCollectFailed = true; } return null; }
From source file: co.cask.cdap.operations.hdfs.HDFSNodes.java
License: Apache License
/**
 * Lists the namenode addresses for this cluster.
 *
 * @return in a non-HA setup, a singleton list holding the default FileSystem URI;
 *         in an HA setup, the service address of every configured namenode id
 * @throws IOException if the FileSystem cannot be obtained or closed
 */
private List<String> getNameNodes() throws IOException {
    // Non-HA: the single namenode is simply the default FileSystem URI.
    if (!HAUtil.isHAEnabled(conf, getNameService())) {
        try (FileSystem fs = FileSystem.get(conf)) {
            return Collections.singletonList(fs.getUri().toString());
        }
    }
    // HA: resolve the service address for each namenode id of the nameservice.
    String nameService = getNameService();
    List<String> namenodes = new ArrayList<>();
    for (String nnId : DFSUtil.getNameNodeIds(conf, nameService)) {
        namenodes.add(DFSUtil.getNamenodeServiceAddr(conf, nameService, nnId));
    }
    return namenodes;
}
From source file: org.apache.hive.jdbc.TestJdbcWithMiniHA.java
License: Apache License
@BeforeClass public static void beforeTest() throws Exception { Class.forName(MiniHS2.getJdbcDriverName()); conf = new HiveConf(); conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); String dataFileDir = conf.get("test.data.files").replace('\\', '/').replace("c:", ""); dataFilePath = new Path(dataFileDir, "kv1.txt"); DriverManager.setLoginTimeout(0); conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false); miniHS2 = new MiniHS2.Builder().withConf(conf).withMiniMR().withHA().build(); Map<String, String> overlayProps = new HashMap<String, String>(); overlayProps.put(ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, HATestSessionHook.class.getName()); miniHS2.start(overlayProps);// w ww .j a va 2 s . c om assertTrue(HAUtil.isHAEnabled(conf, DFSUtil.getNamenodeNameServiceId(conf))); createDb(); }
From source file: org.apache.twill.internal.yarn.AbstractYarnTwillService.java
License: Apache License
private static void cloneHaNnCredentials(Location location, UserGroupInformation ugi) throws IOException { Configuration hConf = getConfiguration(location.getLocationFactory()); String scheme = location.toURI().getScheme(); Map<String, Map<String, InetSocketAddress>> nsIdMap = DFSUtil.getHaNnRpcAddresses(hConf); for (Map.Entry<String, Map<String, InetSocketAddress>> entry : nsIdMap.entrySet()) { String nsId = entry.getKey(); Map<String, InetSocketAddress> addressesInNN = entry.getValue(); if (!HAUtil.isHAEnabled(hConf, nsId) || addressesInNN == null || addressesInNN.isEmpty()) { continue; }/*from w ww. j av a 2 s .co m*/ // The client may have a delegation token set for the logical // URI of the cluster. Clone this token to apply to each of the // underlying IPC addresses so that the IPC code can find it. URI uri = URI.create(scheme + "://" + nsId); LOG.info("Cloning delegation token for uri {}", uri); HAUtil.cloneDelegationTokenForLogicalUri(ugi, uri, addressesInNN.values()); } }