Example usage for org.apache.hadoop.hdfs DFSConfigKeys DFS_NAMENODE_HTTP_ADDRESS_KEY

List of usage examples for org.apache.hadoop.hdfs DFSConfigKeys DFS_NAMENODE_HTTP_ADDRESS_KEY

Introduction

On this page you can find example usages of org.apache.hadoop.hdfs DFSConfigKeys DFS_NAMENODE_HTTP_ADDRESS_KEY.

Prototype

String DFS_NAMENODE_HTTP_ADDRESS_KEY

To view the source code for org.apache.hadoop.hdfs DFSConfigKeys DFS_NAMENODE_HTTP_ADDRESS_KEY, follow the Source Link below.

Click Source Link

Usage

From source file:co.cask.cdap.operations.hdfs.HDFSInfo.java

License: Apache License

/**
 * Derives the NameNode web UI URL for the host of the given RPC URI.
 *
 * <p>The scheme and port come from the HDFS configuration (HTTPS when
 * {@code dfs.https.enable} is set, plain HTTP otherwise); the host is
 * taken from the RPC URI itself.
 *
 * @param rpcURI the NameNode RPC URI to translate
 * @return the corresponding web URL, or {@code null} if the RPC URI has no host
 * @throws MalformedURLException if the resulting URL cannot be constructed
 */
@Nullable
private URL rpcToHttpAddress(URI rpcURI) throws MalformedURLException {
    String rpcHost = rpcURI.getHost();
    if (rpcHost == null) {
        return null;
    }
    boolean useHttps = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
            DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);
    // Read the configured web address for whichever scheme is in effect.
    String webAddress;
    if (useHttps) {
        webAddress = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
                DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT);
    } else {
        webAddress = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
                DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_DEFAULT);
    }
    // Only the port is used from the configured address; the host comes from the RPC URI.
    int webPort = NetUtils.createSocketAddr(webAddress).getPort();
    String scheme = useHttps ? "https" : "http";
    return new URL(scheme, rpcHost, webPort, "");
}

From source file:common.NameNode.java

License: Apache License

/**
 * Resolves the NameNode HTTP server address from configuration,
 * falling back to {@code 0.0.0.0:50070} when the key is unset.
 */
protected InetSocketAddress getHttpServerAddress(Configuration conf) {
    String configuredAddress = conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "0.0.0.0:50070");
    return NetUtils.createSocketAddr(configuredAddress);
}

From source file:common.NameNode.java

License: Apache License

/**
 * Publishes the HTTP server's actual host:port back into the configuration
 * under the NameNode HTTP address key.
 */
protected void setHttpServerAddress(Configuration conf) {
    String hostPort = getHostPortString(httpAddress);
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, hostPort);
}

From source file:io.hops.transaction.TestTransaction.java

License: Apache License

/**
 * Starts a single-NameNode mini cluster (no DataNodes) for the test class
 * and waits until it is active.
 */
@BeforeClass
public static void setupCluster() throws Exception {
    Configuration conf = new HdfsConfiguration();

    // Bind the NameNode web UI to an ephemeral port so concurrent test runs do not collide.
    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "0.0.0.0:0");
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY, true);
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_WRITE_KEY, true);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 10);

    cluster = new MiniDFSCluster.Builder(conf)
            .numDataNodes(0)
            .storagesPerDatanode(1)
            .build();
    cluster.waitActive();
}

From source file:org.kitesdk.data.spi.filesystem.TestHDFSDatasetURIs.java

License: Apache License

/**
 * Verifies that a dataset created through a webhdfs repository URI can be
 * loaded back via an absolute webhdfs dataset URI with matching location,
 * descriptor, namespace, and name.
 */
@Test
public void testAbsoluteWebHdfs() {
    // webhdfs URIs are not supported on Hadoop 1.
    Assume.assumeTrue(!Hadoop.isHadoop1());

    String authority = getConfiguration().get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
    String repoUri = "repo:webhdfs://" + authority + "/tmp/data";
    String datasetUri = "dataset:webhdfs://" + authority + "/tmp/data/ns/test";

    DatasetRepository repo = DatasetRepositories.repositoryFor(repoUri);
    // Start from a clean slate in case an earlier run left state behind.
    repo.delete("ns", "test");
    repo.create("ns", "test", descriptor);

    Dataset<Object> loaded = Datasets.<Object, Dataset<Object>>load(datasetUri, Object.class);

    Assert.assertNotNull("Should load dataset", loaded);
    Assert.assertTrue(loaded instanceof FileSystemDataset);
    Assert.assertEquals("Locations should match",
            URI.create("webhdfs://" + authority + "/tmp/data/ns/test"),
            loaded.getDescriptor().getLocation());
    Assert.assertEquals("Descriptors should match",
            repo.load("ns", "test").getDescriptor(), loaded.getDescriptor());
    Assert.assertEquals("Should report correct namespace", "ns", loaded.getNamespace());
    Assert.assertEquals("Should report correct name", "test", loaded.getName());

    repo.delete("ns", "test");
}