Example usage for org.apache.hadoop.hdfs.protocol DatanodeInfo getNetworkLocation

List of usage examples for org.apache.hadoop.hdfs.protocol DatanodeInfo getNetworkLocation

Introduction

On this page you can find an example usage for org.apache.hadoop.hdfs.protocol DatanodeInfo getNetworkLocation.

Prototype

@Override
public String getNetworkLocation() 

Source Link

Document

network location

Usage

From source file:com.bigstep.datalake.JsonUtil.java

License:Apache License

/**
 * Converts a {@link DatanodeInfo} into a map suitable for JSON serialization.
 *
 * @param datanodeinfo the datanode description to convert; may be null
 * @return a sorted map of the datanode's fields, or null when the input is null
 */
static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
    if (datanodeinfo == null) {
        return null;
    }

    // TreeMap keeps the keys sorted so the serialized output is deterministic.
    // TODO: Fix storageID
    final Map<String, Object> json = new TreeMap<String, Object>();

    // Addressing and identity fields.
    json.put("ipAddr", datanodeinfo.getIpAddr());
    // Older clients (1.x, 0.23.x) expect a single "name" field equivalent to
    // ipAddr:xferPort rather than the two separate fields.
    json.put("name", datanodeinfo.getXferAddr());
    json.put("hostName", datanodeinfo.getHostName());
    json.put("storageID", datanodeinfo.getDatanodeUuid());
    json.put("xferPort", datanodeinfo.getXferPort());
    json.put("infoPort", datanodeinfo.getInfoPort());
    json.put("infoSecurePort", datanodeinfo.getInfoSecurePort());
    json.put("ipcPort", datanodeinfo.getIpcPort());

    // Capacity and status metrics.
    json.put("capacity", datanodeinfo.getCapacity());
    json.put("dfsUsed", datanodeinfo.getDfsUsed());
    json.put("remaining", datanodeinfo.getRemaining());
    json.put("blockPoolUsed", datanodeinfo.getBlockPoolUsed());
    json.put("cacheCapacity", datanodeinfo.getCacheCapacity());
    json.put("cacheUsed", datanodeinfo.getCacheUsed());
    json.put("lastUpdate", datanodeinfo.getLastUpdate());
    json.put("lastUpdateMonotonic", datanodeinfo.getLastUpdateMonotonic());
    json.put("xceiverCount", datanodeinfo.getXceiverCount());
    json.put("networkLocation", datanodeinfo.getNetworkLocation());
    json.put("adminState", datanodeinfo.getAdminState().name());
    return json;
}

From source file:com.sun.grid.herd.HerdJsv.java

License:Open Source License

/**
 * Takes the list of HDFS data blocks and determines what racks house the
 * blocks and how many block replicas are in each rack.
 *
 * @param blocks a list of HDFS data blocks; may be null
 * @return a map of racks, where the key is the rack name and the value
 * is the number of block replicas in that rack; never null
 */
private static Map<String, Integer> collateRacks(Collection<LocatedBlock> blocks) {
    Map<String, Integer> racks = new HashMap<String, Integer>();

    if (blocks != null) {
        for (LocatedBlock block : blocks) {
            for (DatanodeInfo node : block.getLocations()) {
                String loc = node.getNetworkLocation();

                // Single lookup instead of containsKey + get + put.
                Integer count = racks.get(loc);
                racks.put(loc, (count == null) ? 1 : count + 1);
            }
        }
    }

    return racks;
}