Example usage for org.apache.hadoop.hdfs.protocol DatanodeInfo getRemaining

Introduction

This page lists example usages of org.apache.hadoop.hdfs.protocol.DatanodeInfo.getRemaining().

Prototype

public long getRemaining() 

Document

The raw free space remaining on the datanode, in bytes.
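
Before the project sources below, here is a minimal sketch of reading this value from a live cluster. It assumes the FileSystem resolves to a DistributedFileSystem and uses a placeholder NameNode URI (hdfs://localhost:9000); getDataNodeStats() returns one DatanodeInfo per datanode.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class RemainingSpaceReport {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder URI; point this at your own NameNode.
        try (FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000"), conf)) {
            DistributedFileSystem dfs = (DistributedFileSystem) fs;
            for (DatanodeInfo dn : dfs.getDataNodeStats()) {
                // getRemaining() is the raw free space on the datanode, in bytes.
                System.out.printf("%s remaining=%d bytes%n",
                        dn.getHostName(), dn.getRemaining());
            }
        }
    }
}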

Usage

From source file:com.bigstep.datalake.JsonUtil.java

License:Apache License

/** Convert a DatanodeInfo to a Json map. */
static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
    if (datanodeinfo == null) {
        return null;
    }

    // TODO: Fix storageID
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("ipAddr", datanodeinfo.getIpAddr());
    // 'name' is equivalent to ipAddr:xferPort. Older clients (1.x, 0.23.x)
    // expect this single field instead of the two separate ones.
    m.put("name", datanodeinfo.getXferAddr());
    m.put("hostName", datanodeinfo.getHostName());
    m.put("storageID", datanodeinfo.getDatanodeUuid());
    m.put("xferPort", datanodeinfo.getXferPort());
    m.put("infoPort", datanodeinfo.getInfoPort());
    m.put("infoSecurePort", datanodeinfo.getInfoSecurePort());
    m.put("ipcPort", datanodeinfo.getIpcPort());

    m.put("capacity", datanodeinfo.getCapacity());
    m.put("dfsUsed", datanodeinfo.getDfsUsed());
    m.put("remaining", datanodeinfo.getRemaining());
    m.put("blockPoolUsed", datanodeinfo.getBlockPoolUsed());
    m.put("cacheCapacity", datanodeinfo.getCacheCapacity());
    m.put("cacheUsed", datanodeinfo.getCacheUsed());
    m.put("lastUpdate", datanodeinfo.getLastUpdate());
    m.put("lastUpdateMonotonic", datanodeinfo.getLastUpdateMonotonic());
    m.put("xceiverCount", datanodeinfo.getXceiverCount());
    m.put("networkLocation", datanodeinfo.getNetworkLocation());
    m.put("adminState", datanodeinfo.getAdminState().name());
    return m;
}
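
A hypothetical caller (not part of the project above, and assumed to live in the same package, since toJsonMap is package-private) could build the map and read back the entry filled from getRemaining():

    // Hypothetical usage sketch: convert a DatanodeInfo and read "remaining".
    Map<String, Object> m = JsonUtil.toJsonMap(datanodeinfo);
    if (m != null) {
        long remaining = (Long) m.get("remaining"); // raw free space in bytes
        System.out.println("remaining bytes on " + m.get("hostName") + ": " + remaining);
    }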

From source file:org.openflamingo.remote.thrift.thriftfs.ThriftUtils.java

License:Apache License

public static DatanodeInfo toThrift(org.apache.hadoop.hdfs.protocol.DatanodeInfo node,
        Map<DatanodeID, Integer> thriftPorts) {
    if (node == null) {
        return new DatanodeInfo();
    }

    DatanodeInfo ret = new DatanodeInfo();
    ret.name = node.getName();
    ret.storageID = node.storageID;
    ret.host = node.getHost();
    Integer p = thriftPorts.get(node);
    if (p == null) {
        LOG.warn("Unknown Thrift port for datanode " + node.name);
        ret.thriftPort = Constants.UNKNOWN_THRIFT_PORT;
    } else {
        ret.thriftPort = p.intValue();
    }

    ret.capacity = node.getCapacity();
    ret.dfsUsed = node.getDfsUsed();
    ret.remaining = node.getRemaining();
    ret.xceiverCount = node.getXceiverCount();
    ret.state = node.isDecommissioned() ? DatanodeState.DECOMMISSIONED
            : node.isDecommissionInProgress() ? DatanodeState.DECOMMISSION_INPROGRESS
                    : DatanodeState.NORMAL_STATE;
    ret.httpPort = node.getInfoPort();

    long timestamp = node.getLastUpdate();
    long currentTime = System.currentTimeMillis();
    ret.millisSinceUpdate = currentTime - timestamp;

    return ret;
}
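
As a follow-up sketch (not from the project above), the same two fields that the Thrift conversion copies can be combined into a utilization figure, guarding against datanodes that have not yet reported any capacity:

// Hypothetical helper: percentage of raw capacity still free on a datanode.
static double percentRemaining(org.apache.hadoop.hdfs.protocol.DatanodeInfo node) {
    long capacity = node.getCapacity();
    if (capacity <= 0) {
        return 0.0; // no usable capacity reported yet
    }
    return node.getRemaining() * 100.0 / capacity;
}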