Example usage for org.apache.hadoop.hdfs.protocol DatanodeInfo getXferAddr

List of usage examples for org.apache.hadoop.hdfs.protocol DatanodeInfo getXferAddr

Introduction

On this page you can find example usages of org.apache.hadoop.hdfs.protocol DatanodeInfo getXferAddr.

Prototype

public String getXferAddr() 
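
Before the listed sources, here is a minimal, hypothetical sketch (not taken from any of the source files below, and assuming fs.defaultFS points at a reachable HDFS namenode) that obtains DatanodeInfo objects through DistributedFileSystem.getDataNodeStats() and prints each datanode's transfer address:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class XferAddrExample {
    public static void main(String[] args) throws Exception {
        // Assumes the client configuration resolves to an HDFS cluster.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        if (fs instanceof DistributedFileSystem) {
            DistributedFileSystem dfs = (DistributedFileSystem) fs;
            // One DatanodeInfo per datanode reported by the namenode.
            for (DatanodeInfo dn : dfs.getDataNodeStats()) {
                // getXferAddr() returns "ipAddr:xferPort", the block transfer endpoint.
                System.out.println(dn.getHostName() + " -> " + dn.getXferAddr());
            }
        }
        fs.close();
    }
}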

Source Link

Usage

From source file:com.bigstep.datalake.JsonUtil.java

License:Apache License

/** Convert a DatanodeInfo to a Json map. */
static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
    if (datanodeinfo == null) {
        return null;
    }

    // TODO: Fix storageID
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("ipAddr", datanodeinfo.getIpAddr());
    // 'name' is equivalent to ipAddr:xferPort. Older clients (1.x, 0.23.x)
    // expect this instead of the two fields.
    m.put("name", datanodeinfo.getXferAddr());
    m.put("hostName", datanodeinfo.getHostName());
    m.put("storageID", datanodeinfo.getDatanodeUuid());
    m.put("xferPort", datanodeinfo.getXferPort());
    m.put("infoPort", datanodeinfo.getInfoPort());
    m.put("infoSecurePort", datanodeinfo.getInfoSecurePort());
    m.put("ipcPort", datanodeinfo.getIpcPort());

    m.put("capacity", datanodeinfo.getCapacity());
    m.put("dfsUsed", datanodeinfo.getDfsUsed());
    m.put("remaining", datanodeinfo.getRemaining());
    m.put("blockPoolUsed", datanodeinfo.getBlockPoolUsed());
    m.put("cacheCapacity", datanodeinfo.getCacheCapacity());
    m.put("cacheUsed", datanodeinfo.getCacheUsed());
    m.put("lastUpdate", datanodeinfo.getLastUpdate());
    m.put("lastUpdateMonotonic", datanodeinfo.getLastUpdateMonotonic());
    m.put("xceiverCount", datanodeinfo.getXceiverCount());
    m.put("networkLocation", datanodeinfo.getNetworkLocation());
    m.put("adminState", datanodeinfo.getAdminState().name());
    return m;
}
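
A hypothetical follow-up (not from the source above): the map returned by toJsonMap can be serialized with Jackson's ObjectMapper, which is typically available on the Hadoop classpath, so that the resulting JSON carries the value of getXferAddr() under the "name" key:

static String toJsonString(final DatanodeInfo datanodeinfo) throws java.io.IOException {
    // writeValueAsString renders the TreeMap as a JSON object; "name" holds getXferAddr().
    return new com.fasterxml.jackson.databind.ObjectMapper().writeValueAsString(toJsonMap(datanodeinfo));
}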

From source file:io.hops.experiments.benchmarks.blockreporting.TinyDatanodes.java

License:Apache License

private ExtendedBlock addBlocks(ClientProtocol nameNodeProto, DatanodeProtocol datanodeProto, String fileName,
        String clientName) throws IOException, SQLException {
    ExtendedBlock prevBlock = null;
    for (int jdx = 0; jdx < blocksPerFile; jdx++) {
        LocatedBlock loc = null;
        try {
            loc = nameNodeProto.addBlock(fileName, clientName, prevBlock, helper.getExcludedDatanodes());
            prevBlock = loc.getBlock();
            for (DatanodeInfo dnInfo : loc.getLocations()) {
                // Resolve the replica's datanode by transfer address; binarySearch
                // assumes the datanodes array is sorted by that address.
                int dnIdx = Arrays.binarySearch(datanodes, dnInfo.getXferAddr());
                datanodes[dnIdx].addBlock(loc.getBlock().getLocalBlock());
                // Report the new replica back to the namenode as received.
                ReceivedDeletedBlockInfo[] rdBlocks = {
                        new ReceivedDeletedBlockInfo(loc.getBlock().getLocalBlock(),
                                ReceivedDeletedBlockInfo.BlockStatus.RECEIVED_BLOCK, null) };
                StorageReceivedDeletedBlocks[] report = { new StorageReceivedDeletedBlocks(
                        datanodes[dnIdx].dnRegistration.getStorageID(), rdBlocks) };
                datanodeProto.blockReceivedAndDeleted(datanodes[dnIdx].dnRegistration,
                        loc.getBlock().getBlockPoolId(), report);
            }
        } catch (IndexOutOfBoundsException e) {
            System.out.println(e);
            System.out.println("Located block " + Arrays.toString(loc.getLocations()));
            System.out.println("Excluded Nodes are " + Arrays.toString(helper.getExcludedDatanodes()));
        }
    }
    return prevBlock;
}
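
The lookup above resolves each replica's datanode with Arrays.binarySearch on getXferAddr(), which requires the datanodes array to be sorted by transfer address. A hypothetical alternative sketch (not from the source above) indexes a block's locations in a map keyed by the transfer address:

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;

/** Hypothetical helper: index a block's replica locations by their transfer address. */
static Map<String, DatanodeInfo> indexByXferAddr(LocatedBlock block) {
    final Map<String, DatanodeInfo> byAddr = new HashMap<String, DatanodeInfo>();
    for (DatanodeInfo dn : block.getLocations()) {
        // "ipAddr:xferPort" uniquely identifies a datanode's transfer endpoint.
        byAddr.put(dn.getXferAddr(), dn);
    }
    return byAddr;
}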