List of usage examples for org.apache.hadoop.hdfs.server.blockmanagement BlockManager getDatanodeManager
public DatanodeManager getDatanodeManager()
From source file: backup.namenode.NameNodeBackupServicePlugin.java
License: Apache License
private BackupWebService<Stats> getBackupWebService(UserGroupInformation ugi, BlockManager blockManager, NameNodeRestoreProcessor restoreProcessor) throws IOException { File reportPath = restoreProcessor.getReportPath(); return new BackupWebService<Stats>() { @Override//from www . j a va 2s. co m public StatsWritable getStats() throws IOException { StatsWritable stats = new StatsWritable(); Set<DatanodeDescriptor> datanodes = blockManager.getDatanodeManager().getDatanodes(); for (DatanodeInfo datanodeInfo : datanodes) { try { DataNodeBackupRPC backup = DataNodeBackupRPC.getDataNodeBackupRPC(datanodeInfo, getConf(), ugi); stats.add(backup.getBackupStats()); stats.add(backup.getRestoreStats()); } catch (Exception e) { LOG.error("Error while trying to read hdfs backup stats from datanode {}", datanodeInfo.getHostName()); } } return stats; } @Override public void runReport(boolean debug) throws IOException { restoreProcessor.runReport(debug); } @Override public List<String> listReports() throws IOException { Builder<String> builder = ImmutableList.builder(); if (!reportPath.exists()) { return builder.build(); } File[] list = reportPath.listFiles((dir, name) -> name.startsWith("report.")); if (list != null) { Arrays.sort(list, Collections.reverseOrder()); for (File f : list) { builder.add(f.getName()); } } return builder.build(); } @Override public InputStream getReport(String id) throws IOException { File file = new File(reportPath, id); if (file.exists()) { return new FileInputStream(file); } return null; } }; }