List of usage examples for org.apache.hadoop.hdfs.protocol HdfsFileStatus EMPTY_NAME
Declaration: public static final byte[] EMPTY_NAME — an empty byte array used as the "start after" marker to list a directory from the beginning.
To view the source code for org.apache.hadoop.hdfs.protocol.HdfsFileStatus.EMPTY_NAME, click the Source Link.
From source file:MStress_Client.java
License:Open Source License
private static int listDFSPaths() { Date alpha = new Date(); int inodeCount = 0; String basePath = new String(TEST_BASE_DIR) + "/" + hostName_ + "_" + processName_; Queue<String> pending = new LinkedList<String>(); pending.add(basePath);/*from w ww . j ava 2 s . c om*/ while (!pending.isEmpty()) { String parent = pending.remove(); DirectoryListing thisListing; try { thisListing = dfsClient_.listPaths(parent, HdfsFileStatus.EMPTY_NAME); if (thisListing == null || thisListing.getPartialListing().length == 0) { //System.out.println("Empty directory"); continue; } do { HdfsFileStatus[] children = thisListing.getPartialListing(); for (int i = 0; i < children.length; i++) { String localName = children[i].getLocalName(); //System.out.printf("Readdir going through [%s/%s]\n", parent, localName); if (localName.equals(".") || localName.equals("..")) { continue; } inodeCount++; if (inodeCount % COUNT_INCR == 0) { System.out.printf("Readdir paths so far: %d\n", inodeCount); } if (children[i].isDir()) { pending.add(parent + "/" + localName); } } if (!thisListing.hasMore()) { break; } else { //System.out.println("Remaining entries " + Integer.toString(thisListing.getRemainingEntries())); } thisListing = dfsClient_.listPaths(parent, thisListing.getLastName()); } while (thisListing != null); } catch (IOException e) { e.printStackTrace(); return -1; } } Date zigma = new Date(); System.out.printf("Client: Directory walk done over %d inodes in %d msec\n", inodeCount, timeDiffMilliSec(alpha, zigma)); return 0; }
From source file:com.mellanox.r4h.DistributedFileSystem.java
License:Apache License
private FileStatus[] listStatusInternal(Path p) throws IOException { String src = getPathName(p);//w ww .j a va 2 s.c o m // fetch the first batch of entries in the directory DirectoryListing thisListing = dfs.listPaths(src, HdfsFileStatus.EMPTY_NAME); if (thisListing == null) { // the directory does not exist throw new FileNotFoundException("File " + p + " does not exist."); } HdfsFileStatus[] partialListing = thisListing.getPartialListing(); if (!thisListing.hasMore()) { // got all entries of the directory FileStatus[] stats = new FileStatus[partialListing.length]; for (int i = 0; i < partialListing.length; i++) { stats[i] = partialListing[i].makeQualified(getUri(), p); } statistics.incrementReadOps(1); return stats; } // The directory size is too big that it needs to fetch more // estimate the total number of entries in the directory int totalNumEntries = partialListing.length + thisListing.getRemainingEntries(); ArrayList<FileStatus> listing = new ArrayList<FileStatus>(totalNumEntries); // add the first batch of entries to the array list for (HdfsFileStatus fileStatus : partialListing) { listing.add(fileStatus.makeQualified(getUri(), p)); } statistics.incrementLargeReadOps(1); // now fetch more entries do { thisListing = dfs.listPaths(src, thisListing.getLastName()); if (thisListing == null) { // the directory is deleted throw new FileNotFoundException("File " + p + " does not exist."); } partialListing = thisListing.getPartialListing(); for (HdfsFileStatus fileStatus : partialListing) { listing.add(fileStatus.makeQualified(getUri(), p)); } statistics.incrementLargeReadOps(1); } while (thisListing.hasMore()); return listing.toArray(new FileStatus[listing.size()]); }