Usage example for the org.apache.hadoop.hdfs.protocol CacheDirectiveEntry constructor:
public CacheDirectiveEntry(CacheDirectiveInfo info, CacheDirectiveStats stats)
From source file:com.mellanox.r4h.DistributedFileSystem.java
License:Apache License
/** * List cache directives. Incrementally fetches results from the server. * /*from w ww . j a va2 s. co m*/ * @param filter * Filter parameters to use when listing the directives, null to * list all directives visible to us. * @return A RemoteIterator which returns CacheDirectiveInfo objects. */ public RemoteIterator<CacheDirectiveEntry> listCacheDirectives(CacheDirectiveInfo filter) throws IOException { if (filter == null) { filter = new CacheDirectiveInfo.Builder().build(); } if (filter.getPath() != null) { filter = new CacheDirectiveInfo.Builder(filter) .setPath(new Path(getPathName(fixRelativePart(filter.getPath())))).build(); } final RemoteIterator<CacheDirectiveEntry> iter = dfs.listCacheDirectives(filter); return new RemoteIterator<CacheDirectiveEntry>() { @Override public boolean hasNext() throws IOException { return iter.hasNext(); } @Override public CacheDirectiveEntry next() throws IOException { // Although the paths we get back from the NameNode should always be // absolute, we call makeQualified to add the scheme and authority of // this DistributedFilesystem. CacheDirectiveEntry desc = iter.next(); CacheDirectiveInfo info = desc.getInfo(); Path p = info.getPath().makeQualified(getUri(), getWorkingDirectory()); return new CacheDirectiveEntry(new CacheDirectiveInfo.Builder(info).setPath(p).build(), desc.getStats()); } }; }