List of usage examples for org.apache.hadoop.fs.FileStatus#isSymlink()
public boolean isSymlink()
From source file:ch.cern.db.hdfs.Main.java
License:GNU General Public License
private void printFileStatus(FileStatus status) { System.out.println();//ww w .j ava2 s. com System.out.println("Showing metadata for: " + status.getPath()); System.out.println(" isDirectory: " + status.isDirectory()); System.out.println(" isFile: " + status.isFile()); System.out.println(" isSymlink: " + status.isSymlink()); System.out.println(" encrypted: " + status.isEncrypted()); System.out.println(" length: " + status.getLen()); System.out.println(" replication: " + status.getReplication()); System.out.println(" blocksize: " + status.getBlockSize()); System.out.println(" modification_time: " + new Date(status.getModificationTime())); System.out.println(" access_time: " + new Date(status.getAccessTime())); System.out.println(" owner: " + status.getOwner()); System.out.println(" group: " + status.getGroup()); System.out.println(" permission: " + status.getPermission()); System.out.println(); }
From source file:com.mellanox.r4h.DistributedFileSystem.java
License:Apache License
/** * Move blocks from srcs to trg and delete srcs afterwards. * The file block sizes must be the same. * /*w w w . ja va2 s. c o m*/ * @param trg * existing file to append to * @param psrcs * list of files (same block size, same replication) * @throws IOException */ @Override public void concat(Path trg, Path[] psrcs) throws IOException { statistics.incrementWriteOps(1); // Make target absolute Path absF = fixRelativePart(trg); // Make all srcs absolute Path[] srcs = new Path[psrcs.length]; for (int i = 0; i < psrcs.length; i++) { srcs[i] = fixRelativePart(psrcs[i]); } // Try the concat without resolving any links String[] srcsStr = new String[psrcs.length]; try { for (int i = 0; i < psrcs.length; i++) { srcsStr[i] = getPathName(srcs[i]); } dfs.concat(getPathName(trg), srcsStr); } catch (UnresolvedLinkException e) { // Exception could be from trg or any src. // Fully resolve trg and srcs. Fail if any of them are a symlink. FileStatus stat = getFileLinkStatus(absF); if (stat.isSymlink()) { throw new IOException("Cannot concat with a symlink target: " + trg + " -> " + stat.getPath()); } absF = fixRelativePart(stat.getPath()); for (int i = 0; i < psrcs.length; i++) { stat = getFileLinkStatus(srcs[i]); if (stat.isSymlink()) { throw new IOException( "Cannot concat with a symlink src: " + psrcs[i] + " -> " + stat.getPath()); } srcs[i] = fixRelativePart(stat.getPath()); } // Try concat again. Can still race with another symlink. for (int i = 0; i < psrcs.length; i++) { srcsStr[i] = getPathName(srcs[i]); } dfs.concat(getPathName(absF), srcsStr); } }
From source file:com.mellanox.r4h.DistributedFileSystem.java
License:Apache License
@Override public FileStatus getFileLinkStatus(final Path f) throws AccessControlException, FileNotFoundException, UnsupportedFileSystemException, IOException { statistics.incrementReadOps(1);//from www . j a v a2 s . c o m final Path absF = fixRelativePart(f); FileStatus status = new FileSystemLinkResolver<FileStatus>() { @Override public FileStatus doCall(final Path p) throws IOException, UnresolvedLinkException { HdfsFileStatus fi = dfs.getFileLinkInfo(getPathName(p)); if (fi != null) { return fi.makeQualified(getUri(), p); } else { throw new FileNotFoundException("File does not exist: " + p); } } @Override public FileStatus next(final FileSystem fs, final Path p) throws IOException, UnresolvedLinkException { return fs.getFileLinkStatus(p); } }.resolve(this, absF); // Fully-qualify the symlink if (status.isSymlink()) { Path targetQual = FSLinkResolver.qualifySymlinkTarget(this.getUri(), status.getPath(), status.getSymlink()); status.setSymlink(targetQual); } return status; }
From source file:com.streamsets.pipeline.stage.origin.hdfs.spooler.HdfsFile.java
License:Apache License
@SuppressWarnings("unchecked") public Map<String, Object> getFileMetadata() throws IOException { FileStatus file = fs.getFileStatus(filePath); Map<String, Object> metadata = new HashMap<>(); metadata.put(HeaderAttributeConstants.FILE_NAME, file.getPath().getName()); metadata.put(HeaderAttributeConstants.FILE, file.getPath().toUri().getPath()); metadata.put(HeaderAttributeConstants.LAST_MODIFIED_TIME, file.getModificationTime()); metadata.put(HeaderAttributeConstants.LAST_ACCESS_TIME, file.getAccessTime()); metadata.put(HeaderAttributeConstants.IS_DIRECTORY, file.isDirectory()); metadata.put(HeaderAttributeConstants.IS_SYMBOLIC_LINK, file.isSymlink()); metadata.put(HeaderAttributeConstants.SIZE, file.getLen()); metadata.put(HeaderAttributeConstants.OWNER, file.getOwner()); metadata.put(HeaderAttributeConstants.GROUP, file.getGroup()); metadata.put(HeaderAttributeConstants.BLOCK_SIZE, file.getBlockSize()); metadata.put(HeaderAttributeConstants.REPLICATION, file.getReplication()); metadata.put(HeaderAttributeConstants.IS_ENCRYPTED, file.isEncrypted()); FsPermission permission = file.getPermission(); if (permission != null) { metadata.put(PERMISSIONS, permission.toString()); }// w w w . java 2 s. c o m return metadata; }
From source file:com.uber.hoodie.common.table.timeline.dto.FileStatusDTO.java
License:Apache License
public static FileStatusDTO fromFileStatus(FileStatus fileStatus) { if (null == fileStatus) { return null; }//from w w w .j a v a 2s. c o m FileStatusDTO dto = new FileStatusDTO(); try { dto.path = FilePathDTO.fromPath(fileStatus.getPath()); dto.length = fileStatus.getLen(); dto.isdir = fileStatus.isDirectory(); dto.blockReplication = fileStatus.getReplication(); dto.blocksize = fileStatus.getBlockSize(); dto.modificationTime = fileStatus.getModificationTime(); dto.accessTime = fileStatus.getModificationTime(); dto.symlink = fileStatus.isSymlink() ? FilePathDTO.fromPath(fileStatus.getSymlink()) : null; safeReadAndSetMetadata(dto, fileStatus); } catch (IOException ioe) { throw new HoodieException(ioe); } return dto; }
From source file:eagle.security.hdfs.entity.FileStatusEntity.java
License:Apache License
/**
 * Builds an entity snapshot from a Hadoop {@link FileStatus}.
 * The path itself is intentionally not copied here.
 */
public FileStatusEntity(FileStatus status) throws IOException {
    this.length = status.getLen();
    this.blocksize = status.getBlockSize();
    this.block_replication = status.getReplication();
    this.isdir = status.isDirectory();
    this.modification_time = status.getModificationTime();
    this.access_time = status.getAccessTime();
    this.owner = status.getOwner();
    this.group = status.getGroup();
    this.permission = status.getPermission();
    // getSymlink() throws for non-links, so only read it when the status is one
    if (status.isSymlink()) {
        this.symlink = status.getSymlink();
    }
}
From source file:fr.ens.biologie.genomique.eoulsan.data.protocols.PathDataProtocol.java
License:LGPL
@Override public DataFileMetadata getMetadata(final DataFile src) throws IOException { if (!exists(src, true)) { throw new FileNotFoundException("File not found: " + src); }//from w w w .java 2s .co m final Path path = getPath(src); final FileStatus status = path.getFileSystem(this.conf).getFileStatus(path); final SimpleDataFileMetadata result = new SimpleDataFileMetadata(); result.setContentLength(status.getLen()); result.setLastModified(status.getModificationTime()); result.setDataFormat(DataFormatRegistry.getInstance().getDataFormatFromFilename(src.getName())); final CompressionType ct = CompressionType.getCompressionTypeByFilename(src.getSource()); if (ct != null) { result.setContentEncoding(ct.getContentEncoding()); } if (status.isDirectory()) { result.setDirectory(true); } if (status.isSymlink()) { result.setSymbolicLink(new DataFile(status.getSymlink().toUri())); } return result; }
From source file:fr.ens.biologie.genomique.eoulsan.data.protocols.PathDataProtocol.java
License:LGPL
@Override public boolean exists(final DataFile src, final boolean followLink) { final Path path = getPath(src); try {//w w w. j a va2s . com final FileSystem fs = path.getFileSystem(conf); final FileStatus status = fs.getFileStatus(path); if (status == null) { return false; } if (status.isSymlink()) { return fs.getFileStatus(fs.getLinkTarget(path)) != null; } return true; } catch (IOException e) { return false; } }
From source file:gobblin.util.filesystem.InstrumentedFileSystemUtils.java
License:Apache License
/** * Replace the scheme of the input {@link FileStatus} if it matches the string to replace. *///w w w . j a va2 s. c om public static FileStatus replaceScheme(FileStatus st, String replace, String replacement) { if (replace != null && replace.equals(replacement)) { return st; } try { return new FileStatus(st.getLen(), st.isDir(), st.getReplication(), st.getBlockSize(), st.getModificationTime(), st.getAccessTime(), st.getPermission(), st.getOwner(), st.getGroup(), st.isSymlink() ? st.getSymlink() : null, replaceScheme(st.getPath(), replace, replacement)); } catch (IOException ioe) { throw new RuntimeException(ioe); } }
From source file:hdfs.jsr203.attribute.HadoopFileAttributeView.java
License:Apache License
Object attribute(AttrID id, FileStatus hfas) { switch (id) { case accessTime: return hfas.getAccessTime(); case blockSize: return hfas.getBlockSize(); case group://from ww w. java2 s .c o m return hfas.getGroup(); case len: return hfas.getLen(); case modificationTime: return hfas.getModificationTime(); case owner: return hfas.getOwner(); case replication: return hfas.getReplication(); case isDirectory: return hfas.isDirectory(); // TODO enable encryption //case isEncrypted: // return hfas.isEncrypted(); case isFile: return hfas.isFile(); case isSymLink: return hfas.isSymlink(); } return null; }