Example usage for org.apache.hadoop.fs Path toUri

List of usage examples for org.apache.hadoop.fs Path toUri

Introduction

On this page you can find usage examples for org.apache.hadoop.fs.Path.toUri().

Prototype

public URI toUri() 

Document

Convert this Path to a URI.
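
For orientation, here is a minimal sketch of what toUri() returns and how the raw path component is extracted; the host and file name are hypothetical, but the pattern matches the dfs.lookup(path.toUri().getRawPath()) calls in the examples below.

import java.net.URI;
import org.apache.hadoop.fs.Path;

// Illustrative only: hypothetical HDFS URI, not taken from the examples below
Path path = new Path("hdfs://namenode:9000/user/alice/data.txt");
URI uri = path.toUri();
String scheme = uri.getScheme();     // "hdfs"
String rawPath = uri.getRawPath();   // "/user/alice/data.txt"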

Usage

From source file: com.ibm.crail.hdfs.CrailHadoopFileSystem.java

License: Apache License

@Override
public FileStatus getFileStatus(Path path) throws IOException {
    CrailNode directFile = null;
    try {
        directFile = dfs.lookup(path.toUri().getRawPath()).get();
    } catch (Exception e) {
        throw new IOException(e);
    }
    if (directFile == null) {
        throw new FileNotFoundException("File does not exist: " + path);
    }
    FsPermission permission = FsPermission.getFileDefault();
    if (directFile.getType().isDirectory()) {
        permission = FsPermission.getDirDefault();
    }
    FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(),
            CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(),
            directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER,
            path.makeQualified(this.getUri(), this.workingDir));
    return status;
}

From source file: com.ibm.crail.hdfs.CrailHadoopFileSystem.java

License: Apache License

@Override
public BlockLocation[] getFileBlockLocations(Path path, long start, long len) throws IOException {
    try {
        CrailBlockLocation[] _locations = dfs.lookup(path.toUri().getRawPath()).get().asFile()
                .getBlockLocations(start, len);
        BlockLocation[] locations = new BlockLocation[_locations.length];
        for (int i = 0; i < locations.length; i++) {
            locations[i] = new BlockLocation();
            locations[i].setOffset(_locations[i].getOffset());
            locations[i].setLength(_locations[i].getLength());
            locations[i].setNames(_locations[i].getNames());
            locations[i].setHosts(_locations[i].getHosts());
            locations[i].setTopologyPaths(_locations[i].getTopology());
        }
        return locations;
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public FSDataOutputStream createInternal(Path path, EnumSet<CreateFlag> flag, FsPermission absolutePermission,
        int bufferSize, short replication, long blockSize, Progressable progress, ChecksumOpt checksumOpt,
        boolean createParent) throws AccessControlException, FileAlreadyExistsException, FileNotFoundException,
        ParentNotDirectoryException, UnsupportedFileSystemException, UnresolvedLinkException, IOException {
    CrailFile fileInfo = null;
    try {
        fileInfo = dfs.create(path.toUri().getRawPath(), CrailNodeType.DATAFILE, CrailStorageClass.PARENT,
                CrailLocationClass.PARENT).get().asFile();
    } catch (Exception e) {
        if (e.getMessage().contains(RpcErrors.messages[RpcErrors.ERR_PARENT_MISSING])) {
            fileInfo = null;
        } else {
            throw new IOException(e);
        }
    }

    if (fileInfo == null) {
        Path parent = path.getParent();
        this.mkdir(parent, FsPermission.getDirDefault(), true);
        try {
            fileInfo = dfs.create(path.toUri().getRawPath(), CrailNodeType.DATAFILE, CrailStorageClass.PARENT,
                    CrailLocationClass.PARENT).get().asFile();
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    CrailBufferedOutputStream outputStream = null;
    if (fileInfo != null) {
        try {
            fileInfo.syncDir();
            outputStream = fileInfo.getBufferedOutputStream(Integer.MAX_VALUE);
        } catch (Exception e) {
            throw new IOException(e);
        }
    } else {
        throw new IOException("Failed to create file, path " + path.toString());
    }

    if (outputStream != null) {
        return new CrailHDFSOutputStream(outputStream, statistics);
    } else {
        throw new IOException("Failed to create file, path " + path.toString());
    }
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public void mkdir(Path path, FsPermission permission, boolean createParent) throws AccessControlException,
        FileAlreadyExistsException, FileNotFoundException, UnresolvedLinkException, IOException {
    try {
        CrailDirectory file = dfs.create(path.toUri().getRawPath(), CrailNodeType.DIRECTORY,
                CrailStorageClass.PARENT, CrailLocationClass.DEFAULT).get().asDirectory();
        file.syncDir();
    } catch (Exception e) {
        if (e.getMessage().contains(RpcErrors.messages[RpcErrors.ERR_PARENT_MISSING])) {
            Path parent = path.getParent();
            mkdir(parent, permission, createParent);
            mkdir(path, permission, createParent);
        } else if (e.getMessage().contains(RpcErrors.messages[RpcErrors.ERR_FILE_EXISTS])) {
            // directory already exists; nothing to do
        } else {
            throw new IOException(e);
        }
    }
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public boolean delete(Path path, boolean recursive)
        throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    try {
        CrailNode file = dfs.delete(path.toUri().getRawPath(), recursive).get();
        if (file != null) {
            file.syncDir();
        }
        return file != null;
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public FSDataInputStream open(Path path, int bufferSize)
        throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    CrailFile fileInfo = null;
    try {
        fileInfo = dfs.lookup(path.toUri().getRawPath()).get().asFile();
    } catch (Exception e) {
        throw new IOException(e);
    }

    CrailBufferedInputStream inputStream = null;
    if (fileInfo != null) {
        try {
            inputStream = fileInfo.getBufferedInputStream(fileInfo.getCapacity());
        } catch (Exception e) {
            throw new IOException(e);
        }
    }

    if (inputStream != null) {
        return new CrailHDFSInputStream(inputStream);
    } else {
        throw new IOException("Failed to open file, path " + path.toString());
    }
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public void renameInternal(Path src, Path dst) throws AccessControlException, FileAlreadyExistsException,
        FileNotFoundException, ParentNotDirectoryException, UnresolvedLinkException, IOException {
    try {
        CrailNode file = dfs.rename(src.toUri().getRawPath(), dst.toUri().getRawPath()).get();
        if (file != null) {
            file.syncDir();
        }
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public FileStatus getFileStatus(Path path)
        throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    CrailNode directFile = null;
    try {
        directFile = dfs.lookup(path.toUri().getRawPath()).get();
    } catch (Exception e) {
        throw new IOException(e);
    }
    if (directFile == null) {
        throw new FileNotFoundException("filename " + path);
    }

    FsPermission permission = FsPermission.getFileDefault();
    if (directFile.getType().isDirectory()) {
        permission = FsPermission.getDirDefault();
    }
    FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(),
            CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE, directFile.getModificationTime(),
            directFile.getModificationTime(), permission, CrailConstants.USER, CrailConstants.USER,
            path.makeQualified(this.getUri(), this.workingDir));
    return status;
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public BlockLocation[] getFileBlockLocations(Path path, long start, long len)
        throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    try {
        CrailBlockLocation[] _locations = dfs.lookup(path.toUri().getRawPath()).get().asFile()
                .getBlockLocations(start, len);
        BlockLocation[] locations = new BlockLocation[_locations.length];
        for (int i = 0; i < locations.length; i++) {
            locations[i] = new BlockLocation();
            locations[i].setOffset(_locations[i].getOffset());
            locations[i].setLength(_locations[i].getLength());
            locations[i].setNames(_locations[i].getNames());
            locations[i].setHosts(_locations[i].getHosts());
            locations[i].setTopologyPaths(_locations[i].getTopology());
        }
        return locations;
    } catch (Exception e) {
        throw new IOException(e);
    }
}

From source file: com.ibm.crail.hdfs.CrailHDFS.java

License: Apache License

@Override
public FileStatus[] listStatus(Path path)
        throws AccessControlException, FileNotFoundException, UnresolvedLinkException, IOException {
    try {
        CrailNode node = dfs.lookup(path.toUri().getRawPath()).get();
        Iterator<String> iter = node.getType() == CrailNodeType.DIRECTORY ? node.asDirectory().listEntries()
                : node.asMultiFile().listEntries();
        ArrayList<FileStatus> statusList = new ArrayList<FileStatus>();
        while (iter.hasNext()) {
            String filepath = iter.next();
            CrailNode directFile = dfs.lookup(filepath).get();
            if (directFile != null) {
                FsPermission permission = FsPermission.getFileDefault();
                if (directFile.getType().isDirectory()) {
                    permission = FsPermission.getDirDefault();
                }
                FileStatus status = new FileStatus(directFile.getCapacity(), directFile.getType().isContainer(),
                        CrailConstants.SHADOW_REPLICATION, CrailConstants.BLOCK_SIZE,
                        directFile.getModificationTime(), directFile.getModificationTime(), permission,
                        CrailConstants.USER, CrailConstants.USER,
                        new Path(filepath).makeQualified(this.getUri(), workingDir));
                statusList.add(status);
            }
        }
        FileStatus[] list = new FileStatus[statusList.size()];
        statusList.toArray(list);
        return list;
    } catch (Exception e) {
        throw new FileNotFoundException(path.toUri().getRawPath());
    }
}