Example usage for org.apache.hadoop.hdfs DFSClient getFileInfo

List of usage examples for org.apache.hadoop.hdfs DFSClient getFileInfo

Introduction

In this page you can find the example usage for org.apache.hadoop.hdfs DFSClient getFileInfo.

Prototype

public HdfsFileStatus getFileInfo(String src) throws IOException 

Source Link

Document

Get the file info for a specific file or directory.

Usage

From source file: com.sun.kohsuke.hadoop.importer.App.java

License: Open Source License

/**
 * Imports every regular file from a local directory into an HDFS directory,
 * skipping files whose remote copy already matches in size and modification time.
 *
 * <p>Usage: {@code java -jar importer.jar [HDFS URL] [local directory] [HDFS directory]}
 *
 * @param args {@code [0]} HDFS URL, {@code [1]} local source directory,
 *             {@code [2]} HDFS destination directory
 * @throws Exception on any unrecoverable I/O or configuration failure
 */
public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        System.out.println("Usage: java -jar importer.jar [HDFS URL] [local directory] [HDFS directory]");
        System.exit(-1);
    }

    Configuration conf = new Configuration();
    conf.set("fs.default.name", args[0]);
    DFSClient dfs = new DFSClient(conf);
    try {
        File in = new File(args[1]);
        String out = args[2];

        // Only import regular files; subdirectories are ignored.
        File[] children = in.listFiles(new FileFilter() {
            public boolean accept(File child) {
                return child.isFile();
            }
        });
        if (children == null) {
            // listFiles() returns null when the path is not a readable directory.
            System.out.println("No such directory exists: " + in);
            System.exit(-1);
        }
        int cnt = 1;
        for (File f : children) {
            String dest = out + '/' + f.getName();
            FileStatus remote = dfs.getFileInfo(dest);
            // Re-import when the file is absent remotely or size/mtime differ.
            if (remote == null || remote.getModificationTime() != f.lastModified()
                    || remote.getLen() != f.length()) {
                System.out.printf("(%d/%d) Importing %s\n", cnt, children.length, f);
                FileInputStream src = new FileInputStream(f);
                try {
                    // copyBytes(in, out, conf) closes both streams itself on success or failure.
                    IOUtils.copyBytes(src, dfs.create(dest, true), conf);
                    // Mirror the local timestamp so the skip check above works on the next run.
                    dfs.setTimes(dest, f.lastModified(), f.lastModified());
                } catch (RemoteException e) {
                    // Best-effort import: report the failure and continue with the next file.
                    e.printStackTrace();
                } finally {
                    // No-op if copyBytes already closed it; covers dfs.create() throwing first.
                    IOUtils.closeStream(src);
                }
            } else {
                System.out.printf("(%d/%d) Skipping %s\n", cnt, children.length, f);
            }
            cnt++;
        }
    } finally {
        // Original leaked the client; always release the RPC connection.
        dfs.close();
    }
}

From source file: org.opencloudengine.flamingo.mapreduce.util.HdfsUtils.java

License: Apache License

/**
 * Checks whether the given HDFS path refers to a regular file.
 *
 * @param client DFS Client
 * @param path   path to check
 * @return <tt>true</tt> if the path exists and is a regular file, <tt>false</tt> otherwise
 * @throws java.io.IOException on an HDFS I/O failure
 */
public static boolean isFile(DFSClient client, String path) throws IOException {
    HdfsFileStatus status = client.getFileInfo(path);
    // getFileInfo returns null for a nonexistent path; the original dereferenced
    // it unconditionally and threw NullPointerException instead of returning false.
    return status != null && !status.isDir();
}

From source file: org.opencloudengine.flamingo.mapreduce.util.HdfsUtils.java

License: Apache License

/**
 *  ? ?  ./*ww w. j  a  va  2 s  .c  o m*/
 *
 * @param client DFS Client
 * @param path   ?   
 * @return ? 
 * @throws java.io.IOException HDFS IO    
 */
public static HdfsFileStatus getFileInfo(DFSClient client, String path) throws IOException {
    return client.getFileInfo(path);
}

From source file: org.opencloudengine.flamingo.mapreduce.util.HdfsUtils.java

License: Apache License

/**
 *  ? ??  ?./*from  w w  w.j  a  v  a2 s .c  o  m*/
 *
 * @param hdfsUrl HDFS URL
 * @param path      ?  
 * @return  <tt>true</tt>
 * @throws java.io.IOException ?     , HDFS?    
 */
public static boolean isExist(String hdfsUrl, String path) throws IOException {
    DFSClient client = HdfsUtils.createDFSClient(hdfsUrl);
    HdfsFileStatus status = client.getFileInfo(path);
    if (status != null && !status.isDir()) {
        client.close();
        return true;
    }
    client.close();
    return false;
}