Example usage for org.apache.hadoop.hdfs DFSClient create

List of usage examples for org.apache.hadoop.hdfs DFSClient create

Introduction

In this page you can find the example usage for org.apache.hadoop.hdfs DFSClient create.

Prototype

public OutputStream create(String src, boolean overwrite) throws IOException 

Source Link

Document

Calls #create(String,boolean,short,long,Progressable) with the default replication factor, the default block size, and a null progress callback.

Usage

From source file:com.sun.kohsuke.hadoop.importer.App.java

License:Open Source License

/**
 * Imports every regular file from a local directory into an HDFS directory.
 * A file is skipped when an HDFS copy already exists with the same
 * modification time and length; otherwise it is (re)uploaded and its HDFS
 * timestamps are set to match the local file.
 *
 * Usage: java -jar importer.jar [HDFS URL] [local directory] [HDFS directory]
 *
 * @param args HDFS URL, local source directory, HDFS destination directory
 * @throws Exception on unrecoverable configuration or I/O failure
 */
public static void main(String[] args) throws Exception {
    if (args.length != 3) {
        System.out.println("Usage: java -jar importer.jar [HDFS URL] [local directory] [HDFS directory]");
        System.exit(-1);
    }

    Configuration conf = new Configuration();
    // NOTE(review): "fs.default.name" is the legacy key (newer Hadoop uses
    // "fs.defaultFS"); kept as-is for compatibility with the Hadoop version
    // this example targets.
    conf.set("fs.default.name", args[0]);
    DFSClient dfs = new DFSClient(conf);

    File in = new File(args[1]);
    String out = args[2];

    // Only regular files are imported; subdirectories are ignored.
    File[] children = in.listFiles(new FileFilter() {
        public boolean accept(File child) {
            return child.isFile();
        }
    });
    if (children == null) {
        // listFiles() returns null when 'in' is not a directory or is unreadable.
        System.out.println("No such directory exists: " + in);
        System.exit(-1);
    }
    int cnt = 1;
    for (File f : children) {
        String dest = out + '/' + f.getName();
        FileStatus i = dfs.getFileInfo(dest);
        // Upload when the remote file is missing or its metadata differs.
        if (i == null || i.getModificationTime() != f.lastModified() || i.getLen() != f.length()) {
            System.out.printf("(%d/%d) Importing %s%n", cnt, children.length, f);
            FileInputStream src = new FileInputStream(f);
            try {
                // copyBytes(in, out, conf) closes both streams when it runs;
                // the explicit closeStream below covers the case where
                // dfs.create() throws before copyBytes takes ownership.
                IOUtils.copyBytes(src, dfs.create(dest, true), conf);
                // Mirror the local timestamp so the skip check above holds
                // on the next run.
                dfs.setTimes(dest, f.lastModified(), f.lastModified());
            } catch (RemoteException e) {
                // Failed to create the remote file: report and continue
                // with the remaining files.
                e.printStackTrace();
            } finally {
                // Null-safe, exception-swallowing close; double-close of an
                // already-closed FileInputStream is harmless.
                IOUtils.closeStream(src);
            }
        } else {
            System.out.printf("(%d/%d) Skipping %s%n", cnt, children.length, f);
        }
        cnt++;
    }
    dfs.close();
}

From source file:org.opencloudengine.flamingo.mapreduce.util.HdfsUtils.java

License:Apache License

/**
 * Creates a file in HDFS through the supplied DFS client and returns a
 * stream for writing its contents. The caller owns the returned stream and
 * must close it.
 *
 * @param client    DFS client used to create the file
 * @param filename  HDFS path of the file to create
 * @param overwrite whether an existing file at the path may be replaced
 * @return an output stream positioned at the start of the new file
 * @throws java.io.IOException if the file cannot be created in HDFS
 */
public static OutputStream getOutputStream(DFSClient client, String filename, boolean overwrite)
        throws IOException {
    OutputStream stream = client.create(filename, overwrite);
    return stream;
}

From source file:util.ResourceFile.java

License:Open Source License

/**
 * Opens an output stream for the given file URI. HDFS URIs are created
 * through a DFSClient (overwriting any existing file); other URIs are
 * created on the local file system. When the URI denotes a gzip file, the
 * stream is wrapped in a GZIPOutputStream.
 *
 * @param fileURI destination file URI; routed to HDFS when
 *                isHDFSFile(fileURI) is true
 * @return an open stream the caller must close
 * @throws IOException if the file cannot be created or the GZIP header
 *                     cannot be written
 */
public static OutputStream getOutputStream(String fileURI) throws IOException {
    OutputStream os;

    if (isHDFSFile(fileURI)) {
        // Meta data file is a file within HDFS.
        // NOTE(review): the DFSClient is never closed; closing it here would
        // invalidate the returned stream, so it is left open — confirm the
        // caller's lifecycle accounts for this.
        Configuration conf = new Configuration();
        DFSClient dfsClient = new DFSClient(conf);
        String hdfsFileName = hdfsFileName(fileURI);

        // Create file in HDFS, overwriting any existing file.
        os = dfsClient.create(hdfsFileName, true);
    } else {
        // Create file on the local file system.
        File metaFile = new File(fileURI);
        os = new FileOutputStream(metaFile);
    }
    // Layer GZIP compression on top if 'requested'.
    if (isGzipFile(fileURI)) {
        try {
            os = new GZIPOutputStream(os);
        } catch (IOException e) {
            // The GZIPOutputStream constructor writes the GZIP header and can
            // fail; close the underlying stream so it is not leaked.
            os.close();
            throw e;
        }
    }
    return os;
}