wanggang1987.bigdataapi.hadoopapi.HadoopClientAPI.java Source code

Java tutorial

Introduction

Here is the source code for wanggang1987.bigdataapi.hadoopapi.HadoopClientAPI.java

Source

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package wanggang1987.bigdataapi.hadoopapi;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 *
 * @author wanggang
 */
/**
 * Thin client wrapper around the HDFS {@link FileSystem} / {@link DistributedFileSystem}
 * APIs: node listing, file listing, existence checks, file/dir creation, and simple
 * UTF-8 text copy/read helpers.
 *
 * <p>Not thread-safe: {@link #init(String, String)} mutates shared state. Call
 * {@code init} once before any other method; the other methods assume it succeeded.
 */
public class HadoopClientAPI {

    private FileSystem fs = null;
    private DistributedFileSystem hdfs = null;
    private String HADOOP_URL = null;
    // Initialized here rather than in a constructor: the original declared
    // "public void HadoopClientAPI()" which is a plain method, NOT a constructor,
    // so conf stayed null after "new HadoopClientAPI()" and init() threw NPE.
    private Configuration conf = new Configuration();
    private final Logger logger = LoggerFactory.getLogger(HadoopClientAPI.class);

    /**
     * @deprecated Misnamed pseudo-constructor kept only for source compatibility with
     *             existing callers; {@code conf} is now initialized at field declaration,
     *             so calling this is unnecessary (but harmless).
     */
    @Deprecated
    public void HadoopClientAPI() {
        conf = new Configuration();
    }

    /**
     * Connects to the HDFS cluster at {@code hdfsURL} acting as user {@code usr}.
     * Failures are logged, not thrown; on failure {@code fs}/{@code hdfs} stay null.
     *
     * @param usr     value for the HADOOP_USER_NAME system property
     * @param hdfsURL default filesystem URI, e.g. {@code hdfs://namenode:8020}
     */
    public void init(String usr, String hdfsURL) {
        try {
            System.setProperty("HADOOP_USER_NAME", usr);
            HADOOP_URL = hdfsURL;
            FileSystem.setDefaultUri(conf, HADOOP_URL);
            fs = FileSystem.get(conf);
            // ClassCastException here (also caught below) means the URI did not
            // point at a DistributedFileSystem (e.g. a local file:// URI).
            hdfs = (DistributedFileSystem) fs;
        } catch (Exception e) {
            logger.error("init failed", e);
        }
    }

    /**
     * Lists the hostnames of all live DataNodes in the cluster.
     *
     * @return hostname list; empty (never null) when the lookup fails
     */
    public ArrayList<String> listDataNodes() {
        ArrayList<String> list = new ArrayList<>();
        try {
            for (DatanodeInfo dataNodeStat : hdfs.getDataNodeStats()) {
                list.add(dataNodeStat.getHostName());
            }
        } catch (Exception e) {
            logger.error("listDataNodes failed", e);
        }
        return list;
    }

    /**
     * Lists the full paths of all regular files (directories excluded) directly
     * under {@code path} — non-recursive.
     *
     * @param path HDFS directory to list
     * @return file path strings; empty (never null) when the listing fails
     */
    public ArrayList<String> listFiles(String path) {
        ArrayList<String> list = new ArrayList<>();
        try {
            for (FileStatus file : hdfs.listStatus(new Path(path))) {
                if (!file.isDirectory()) {
                    list.add(file.getPath().toString());
                }
            }
        } catch (IllegalArgumentException | IOException e) {
            logger.error("listFiles failed", e);
        }
        return list;
    }

    /**
     * Checks whether {@code path} exists in HDFS.
     *
     * @param path file or directory path
     * @return true when it exists; false when it does not or the check fails
     */
    public boolean isExist(String path) {
        boolean exist = false;
        try {
            exist = hdfs.exists(new Path(path));
        } catch (IllegalArgumentException | IOException e) {
            logger.error("checkFileExist failed", e);
        }
        return exist;
    }

    /**
     * Creates an empty file at {@code path}, refusing to touch an existing one.
     *
     * @param path HDFS path of the file to create
     * @return true when a new file was created; false when it already existed or
     *         creation failed
     */
    public boolean createFile(String path) {
        if (isExist(path)) {
            return false;
        }
        boolean ret = false;
        // try-with-resources: the original leaked the FSDataOutputStream,
        // keeping the lease on the file open.
        try (FSDataOutputStream os = hdfs.create(new Path(path), true)) {
            ret = (os != null);
        } catch (IllegalArgumentException | IOException e) {
            logger.error("createFile failed", e);
        }
        return ret;
    }

    /**
     * Creates the directory {@code path} (including missing parents), refusing to
     * touch an existing path.
     *
     * @param path HDFS directory path to create
     * @return true when the directory was created; false otherwise
     */
    public boolean createDir(String path) {
        if (isExist(path)) {
            return false;
        }
        boolean ret = false;
        try {
            ret = hdfs.mkdirs(new Path(path));
        } catch (IllegalArgumentException | IOException e) {
            // was "createFile failed" — wrong method name in the log
            logger.error("createDir failed", e);
        }
        return ret;
    }

    /**
     * Copies a hard-coded local UTF-8 text file line-by-line into a hard-coded
     * HDFS path, overwriting the target. Demo/sample method — paths are fixed.
     */
    public void copyFileToHDFS() {
        Path f = new Path("/user/xxx/input02/file01");
        File file = new File("E:\\hadoopTest\\temporary.txt");
        // try-with-resources closes every stream even on exception; the original
        // closed them only on the success path.
        try (BufferedReader br = new BufferedReader(
                     new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
             Writer out = new OutputStreamWriter(fs.create(f, true), StandardCharsets.UTF_8)) {
            String str;
            while ((str = br.readLine()) != null) {
                out.write(str + "\n");
            }
            System.out.println(
                    "Write content of file " + file.getName() + " to hdfs file " + f.getName() + " success");
        } catch (IllegalArgumentException | IOException e) {
            logger.error("ERROR", e);
        }
    }

    /**
     * Prints the DataNode hostnames holding each block of a hard-coded HDFS file,
     * followed by its last-modification time. Demo/sample method — path is fixed.
     */
    public void getLocation() {
        try {
            Path f = new Path("/user/xxx/input02/file01");
            FileStatus fileStatus = fs.getFileStatus(f);

            BlockLocation[] blkLocations = fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
            for (BlockLocation currentLocation : blkLocations) {
                for (String host : currentLocation.getHosts()) {
                    System.out.println(host);
                }
            }

            // modification time is epoch millis
            long modifyTime = fileStatus.getModificationTime();
            System.out.println(new Date(modifyTime));
        } catch (IllegalArgumentException | IOException e) {
            logger.error("ERROR", e);
        }
    }

    /**
     * Reads a hard-coded HDFS UTF-8 text file and prints it line-by-line to
     * stdout. Demo/sample method — path is fixed.
     */
    public void readFileFromHdfs() {
        Path f = new Path("/user/xxx/input02/file01");
        try (BufferedReader br = new BufferedReader(
                     new InputStreamReader(fs.open(f), StandardCharsets.UTF_8))) {
            String str;
            while ((str = br.readLine()) != null) {
                System.out.println(str);
            }
        } catch (IllegalArgumentException | IOException e) {
            logger.error("ERROR", e);
        }
    }

}