org.exem.flamingo.shared.util.HdfsUtils.java Source code

Introduction

Here is the source code for org.exem.flamingo.shared.util.HdfsUtils.java

Source

/*
 * Copyright 2012-2016 the Flamingo Community.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.exem.flamingo.shared.util;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.fs.permission.FsPermission;
import org.exem.flamingo.shared.core.exception.ServiceException;
import org.slf4j.helpers.MessageFormatter;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;

/**
 * Utility methods for working with Hadoop HDFS.
 *
 * @author Byoung Gon, Kim
 * @since 0.1
 */
public class HdfsUtils {

    /**
     * Prefix of an HDFS URL.
     */
    public static final String HDFS_URL_PREFIX = "hdfs://";

    /**
     * Maximum size in bytes of a file that may be loaded into memory.
     */
    public static final long MAX_SIZE = 500 * FileUtils.KB;

    /**
     * Moves the source file or directory to the target path.
     *
     * @param source source path
     * @param target target path
     * @param fs     Hadoop FileSystem
     */
    public static void move(String source, String target, FileSystem fs) throws Exception {
        Path srcPath = new Path(source);
        Path[] srcs = FileUtil.stat2Paths(fs.globStatus(srcPath), srcPath);
        Path dst = new Path(target);
        if (srcs.length > 1 && !fs.getFileStatus(dst).isDirectory()) {
            throw new ServiceException("When moving multiple files, destination should be a directory.");
        }
        for (int i = 0; i < srcs.length; i++) {
            if (!fs.rename(srcs[i], dst)) {
                FileStatus srcFstatus = null;
                FileStatus dstFstatus = null;
                try {
                    srcFstatus = fs.getFileStatus(srcs[i]);
                } catch (FileNotFoundException e) {
                    throw new FileNotFoundException(srcs[i] + ": No such file or directory");
                }
                try {
                    dstFstatus = fs.getFileStatus(dst);
                } catch (IOException e) {
                    // Nothing
                }
                if ((srcFstatus != null) && (dstFstatus != null)) {
                    if (srcFstatus.isDirectory() && !dstFstatus.isDirectory()) {
                        throw new ServiceException(
                                "cannot overwrite non directory " + dst + " with directory " + srcs[i]);
                    }
                }
                throw new ServiceException("Failed to rename " + srcs[i] + " to " + dst);
            }
        }
    }

    /**
     * Returns an InputStream for the specified file in HDFS.
     *
     * @param fs       FileSystem
     * @param filename fully qualified path
     * @return the input stream of the file
     * @throws IOException if the file cannot be opened
     */
    public static InputStream getInputStream(FileSystem fs, String filename) throws IOException {
        return fs.open(new Path(filename));
    }

    /**
     * Returns a Hadoop FileSystem for the given "<tt>fs.default.name</tt>" HDFS URL.
     *
     * @param fsDefaultName the "<tt>fs.default.name</tt>" value defined in "<tt>core-site.xml</tt>"
     * @return FileSystem
     */
    public static FileSystem getFileSystem(String fsDefaultName) {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", fsDefaultName);
        conf.set("fs.defaultFS", fsDefaultName);
        conf.set("fs.AbstractFileSystem.hdfs.impl", "org.apache.hadoop.fs.Hdfs");
        try {
            return FileSystem.get(conf);
        } catch (IOException e) {
            throw new ServiceException("Cannot get FileSystem.", e);
        }
    }

    /**
     * Loads the contents of the specified file as a UTF-8 string.
     *
     * @param fs   FileSystem
     * @param path path of the file
     * @return the contents of the file
     */
    public static String load(FileSystem fs, String path) {
        return load(fs, path, "UTF-8");
    }

    /**
     * Tests whether the specified path is a directory.
     *
     * @param fs   FileSystem
     * @param path path to test
     * @return <tt>true</tt> if the path is a directory
     */
    public static boolean isDirectory(FileSystem fs, String path) {
        try {
            return !fs.isFile(new Path(path));
        } catch (Exception ex) {
            throw new ServiceException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
        }
    }

    /**
     * Tests whether the specified path is a file.
     *
     * @param fs   FileSystem
     * @param path path to test
     * @return <tt>true</tt> if the path is a file
     */
    public static boolean isFile(FileSystem fs, String path) {
        try {
            return fs.isFile(new Path(path));
        } catch (Exception ex) {
            throw new ServiceException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
        }
    }

    /**
     * Deletes the specified path recursively.
     *
     * @param fs   FileSystem
     * @param path path to delete
     * @return <tt>true</tt> if the path was deleted
     */
    public static boolean delete(FileSystem fs, String path) {
        try {
            return fs.delete(new Path(path), true);
        } catch (Exception ex) {
            throw new ServiceException(ExceptionUtils.getMessage("Cannot delete '{}'", path), ex);
        }
    }

    /**
     * Creates the specified directory.
     *
     * @param fs   FileSystem
     * @param path directory path to create
     * @return <tt>true</tt> if the directory was created
     */
    public static boolean mkdir(FileSystem fs, String path) {
        try {
            return FileSystem.mkdirs(fs, new Path(path), FsPermission.getDefault());
        } catch (Exception ex) {
            throw new ServiceException(ExceptionUtils.getMessage("Cannot create '{}'", path), ex);
        }
    }

    /**
     * Loads the contents of the specified file as a string. An exception is thrown when:
     * <ul>
     * <li>the file exceeds the maximum size</li>
     * <li>the file cannot be read</li>
     * </ul>
     *
     * @param fs       Hadoop {@link FileSystem}
     * @param path     path of the file
     * @param encoding character encoding
     * @return the contents of the file
     */
    public static String load(FileSystem fs, String path, String encoding) {
        try {
            FileStatus fileStatus = fs.getFileStatus(new Path(path));
            long length = fileStatus.getLen();
            if (length > MAX_SIZE) {
                throw new IllegalArgumentException("Exceeded " + MAX_SIZE + " bytes : '" + path + "'");
            }
        } catch (Exception ex) {
            throw new ServiceException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
        }

        FSDataInputStream is = null;
        try {
            is = fs.open(new Path(path));
            return IOUtils.toString(is, encoding);
        } catch (IOException e) {
            throw new ServiceException(ExceptionUtils.getMessage("Cannot load '{}'", path), e);
        } finally {
            IOUtils.closeQuietly(is);
        }
    }

    /**
     * Returns a Hadoop FileSystem for the given path: a path starting with "<tt>hdfs://</tt>"
     * yields an HDFS file system, any other path yields the local file system.
     *
     * @param path HDFS Path
     * @return FileSystem
     */
    public static FileSystem getFileSystemFromPath(String path) {
        if (!path.startsWith(HDFS_URL_PREFIX) || path.startsWith("file:///")) {
            try {
                Configuration conf = new Configuration();
                return FileSystem.getLocal(conf);
            } catch (IOException e) {
                throw new ServiceException("Cannot create local file system of Apache Hadoop.", e);
            }
        }

        StringBuilder builder = new StringBuilder();
        builder.append(HDFS_URL_PREFIX);
        builder.append(getIpAddressFromPath(path));
        builder.append(getPortFromPath(path));
        return getFileSystem(builder.toString());
    }

    /**
     * Extracts the IP address from an HDFS path.
     *
     * @param path HDFS Path
     * @return IP Address
     */
    public static String getIpAddressFromPath(String path) {
        if (!path.startsWith(HDFS_URL_PREFIX)) {
            throw new ServiceException(ExceptionUtils.getMessage("Invalid path '{}'", path));
        }
        String[] split = org.springframework.util.StringUtils.delete(path, HDFS_URL_PREFIX).split(":");
        return split[0];
    }

    /**
     * Extracts the port from an HDFS path.
     *
     * @param path HDFS Path
     * @return Port
     */
    public static String getPortFromPath(String path) {
        if (!path.startsWith(HDFS_URL_PREFIX)) {
            throw new ServiceException(ExceptionUtils.getMessage("Invalid path '{}'", path));
        }
        String[] split = org.springframework.util.StringUtils.delete(path, HDFS_URL_PREFIX).split(":");
        if (split.length != 2) {
            throw new ServiceException("Invalid path pattern. Path pattern must be \"hdfs://IP:PORT\".");
        }
        return split[1];
    }

    /**
     * Tests whether a file or directory exists at the specified path.
     *
     * @param fs   Hadoop {@link FileSystem}
     * @param path path to test
     * @return <tt>true</tt> if the path exists
     */
    public static boolean isExist(FileSystem fs, String path) {
        try {
            return fs.exists(new Path(path));
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Returns a FileSystem for the given Hadoop {@link Configuration}.
     *
     * @param conf {@link Configuration}
     * @return FileSystem
     */
    public static FileSystem getFileSystem(Configuration conf) {
        try {
            return FileSystem.get(conf);
        } catch (Exception e) {
            throw new ServiceException("Cannot access file system of Apache Hadoop", e);
        }
    }

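    /**
     * Returns a FileSystem for the NameNode at the given address and port. Both the current
     * "<tt>fs.defaultFS</tt>" key and the deprecated "<tt>fs.default.name</tt>" key are set,
     * keeping compatibility with older Hadoop versions.
     *
     * @param address NameNode address
     * @param port    NameNode port
     * @return FileSystem
     */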
    public static FileSystem getFileSystem(String address, int port) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HdfsUtils.getHdfsUrl(address, port));
        conf.set("fs.default.name", HdfsUtils.getHdfsUrl(address, port));
        conf.set("fs.default.name", HdfsUtils.getHdfsUrl(address, port));
        return HdfsUtils.getFileSystem(conf);
    }

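    /**
     * Builds an HDFS URL of the form "<tt>hdfs://ADDRESS:PORT</tt>".
     *
     * @param address NameNode address
     * @param port    NameNode port
     * @return HDFS URL
     */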
    public static String getHdfsUrl(String address, int port) {
        return MessageFormatter.format("hdfs://{}:{}", address, port).getMessage();
    }
}
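
Usage

A minimal sketch of how these helpers combine. The NameNode address, port, and paths below are hypothetical placeholders, not values taken from the source above.

import org.apache.hadoop.fs.FileSystem;
import org.exem.flamingo.shared.util.HdfsUtils;

public class HdfsUtilsExample {

    public static void main(String[] args) throws Exception {
        // Connect to a (hypothetical) NameNode; equivalent to calling
        // getFileSystemFromPath("hdfs://192.168.0.10:8020").
        FileSystem fs = HdfsUtils.getFileSystem("192.168.0.10", 8020);

        // Create a working directory if it does not exist yet.
        if (!HdfsUtils.isExist(fs, "/tmp/flamingo")) {
            HdfsUtils.mkdir(fs, "/tmp/flamingo");
        }

        // Read a small text file; load() rejects files larger than MAX_SIZE.
        if (HdfsUtils.isFile(fs, "/tmp/flamingo/sample.txt")) {
            System.out.println(HdfsUtils.load(fs, "/tmp/flamingo/sample.txt"));
        }

        // Move the file; when the source glob matches several files,
        // the destination must be a directory.
        HdfsUtils.move("/tmp/flamingo/sample.txt", "/tmp/archive", fs);
    }
}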