Example usage for org.apache.hadoop.fs FileSystem isFile

Introduction

This page collects usage examples for org.apache.hadoop.fs.FileSystem#isFile from open-source projects.

Prototype

@Deprecated
public boolean isFile(Path f) throws IOException 

Document

True iff the named path is a regular file.
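
Because the method is deprecated (the Hadoop Javadoc recommends reusing a FileStatus obtained from getFileStatus() or listStatus() instead), newer code usually asks for the file status directly. A minimal sketch of both styles, using a placeholder path:

import java.io.FileNotFoundException;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class IsFileExample {
    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path p = new Path("/tmp/example.txt"); // placeholder path

        // Deprecated call: returns false for directories and for missing paths alike.
        boolean legacy = fs.isFile(p);

        // Preferred replacement: getFileStatus() throws FileNotFoundException for a
        // missing path, so "file", "directory" and "absent" remain distinguishable.
        boolean preferred;
        try {
            preferred = fs.getFileStatus(p).isFile();
        } catch (FileNotFoundException e) {
            preferred = false; // path does not exist
        }

        System.out.println("legacy=" + legacy + ", preferred=" + preferred);
    }
}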

Usage

From source file:org.openflamingo.uploader.util.HdfsUtils.java

License:Open Source License

/**
 * Checks whether the given path is a directory.
 *
 * @param fs   FileSystem
 * @param path Path to check
 * @return <tt>true</tt> if the path is a directory
 */
public static boolean isDir(FileSystem fs, String path) {
    try {
        return !fs.isFile(new Path(path));
    } catch (Exception ex) {
        throw new FileSystemException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
    }
}

From source file:org.openflamingo.uploader.util.HdfsUtils.java

License:Open Source License

/**
 * Checks whether the given path is a file.
 *
 * @param fs   FileSystem
 * @param path Path to check
 * @return <tt>true</tt> if the path is a regular file
 */
public static boolean isFile(FileSystem fs, String path) {
    try {
        return fs.isFile(new Path(path));
    } catch (Exception ex) {
        throw new FileSystemException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
    }
}

From source file:org.openflamingo.util.HdfsUtils.java

License:Apache License

/**
 * Checks whether the given path is a directory.
 *
 * @param fs   FileSystem
 * @param path Path to check
 * @return <tt>true</tt> if the path is a directory
 */
public static boolean isDir(FileSystem fs, String path) {
    try {
        return !fs.isFile(new Path(path));
    } catch (Exception ex) {
        throw new FileSystemException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
    }
}

From source file:org.openflamingo.util.HdfsUtils.java

License:Apache License

/**
 * Checks whether the given path is a file.
 *
 * @param fs   FileSystem
 * @param path Path to check
 * @return <tt>true</tt> if the path is a regular file
 */
public static boolean isFile(FileSystem fs, String path) {
    try {
        return fs.isFile(new Path(path));
    } catch (Exception ex) {
        throw new FileSystemException(ExceptionUtils.getMessage("Cannot access '{}'", path), ex);
    }
}
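
Both HdfsUtils variants answer the directory question by inverting isFile(Path). Note that fs.isFile returns false for a path that does not exist, so isDir() also reports missing paths as directories. A hypothetical caller of these helpers, with a placeholder path:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HdfsUtilsExample {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        String path = "/data/input"; // placeholder path

        if (HdfsUtils.isDir(fs, path)) {
            // Caution: this branch is also taken when the path does not exist,
            // because !fs.isFile(path) is true for missing paths.
            System.out.println(path + " is a directory (or absent)");
        } else {
            System.out.println(path + " is a regular file");
        }
    }
}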

From source file:org.openflamingo.web.designer.DesignerController.java

License:Apache License

/**
 * Previews an HDFS file so that the OpenGraph designer can show its
 * first row when configuring CLI input.
 *
 * @return Response REST JAXB Object
 */
@RequestMapping(value = "previewHDFSFile", method = RequestMethod.GET)
@ResponseStatus(HttpStatus.OK)
@ResponseBody
public Response previewHDFSFile(@RequestParam String inputPath, String delimiter, long engineId) {
    Response response = new Response();

    try {
        Configuration configuration = new Configuration();

        String hadoopPath = ConfigurationManager.getConfigurationManager().get("hadoop.home");
        // Load core-site.xml and hdfs-site.xml from the configured Hadoop home
        configuration.addResource(new Path(hadoopPath + "/conf/core-site.xml"));
        configuration.addResource(new Path(hadoopPath + "/conf/hdfs-site.xml"));

        // Get hadoop cluster
        Engine engine = engineService.getEngine(engineId);
        if (engine == null) {
            throw new IllegalArgumentException(message("S_DESIGNER", "NOT_VALID_WORKFLOW_ENG"));
        }
        HadoopCluster hadoopCluster = hadoopClusterAdminService.getHadoopCluster(engine.getHadoopClusterId());
        String hdfsUrl = hadoopCluster.getHdfsUrl();

        FileSystem fileSystem = FileSystem.get(configuration);
        Path path = new Path(hdfsUrl + inputPath);

        System.out.println("fileSystem......" + fileSystem);
        System.out.println("path......" + path);

        if (!fileSystem.isFile(path)) {
            throw new IllegalArgumentException("Input should be a file: " + inputPath);
        }

        List<PreviewFile> list = new ArrayList<PreviewFile>();
        PreviewFile previewFile = new PreviewFile();
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(fileSystem.open(path)));
        String line;
        try {
            line = bufferedReader.readLine();
        } finally {
            bufferedReader.close();
        }
        if (line == null) {
            throw new IllegalArgumentException("Input file is empty: " + inputPath);
        }
        int columnLength = -1;

        List<Integer> columnIndexList = new ArrayList<Integer>();
        List<String> rowDataList = new ArrayList<String>();

        String[] split = line.split(delimiter);

        for (int i = 0; i < split.length; i++) {
            columnLength++;
            columnIndexList.add(columnLength);
            rowDataList.add(split[i]);
        }

        //Set field number
        previewFile.setColumnIndex(columnIndexList);
        //Set field data
        previewFile.setRowData(rowDataList);
        list.add(previewFile);

        response.getList().addAll(list);
        response.setObject(delimiter);
        response.setTotal(columnLength);
        response.setSuccess(true);
        return response;
    } catch (Exception ex) {
        logger.warn("{}", ex.getMessage(), ex);
        response.getError().setMessage(ex.getMessage());
        response.setSuccess(false);
        return response;
    }
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

public void cp(String src, String src2, String... dst) {
    Object[] va = parseVarargs(src, src2, dst);
    @SuppressWarnings("unchecked")
    List<Path> srcs = (List<Path>) va[0];
    Path dstPath = (Path) va[1];

    try {
        FileSystem dstFs = dstPath.getFileSystem(configuration);
        // A destination that is not an existing regular file (including a
        // nonexistent path) counts as a directory here.
        boolean isDestDir = !dstFs.isFile(dstPath);

        // Multiple sources are being copied when src2 is given or when extra
        // paths precede the destination in the varargs.
        if (StringUtils.hasText(src2) || (!ObjectUtils.isEmpty(dst) && dst.length > 2)) {
            if (!isDestDir) {
                throw new IllegalArgumentException("When copying multiple files, destination " + dstPath.toUri()
                        + " should be a directory.");
            }
        }

        for (Path path : srcs) {
            FileSystem srcFs = path.getFileSystem(configuration);
            Path[] from = FileUtil.stat2Paths(srcFs.globStatus(path), path);
            if (!ObjectUtils.isEmpty(from) && from.length > 1 && !isDestDir) {
                throw new IllegalArgumentException(
                        "When copying multiple files, destination should be a directory.");
            }
            for (Path fromPath : from) {
                FileUtil.copy(srcFs, fromPath, dstFs, dstPath, false, configuration);
            }
        }
    } catch (IOException ex) {
        throw new HadoopException("Cannot copy resources " + ex.getMessage(), ex);
    }
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

public void mv(String src, String src2, String... dst) {
    Object[] va = parseVarargs(src, src2, dst);
    @SuppressWarnings({ "unchecked" })
    List<Path> sources = (List<Path>) va[0];
    Path dstPath = (Path) va[1];

    try {
        FileSystem dstFs = getFS(dstPath);
        boolean isDstDir = !dstFs.isFile(dstPath);

        if (sources.size() > 1 && !isDstDir) {
            throw new IllegalArgumentException("Destination must be a dir when moving multiple files");
        }

        for (Path srcPath : sources) {
            FileSystem srcFs = getFS(srcPath);
            URI srcURI = srcFs.getUri();
            URI dstURI = dstFs.getUri();
            if (srcURI.compareTo(dstURI) != 0) {
                throw new IllegalArgumentException("src and destination filesystems do not match.");
            }
            Path[] srcs = FileUtil.stat2Paths(srcFs.globStatus(srcPath), srcPath);
            if (srcs.length > 1 && !isDstDir) {
                throw new IllegalArgumentException(
                        "When moving multiple files, destination should be a directory.");
            }
            for (Path s : srcs) {
                if (!srcFs.rename(s, dstPath)) {
                    FileStatus srcFstatus = null;
                    FileStatus dstFstatus = null;
                    try {
                        srcFstatus = srcFs.getFileStatus(s);
                    } catch (FileNotFoundException e) {
                        // ignore
                    }
                    try {
                        dstFstatus = dstFs.getFileStatus(dstPath);
                    } catch (IOException e) {
                        // ignore
                    }
                    if ((srcFstatus != null) && (dstFstatus != null)) {
                        if (srcFstatus.isDir() && !dstFstatus.isDir()) {
                            throw new IllegalArgumentException(
                                    "cannot overwrite non directory " + dstPath + " with directory " + s);
                        }
                    }
                    throw new HadoopException("Failed to rename " + s + " to " + dstPath);
                }
            }
        }
    } catch (IOException ex) {
        throw new HadoopException("Cannot rename resources " + ex.getMessage(), ex);
    }
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

private void setrep(short replication, boolean recursive, FileSystem srcFs, Path src, List<Path> waitList)
        throws IOException {
    if (srcFs.isFile(src)) {
        if (srcFs.setReplication(src, replication)) {
            if (waitList != null) {
                waitList.add(src);
            }
        } else {
            throw new HadoopException("Cannot set replication for " + src);
        }
    } else {
        if (recursive) {
            FileStatus[] items = srcFs.listStatus(src);
            if (!ObjectUtils.isEmpty(items)) {
                for (FileStatus status : items) {
                    setrep(replication, recursive, srcFs, status.getPath(), waitList);
                }
            }
        }
    }
}

From source file:org.springframework.data.hadoop.fs.FsShell.java

License:Apache License

public Collection<String> text(String... uris) {
    Collection<String> texts = new PrettyPrintList<String>(new ListPrinter<String>() {

        @Override
        public String toString(String e) throws Exception {
            return e + "\n";
        }
    });

    for (String uri : uris) {

        InputStream in = null;
        FSDataInputStream i = null;

        try {
            Path srcPat = new Path(uri);
            FileSystem srcFs = getFS(srcPat);

            for (Path src : FileUtil.stat2Paths(srcFs.globStatus(srcPat), srcPat)) {
                Assert.isTrue(srcFs.isFile(src), "Source must be a file");
                i = srcFs.open(src);
                switch (i.readShort()) {
                case 0x1f8b: // RFC 1952: gzip magic number
                    i.seek(0);
                    in = new GZIPInputStream(i);
                    break;
                case 0x5345: // 'S' 'E': start of the SequenceFile magic
                    if (i.readByte() == 'Q') {
                        i.close();
                        in = new TextRecordInputStream(src, srcFs, configuration);
                    } else {
                        // not a SequenceFile after all; rewind and read as plain text
                        i.seek(0);
                        in = i;
                    }
                    break;
                default:
                    // plain text: rewind past the two bytes consumed by the probe
                    i.seek(0);
                    in = i;
                    break;
                }
                texts.add(getContent(in));
            }
        } catch (IOException ex) {
            throw new HadoopException("Cannot read " + uri + ";" + ex.getMessage(), ex);
        } finally {
            IOUtils.closeStream(in);
            IOUtils.closeStream(i);
        }
    }
    return texts;
}

From source file:org.talend.components.test.MiniDfsResource.java

License:Open Source License

/**
 * Tests that a file on the HDFS cluster contains the given texts.
 *
 * @param path the name of the file on the HDFS cluster
 * @param expected the expected lines in the file (not including terminating end-of-lines).
 */
public static void assertReadFile(FileSystem fs, String path, String... expected) throws IOException {
    Path p = new Path(path);
    if (fs.isFile(p)) {
        try (BufferedReader r = new BufferedReader(new InputStreamReader(fs.open(p)))) {
            for (String line : expected)
                assertThat(r.readLine(), is(line));
            assertThat(r.readLine(), nullValue());
        }
    } else if (fs.isDirectory(p)) {
        HashSet<String> expect = new HashSet<>(Arrays.asList(expected));
        for (FileStatus fstatus : fs.listStatus(p)) {
            try (BufferedReader r = new BufferedReader(new InputStreamReader(fs.open(fstatus.getPath())))) {
                String line = null;
                while (null != (line = r.readLine()))
                    if (!expect.remove(line))
                        fail("Unexpected line: " + line);
            }
        }
        // Check before asserting for the message.
        if (expect.size() != 0)
            assertThat("Not all lines found: " + expect.iterator().next(), expect, hasSize(0));
    } else {
        fail("No such path: " + path);
    }
}
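
A hypothetical JUnit test using this helper; the file name, its contents, and the use of the default FileSystem (standing in for the MiniDFS cluster that MiniDfsResource normally provides) are all assumptions:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

public class AssertReadFileExample {
    @Test
    public void readsBackWrittenLines() throws Exception {
        FileSystem fs = FileSystem.get(new Configuration()); // stand-in for the test cluster
        Path out = new Path("/tmp/words.txt"); // placeholder path
        try (FSDataOutputStream os = fs.create(out, true)) {
            os.writeBytes("alpha\nbeta\n");
        }
        // Passes only if the file contains exactly these lines, in order.
        MiniDfsResource.assertReadFile(fs, "/tmp/words.txt", "alpha", "beta");
    }
}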