Example usage for org.apache.hadoop.fs FileUtil stat2Paths

Introduction

This page collects example usages of org.apache.hadoop.fs.FileUtil#stat2Paths.

Prototype

public static Path[] stat2Paths(FileStatus[] stats) 

Document

Convert an array of FileStatus to an array of Path.
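
Before the collected examples, here is a minimal self-contained sketch of the call: it lists a directory and converts the resulting FileStatus[] into Path[]. The directory /tmp/data and the default Configuration are placeholder assumptions, not taken from any of the examples below.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

public class Stat2PathsDemo {
    public static void main(String[] args) throws IOException {
        // Resolves to the local file system unless fs.defaultFS
        // points at an HDFS cluster.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // listStatus returns FileStatus objects (path plus metadata);
        // stat2Paths strips them down to plain Paths.
        FileStatus[] stats = fs.listStatus(new Path("/tmp/data"));
        for (Path p : FileUtil.stat2Paths(stats)) {
            System.out.println(p);
        }
    }
}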

Usage

From source file: org.apache.tinkerpop.gremlin.hadoop.structure.hdfs.HDFSTools.java

License: Apache License

public static boolean globDelete(final FileSystem fs, final String path, final boolean recursive)
        throws IOException {
    boolean deleted = false;
    // Expand the glob and delete every matching path.
    for (final Path p : FileUtil.stat2Paths(fs.globStatus(new Path(path)))) {
        fs.delete(p, recursive);
        deleted = true;
    }
    return deleted;
}
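
A caveat worth noting: FileSystem#globStatus returns null (not an empty array) when the pattern contains no wildcards and the path it names does not exist, and stat2Paths passes a null input through unchanged, so in that case the loop above would fail with a NullPointerException; callers may want to null-check the result first.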

From source file: org.cripac.isee.vpe.data.HDFSReader.java

License: Open Source License

/**
 * If the path specifies a directory, list all the sub-files in it.
 * If the path specifies a file, return the path itself.
 *
 * @param path The path of a directory or a file.
 * @return Paths of sub-files if the path specifies a directory or
 * the given path itself if the path specifies a file.
 * @throws IOException On error reading files in Hadoop Filesystem.
 */
public List<Path> listSubfiles(@Nonnull Path path) throws IOException {
    FileStatus[] fileStatus = hdfs.listStatus(path);
    Path[] listPath = FileUtil.stat2Paths(fileStatus);
    ArrayList<Path> subfilePaths = new ArrayList<>();
    ArrayList<Path> subdirPaths = new ArrayList<>();
    for (int i = 0; i < fileStatus.length; ++i) {
        if (fileStatus[i].isDirectory()) {
            subdirPaths.add(listPath[i]);
        } else {
            subfilePaths.add(listPath[i]);
        }
    }
    for (Path sdp : subdirPaths) {
        subfilePaths.addAll(listSubfiles(sdp));
    }
    return subfilePaths;
}

From source file: org.getopt.luke.plugins.FsDirectory.java

License: Apache License

private void create() throws IOException {
    if (!fs.exists(directory)) {
        fs.mkdirs(directory);
        reporter.reportStatus("Created " + directory);
    }

    if (!fs.getFileStatus(directory).isDirectory())
        throw new IOException(directory + " not a directory");

    // clear old files
    FileStatus[] fstats = fs.listStatus(directory);
    Path[] files = FileUtil.stat2Paths(fstats);
    if (files.length > 0) {
        reporter.reportStatus("Cleaning " + files.length + " old files.");
    }
    for (int i = 0; i < files.length; i++) {
        if (!fs.delete(files[i], false))
            throw new IOException("Cannot delete " + files[i]);
    }
}

From source file: org.getopt.luke.plugins.FsDirectory.java

License: Apache License

public String[] listAll() throws IOException {
    FileStatus[] fstats = fs.listStatus(directory);
    Path[] files = FileUtil.stat2Paths(fstats);
    if (files == null)
        return null;

    String[] result = new String[files.length];
    for (int i = 0; i < files.length; i++) {
        result[i] = files[i].getName();
    }
    return result;
}
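
stat2Paths itself returns null only when handed a null array, so the null check above appears to guard against FileSystem implementations whose listStatus returns null for a missing directory; current Hadoop releases are specified to throw FileNotFoundException instead.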

From source file: org.hadoop.tdg.TestPseudoHadoop.java

License: Apache License

@Test
public void listFiles() throws IOException {
    FileStatus[] statuses = fs.listStatus(new Path(DST));
    Path[] listedPaths = FileUtil.stat2Paths(statuses);
    Assert.assertTrue(listedPaths.length == 1);
    LOG.info(listedPaths[0]);
}

From source file: org.hbase.tdg.HBaseTesting.java

License: Apache License

@Test
public void testMasterObserver() throws Exception {//?? don't know why it doesn't show the new path set by observer
    System.out.println("~~~~~~~" + Arrays.toString(admin.getMasterCoprocessors()));
    FileStatus[] statuses = HTU.getTestFileSystem().listStatus(new Path("/user/msoloi"));
    Path[] listedPaths = FileUtil.stat2Paths(statuses);
    //        Assert.assertTrue(listedPaths.length == 1);
    System.out.println(Arrays.toString(listedPaths));
}

From source file: org.lobid.lodmill.hadoop.CollectSubjects.java

License: Open Source License

private static Path zipMapFile(final FileSystem fs, final Path zipOutputLocation)
        throws IOException, FileNotFoundException {
    final Path[] outputFiles = FileUtil
            .stat2Paths(fs.listStatus(new Path(MAP_FILE_NAME), new Utils.OutputFileUtils.OutputFilesFilter()));
    try (final FSDataOutputStream fos = fs.create(zipOutputLocation);
            final ZipOutputStream zos = new ZipOutputStream(fos)) {
        add(zos, new ZipEntry("data"), fs.open(outputFiles[0]));
        add(zos, new ZipEntry("index"), fs.open(outputFiles[1]));
    }
    return zipOutputLocation;
}

From source file: org.lobid.lodmill.hadoop.IntegrationTestCollectSubjects.java

License: Open Source License

private void writeZippedMapFile() throws IOException {
    long time = System.currentTimeMillis();
    final Path[] outputFiles = FileUtil.stat2Paths(
            getFileSystem().listStatus(new Path(HDFS_OUT), new Utils.OutputFileUtils.OutputFilesFilter()));
    final Path zipOutputLocation = new Path(HDFS_OUT + "/" + CollectSubjects.MAP_FILE_ZIP);
    CollectSubjects.asZippedMapFile(hdfs, outputFiles[0], zipOutputLocation);
    final FileStatus fileStatus = hdfs.getFileStatus(zipOutputLocation);
    assertTrue(fileStatus.getModificationTime() >= time);
}

From source file: org.lobid.lodmill.hadoop.IntegrationTestCollectSubjects.java

License: Open Source License

private StringBuilder readResults() throws IOException {
    final Path[] outputFiles = FileUtil.stat2Paths(
            getFileSystem().listStatus(new Path(HDFS_OUT), new Utils.OutputFileUtils.OutputFilesFilter()));
    assertEquals("Expect a single output file", 1, outputFiles.length);
    final StringBuilder builder = new StringBuilder();
    try (final Scanner scanner = new Scanner(getFileSystem().open(outputFiles[0]))) {
        while (scanner.hasNextLine())
            builder.append(scanner.nextLine()).append("\n");
    }
    return builder;
}

From source file: org.mrgeo.hdfs.image.HdfsMrsImageReader.java

License: Apache License

@Override
public long calculateTileCount() {
    int count = 0;
    try {
        final FileSystem fs = imagePath.getFileSystem(conf);
        final Path[] names = FileUtil.stat2Paths(fs.listStatus(imagePath));
        Arrays.sort(names);
        try (DataOutputBuffer key = new DataOutputBuffer()) {
            for (final Path name : names) {
                final FileStatus[] dirFiles = fs.listStatus(name);
                for (final FileStatus dirFile : dirFiles) {
                    // Tally the entries in each "index" SequenceFile.
                    if (dirFile.getPath().getName().equals("index")) {
                        try (SequenceFile.Reader index = new SequenceFile.Reader(fs, dirFile.getPath(), conf)) {
                            while (index.nextRawKey(key) >= 0) {
                                count++;
                            }
                        }
                    }
                }
            }
        }
        return count;
    } catch (final IOException e) {
        throw new MrsImageException(e);
    }
}