Example usage for org.apache.hadoop.fs FileStatus isDirectory

List of usage examples for org.apache.hadoop.fs FileStatus isDirectory

Introduction

On this page you can find example usages of org.apache.hadoop.fs FileStatus isDirectory.

Prototype

public boolean isDirectory() 

Document

Is this a directory?

Usage
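
Before the excerpts from real projects below, here is a minimal, self-contained sketch of the call. It is illustrative only: the class name IsDirectoryExample and the path /tmp are placeholder assumptions, and any path visible to the configured FileSystem works the same way.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class IsDirectoryExample {
    public static void main(String[] args) throws Exception {
        // Resolve the FileSystem for the default (or explicitly configured) scheme.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);

        // getFileStatus throws FileNotFoundException if the path does not exist.
        FileStatus status = fs.getFileStatus(new Path("/tmp")); // placeholder path

        // isDirectory() is true when the status describes a directory and false
        // otherwise (see isFile() and isSymlink() for the other cases).
        System.out.println(status.getPath() + " isDirectory: " + status.isDirectory());
    }
}

The excerpts below show the same call used for recursive deletion, directory listing, input-path expansion, bulk-import validation, and file-attribute mapping.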

From source file: io.prestosql.plugin.hive.metastore.SemiTransactionalHiveMetastore.java

License: Apache License

private static RecursiveDeleteResult doRecursiveDeleteFiles(FileSystem fileSystem, Path directory,
        List<String> filePrefixes, boolean deleteEmptyDirectories) {
    // don't delete hidden presto directories
    if (directory.getName().startsWith(".presto")) {
        return new RecursiveDeleteResult(false, ImmutableList.of());
    }

    FileStatus[] allFiles;
    try {
        allFiles = fileSystem.listStatus(directory);
    } catch (IOException e) {
        ImmutableList.Builder<String> notDeletedItems = ImmutableList.builder();
        notDeletedItems.add(directory.toString() + "/**");
        return new RecursiveDeleteResult(false, notDeletedItems.build());
    }

    boolean allDescendentsDeleted = true;
    ImmutableList.Builder<String> notDeletedEligibleItems = ImmutableList.builder();
    for (FileStatus fileStatus : allFiles) {
        if (fileStatus.isFile()) {
            Path filePath = fileStatus.getPath();
            String fileName = filePath.getName();
            boolean eligible = false;
            // never delete presto dot files
            if (!fileName.startsWith(".presto")) {
                eligible = filePrefixes.stream().anyMatch(fileName::startsWith);
            }
            if (eligible) {
                if (!deleteIfExists(fileSystem, filePath, false)) {
                    allDescendentsDeleted = false;
                    notDeletedEligibleItems.add(filePath.toString());
                }
            } else {
                allDescendentsDeleted = false;
            }
        } else if (fileStatus.isDirectory()) {
            RecursiveDeleteResult subResult = doRecursiveDeleteFiles(fileSystem, fileStatus.getPath(),
                    filePrefixes, deleteEmptyDirectories);
            if (!subResult.isDirectoryNoLongerExists()) {
                allDescendentsDeleted = false;
            }
            if (!subResult.getNotDeletedEligibleItems().isEmpty()) {
                notDeletedEligibleItems.addAll(subResult.getNotDeletedEligibleItems());
            }
        } else {
            allDescendentsDeleted = false;
            notDeletedEligibleItems.add(fileStatus.getPath().toString());
        }
    }
    if (allDescendentsDeleted && deleteEmptyDirectories) {
        verify(notDeletedEligibleItems.build().isEmpty());
        if (!deleteIfExists(fileSystem, directory, false)) {
            return new RecursiveDeleteResult(false, ImmutableList.of(directory.toString() + "/"));
        }
        return new RecursiveDeleteResult(true, ImmutableList.of());
    }
    return new RecursiveDeleteResult(false, notDeletedEligibleItems.build());
}

From source file: io.prestosql.plugin.hive.s3.TestPrestoS3FileSystem.java

License: Apache License

@Test
public void testEmptyDirectory() throws Exception {
    try (PrestoS3FileSystem fs = new PrestoS3FileSystem()) {
        MockAmazonS3 s3 = new MockAmazonS3() {
            @Override
            public ObjectMetadata getObjectMetadata(GetObjectMetadataRequest getObjectMetadataRequest) {
                if (getObjectMetadataRequest.getKey().equals("empty-dir/")) {
                    ObjectMetadata objectMetadata = new ObjectMetadata();
                    objectMetadata.setContentType(S3_DIRECTORY_OBJECT_CONTENT_TYPE);
                    return objectMetadata;
                }
                return super.getObjectMetadata(getObjectMetadataRequest);
            }
        };
        fs.initialize(new URI("s3n://test-bucket/"), new Configuration());
        fs.setS3Client(s3);

        FileStatus fileStatus = fs.getFileStatus(new Path("s3n://test-bucket/empty-dir/"));
        assertTrue(fileStatus.isDirectory());

        fileStatus = fs.getFileStatus(new Path("s3n://test-bucket/empty-dir"));
        assertTrue(fileStatus.isDirectory());
    }
}

From source file: joshelser.HdfsServiceImpl.java

License: Apache License

@Override
public String ls(String directory) throws TException {
    StringBuilder sb = new StringBuilder(64);
    try {
        log.debug("Running as {}", UserGroupInformation.getCurrentUser());
        for (FileStatus stat : fs.listStatus(new Path(directory))) {
            sb.append(stat.getPath().getName());
            if (stat.isDirectory()) {
                sb.append("/");
            }
            sb.append("\n");
        }
    } catch (FileNotFoundException e) {
        System.err.println("Got FileNotFoundException");
        e.printStackTrace(System.err);
        throw new TException(e);
    } catch (IllegalArgumentException e) {
        System.err.println("Got IllegalArgumentException");
        e.printStackTrace(System.err);
        throw new TException(e);
    } catch (IOException e) {
        System.err.println("Got IOException");
        e.printStackTrace(System.err);
        throw new TException(e);
    }

    return sb.toString();
}

From source file: ml.shifu.guagua.hadoop.GuaguaMRUnitDriver.java

License: Apache License

/**
 * List input directories.
 * Subclasses may override to, e.g., select only files matching a regular expression.
 *
 * @param conf
 *            the configuration used to resolve the file systems of the input paths
 * @param input
 *            the input path specification to list
 * @return list of FileStatus objects
 * @throws IOException
 *             if no input paths are specified, or if any input path does not
 *             exist or matches zero files
 */
protected List<FileStatus> listStatus(Configuration conf, String input) throws IOException {
    List<FileStatus> result = new ArrayList<FileStatus>();
    Path[] dirs = getInputPaths(input);
    if (dirs.length == 0) {
        throw new IOException("No input paths specified in job");
    }

    List<IOException> errors = new ArrayList<IOException>();

    // creates a MultiPathFilter with the hiddenFileFilter and the
    // user provided one (if any).
    List<PathFilter> filters = new ArrayList<PathFilter>();
    filters.add(hiddenFileFilter);
    PathFilter inputFilter = new MultiPathFilter(filters);

    for (int i = 0; i < dirs.length; ++i) {
        Path p = dirs[i];
        FileSystem fs = p.getFileSystem(conf);
        FileStatus[] matches = fs.globStatus(p, inputFilter);
        if (matches == null) {
            errors.add(new IOException("Input path does not exist: " + p));
        } else if (matches.length == 0) {
            errors.add(new IOException("Input Pattern " + p + " matches 0 files"));
        } else {
            for (FileStatus globStat : matches) {
                if (globStat.isDirectory()) {
                    for (FileStatus stat : fs.listStatus(globStat.getPath(), inputFilter)) {
                        result.add(stat);
                    }
                } else {
                    result.add(globStat);
                }
            }
        }
    }

    if (!errors.isEmpty()) {
        throw new IOException(errors.toString());
    }
    return result;
}

From source file: net.sf.jfilesync.plugins.net.items.THdfs_plugin.java

License: Apache License

protected TFileProperties extractFileProperties(FileStatus file, FileStatus[] filesInDir) throws IOException {
    TFileProperties prop = new TFileProperties();
    String filename = file.getPath().getName();
    prop.setFileName(filename);

    final String cwd = fs.getWorkingDirectory().toUri().getPath();
    String fname = null;
    if (cwd.endsWith("/")) {
        fname = cwd + filename;
    } else {
        fname = cwd + "/" + filename;
    }
    prop.setAbsoluteFileName(fname);

    if (filename.startsWith(".")) {
        prop.setHiddenFlag(true);
    }

    // There is a little problem with ftp.getSize(), because it's sometimes 0
    prop.setFileSize(new BigInteger(Long.toString(file.getLen())));
    // System.out.println(file.getName() + " , " + file.getTimestamp());
    prop.setFileModTime(file.getModificationTime());
    // System.out.println("file: " + fname);
    // System.out.println("isDirectory: " + file.isDirectory());
    prop.setDirectoryFlag(file.isDirectory());
    prop.setLinkFlag(file.isSymlink());

    int permissions = 0;

    permissions |= file.isDirectory() ? FilePermissions.S_IFDIR : 0;
    permissions |= file.isSymlink() ? FilePermissions.S_IFLNK : 0;
    permissions |= file.getPermission().toShort();

    final TFileAttributes attr = new TFileAttributes();
    attr.setPermissions(permissions);
    prop.setAttributes(attr);

    /*
     * what needs to be done is implement caching of directories which have
     * to be listed for link detection implement recursive link detection
     * for links to links SaHu July 2006
     */

    /*
     * if( file.isSymbolicLink() ) { System.out.println("link target : " +
     * file.getLink()); }
     */

    // if( file.isSymbolicLink() ) {
    // // check if link points to dir
    // final String linkTarget = file.getLink();
    // final String linkTargetBaseName =
    // getPathControl().basename(linkTarget);
    // //System.out.println("link target basename: " + linkTargetBaseName);
    // if( linkTarget != null ) {
    // String linkContaingPath =
    // getPathControl().getPathLevelUp(linkTarget);
    // FTPFile[] targetFiles = null;
    // if( linkContaingPath.equals("") || linkContaingPath.equals(cwd) ) {
    // targetFiles = filesInDir;
    // } else {
    // //System.out.println("check dir : " + linkContaingPath);
    // targetFiles = ftpClient.listFiles(linkContaingPath);
    // }
    //
    //
    // if( targetFiles != null ) {
    // for(int i=0; i<targetFiles.length; i++) {
    // //System.out.println("> " + targetFiles[i].getName());
    // if( targetFiles[i].getName().equals(linkTargetBaseName) ) {
    // if( targetFiles[i].isDirectory() ) {
    // prop.setDirectoryFlag(true);
    // }
    // break;
    // }
    // }
    // }
    // }
    // }

    if (file.isSymlink()) {
        Path path = file.getSymlink();
        if (fs.isDirectory(path))
            prop.setDirectoryFlag(true);
    }

    return prop;
}

From source file: org.apache.accumulo.core.clientImpl.bulk.BulkImport.java

License: Apache License

private static List<FileStatus> filterInvalid(FileStatus[] files) {
    ArrayList<FileStatus> fileList = new ArrayList<>(files.length);

    for (FileStatus fileStatus : files) {

        String fname = fileStatus.getPath().getName();

        if (fname.equals("_SUCCESS") || fname.equals("_logs")) {
            log.debug("Ignoring file likely created by map reduce : {}", fileStatus.getPath());
            continue;
        }

        if (fileStatus.isDirectory()) {
            log.warn("{} is a directory, ignoring.", fileStatus.getPath());
            continue;
        }

        String[] sa = fname.split("\\.");
        String extension = "";
        if (sa.length > 1) {
            extension = sa[sa.length - 1];
        }

        if (!FileOperations.getValidExtensions().contains(extension)) {
            log.warn("{} does not have a valid extension, ignoring", fileStatus.getPath());
            continue;
        }

        fileList.add(fileStatus);
    }

    return fileList;
}

From source file: org.apache.accumulo.core.clientImpl.BulkImport.java

License: Apache License

private static List<FileStatus> filterInvalid(FileStatus[] files) {
    ArrayList<FileStatus> fileList = new ArrayList<>(files.length);

    for (FileStatus fileStatus : files) {

        String fname = fileStatus.getPath().getName();

        if (fname.equals("_SUCCESS") || fname.equals("_logs")) {
            log.debug("Ignoring file likely created by map reduce : {}", fileStatus.getPath());
            continue;
        }

        if (fileStatus.isDirectory()) {
            log.warn("{} is a directory, ignoring.", fileStatus.getPath());
            continue;
        }

        String[] sa = fname.split("\\.");
        String extension = "";
        if (sa.length > 1) {
            extension = sa[sa.length - 1];
        }

        if (!FileOperations.getValidExtensions().contains(extension)) {
            log.warn("{} does not have a valid extension, ignoring", fileStatus.getPath());
            continue;
        }

        fileList.add(fileStatus);
    }

    return fileList;
}

From source file: org.apache.accumulo.master.tableOps.BulkImport.java

License: Apache License

@Override
public Repo<Master> call(long tid, Master master) throws Exception {
    log.debug(" tid " + tid + " sourceDir " + sourceDir);

    Utils.getReadLock(tableId, tid).lock();

    // check that the error directory exists and is empty
    VolumeManager fs = master.getFileSystem();

    Path errorPath = new Path(errorDir);
    FileStatus errorStatus = null;
    try {
        errorStatus = fs.getFileStatus(errorPath);
    } catch (FileNotFoundException ex) {
        // ignored
    }
    if (errorStatus == null)
        throw new AcceptableThriftTableOperationException(tableId, null, TableOperation.BULK_IMPORT,
                TableOperationExceptionType.BULK_BAD_ERROR_DIRECTORY, errorDir + " does not exist");
    if (!errorStatus.isDirectory())
        throw new AcceptableThriftTableOperationException(tableId, null, TableOperation.BULK_IMPORT,
                TableOperationExceptionType.BULK_BAD_ERROR_DIRECTORY, errorDir + " is not a directory");
    if (fs.listStatus(errorPath).length != 0)
        throw new AcceptableThriftTableOperationException(tableId, null, TableOperation.BULK_IMPORT,
                TableOperationExceptionType.BULK_BAD_ERROR_DIRECTORY, errorDir + " is not empty");

    ZooArbitrator.start(Constants.BULK_ARBITRATOR_TYPE, tid);
    master.updateBulkImportStatus(sourceDir, BulkImportState.MOVING);
    // move the files into the directory
    try {
        String bulkDir = prepareBulkImport(master, fs, sourceDir, tableId);
        log.debug(" tid " + tid + " bulkDir " + bulkDir);
        return new LoadFiles(tableId, sourceDir, bulkDir, errorDir, setTime);
    } catch (IOException ex) {
        log.error("error preparing the bulk import directory", ex);
        throw new AcceptableThriftTableOperationException(tableId, null, TableOperation.BULK_IMPORT,
                TableOperationExceptionType.BULK_BAD_INPUT_DIRECTORY, sourceDir + ": " + ex);
    }
}

From source file: org.apache.accumulo.master.tableOps.BulkImport.java

License: Apache License

private String prepareBulkImport(Master master, final VolumeManager fs, String dir, String tableId)
        throws Exception {
    final Path bulkDir = createNewBulkDir(fs, tableId);

    MetadataTableUtil.addBulkLoadInProgressFlag(master,
            "/" + bulkDir.getParent().getName() + "/" + bulkDir.getName());

    Path dirPath = new Path(dir);
    FileStatus[] mapFiles = fs.listStatus(dirPath);

    final UniqueNameAllocator namer = UniqueNameAllocator.getInstance();

    int workerCount = master.getConfiguration().getCount(Property.MASTER_BULK_RENAME_THREADS);
    SimpleThreadPool workers = new SimpleThreadPool(workerCount, "bulk move");
    List<Future<Exception>> results = new ArrayList<>();

    for (FileStatus file : mapFiles) {
        final FileStatus fileStatus = file;
        results.add(workers.submit(new Callable<Exception>() {
            @Override
            public Exception call() throws Exception {
                try {
                    String[] sa = fileStatus.getPath().getName().split("\\.");
                    String extension = "";
                    if (sa.length > 1) {
                        extension = sa[sa.length - 1];

                        if (!FileOperations.getValidExtensions().contains(extension)) {
                            log.warn(fileStatus.getPath() + " does not have a valid extension, ignoring");
                            return null;
                        }
                    } else {
                        // assume it is a map file
                        extension = Constants.MAPFILE_EXTENSION;
                    }

                    if (extension.equals(Constants.MAPFILE_EXTENSION)) {
                        if (!fileStatus.isDirectory()) {
                            log.warn(fileStatus.getPath() + " is not a map file, ignoring");
                            return null;
                        }

                        if (fileStatus.getPath().getName().equals("_logs")) {
                            log.info(fileStatus.getPath()
                                    + " is probably a log directory from a map/reduce task, skipping");
                            return null;
                        }
                        try {
                            FileStatus dataStatus = fs
                                    .getFileStatus(new Path(fileStatus.getPath(), MapFile.DATA_FILE_NAME));
                            if (dataStatus.isDirectory()) {
                                log.warn(fileStatus.getPath() + " is not a map file, ignoring");
                                return null;
                            }
                        } catch (FileNotFoundException fnfe) {
                            log.warn(fileStatus.getPath() + " is not a map file, ignoring");
                            return null;
                        }
                    }

                    String newName = "I" + namer.getNextName() + "." + extension;
                    Path newPath = new Path(bulkDir, newName);
                    try {
                        fs.rename(fileStatus.getPath(), newPath);
                        log.debug("Moved " + fileStatus.getPath() + " to " + newPath);
                    } catch (IOException E1) {
                        log.error("Could not move: {} {}", fileStatus.getPath().toString(), E1.getMessage());
                    }

                } catch (Exception ex) {
                    return ex;
                }
                return null;
            }
        }));
    }
    workers.shutdown();
    // block until every rename task submitted above has completed
    while (!workers.awaitTermination(1000L, TimeUnit.MILLISECONDS)) {
    }

    for (Future<Exception> ex : results) {
        if (ex.get() != null) {
            throw ex.get();
        }
    }
    return bulkDir.toString();
}

From source file: org.apache.accumulo.master.tableOps.bulkVer1.BulkImport.java

License: Apache License

@Override
public Repo<Master> call(long tid, Master master) throws Exception {
    log.debug(" tid {} sourceDir {}", tid, sourceDir);

    Utils.getReadLock(tableId, tid).lock();

    // check that the error directory exists and is empty
    VolumeManager fs = master.getFileSystem();

    Path errorPath = new Path(errorDir);
    FileStatus errorStatus = null;
    try {
        errorStatus = fs.getFileStatus(errorPath);
    } catch (FileNotFoundException ex) {
        // ignored
    }
    if (errorStatus == null)
        throw new AcceptableThriftTableOperationException(tableId.canonicalID(), null,
                TableOperation.BULK_IMPORT, TableOperationExceptionType.BULK_BAD_ERROR_DIRECTORY,
                errorDir + " does not exist");
    if (!errorStatus.isDirectory())
        throw new AcceptableThriftTableOperationException(tableId.canonicalID(), null,
                TableOperation.BULK_IMPORT, TableOperationExceptionType.BULK_BAD_ERROR_DIRECTORY,
                errorDir + " is not a directory");
    if (fs.listStatus(errorPath).length != 0)
        throw new AcceptableThriftTableOperationException(tableId.canonicalID(), null,
                TableOperation.BULK_IMPORT, TableOperationExceptionType.BULK_BAD_ERROR_DIRECTORY,
                errorDir + " is not empty");

    ZooArbitrator.start(Constants.BULK_ARBITRATOR_TYPE, tid);
    master.updateBulkImportStatus(sourceDir, BulkImportState.MOVING);
    // move the files into the directory
    try {
        String bulkDir = prepareBulkImport(master, fs, sourceDir, tableId);
        log.debug(" tid {} bulkDir {}", tid, bulkDir);
        return new LoadFiles(tableId, sourceDir, bulkDir, errorDir, setTime);
    } catch (IOException ex) {
        log.error("error preparing the bulk import directory", ex);
        throw new AcceptableThriftTableOperationException(tableId.canonicalID(), null,
                TableOperation.BULK_IMPORT, TableOperationExceptionType.BULK_BAD_INPUT_DIRECTORY,
                sourceDir + ": " + ex);
    }
}