Example usage for org.apache.commons.vfs2 FileDepthSelector FileDepthSelector

Introduction

This page collects example usages of the org.apache.commons.vfs2 FileDepthSelector constructor from open source projects.

Prototype

public FileDepthSelector(final int minDepth, final int maxDepth) 

Creates a selector with the given minimum and maximum depths.
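
A minimal, self-contained sketch of the constructor in use (the folder URI below is a placeholder, not taken from the examples on this page). Depth is counted from the folder the selector is applied to, which sits at depth 0, so FileDepthSelector(1, 1) matches only that folder's direct children:

import org.apache.commons.vfs2.FileDepthSelector;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class FileDepthSelectorSketch {
    public static void main(final String[] args) throws FileSystemException {
        final FileSystemManager manager = VFS.getManager();
        // Placeholder location; any folder the manager can resolve works here.
        final FileObject folder = manager.resolveFile("file:///tmp/data");
        // The base folder is depth 0, so (1, 1) selects direct children only.
        for (final FileObject child : folder.findFiles(new FileDepthSelector(1, 1))) {
            System.out.println(child.getName().getBaseName());
        }
    }
}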

Usage

From source file: org.cloudifysource.esc.installer.filetransfer.VfsFileTransfer.java

@Override
public void copyFiles(final InstallationDetails details, final Set<String> excludedFiles,
        final List<File> additionalFiles, final long endTimeMillis)
        throws TimeoutException, InstallerException {

    logger.fine("Copying files to: " + host + " from local dir: " + localDir.getName().getPath() + " excluding "
            + excludedFiles.toString());

    try {

        if (remoteDir.exists()) {
            FileType type = remoteDir.getType();
            if (!type.equals(FileType.FOLDER)) {
                throw new InstallerException("The remote location: " + remoteDir.getName().getFriendlyURI()
                        + " exists but is not a directory");
            }

            if (deleteRemoteDirectoryContents) {
                logger.info("Deleting contents of remote directory: " + remoteDir.getName().getFriendlyURI());
                remoteDir.delete(new FileDepthSelector(1, Integer.MAX_VALUE));
            }
            FileObject[] children = remoteDir.getChildren();
            if (children.length > 0) {
                throw new InstallerException(
                        "The remote directory: " + remoteDir.getName().getFriendlyURI() + " is not empty");
            }
        }

        remoteDir.copyFrom(localDir, new FileSelector() {

            @Override
            public boolean includeFile(final FileSelectInfo fileInfo) throws Exception {
                if (excludedFiles.contains(fileInfo.getFile().getName().getBaseName())) {
                    logger.fine(fileInfo.getFile().getName().getBaseName() + " excluded");
                    return false;
                }
                final FileObject remoteFile = fileSystemManager.resolveFile(remoteDir,
                        localDir.getName().getRelativeName(fileInfo.getFile().getName()));

                if (!remoteFile.exists()) {
                    logger.fine(fileInfo.getFile().getName().getBaseName() + " missing on server");
                    return true;
                }

                if (fileInfo.getFile().getType() == FileType.FILE) {
                    final long remoteSize = remoteFile.getContent().getSize();
                    final long localSize = fileInfo.getFile().getContent().getSize();
                    final boolean res = localSize != remoteSize;
                    if (res) {
                        logger.fine(fileInfo.getFile().getName().getBaseName() + " different on server");
                    }
                    return res;
                }
                return false;
            }

            @Override
            public boolean traverseDescendents(final FileSelectInfo fileInfo) throws Exception {
                return true;
            }
        });

        for (final File file : additionalFiles) {
            logger.fine("copying file: " + file.getAbsolutePath() + " to remote directory");
            final FileObject fileObject = fileSystemManager.resolveFile("file:" + file.getAbsolutePath());
            final FileObject remoteFile = remoteDir.resolveFile(file.getName());
            remoteFile.copyFrom(fileObject, new AllFileSelector());
        }

        logger.fine("Copying files to: " + host + " completed.");
    } catch (final FileSystemException e) {
        throw new InstallerException("Failed to copy files to remote host " + host + ": " + e.getMessage(), e);

    }
    checkTimeout(endTimeMillis);
}

From source file: org.datacleaner.user.upgrade.DataCleanerHomeUpgrader.java

private FileObject findUpgradeCandidate(FileObject target) throws FileSystemException {
    FileObject parentFolder = target.getParent();

    List<FileObject> versionFolders = new ArrayList<>();
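    // Depth range (1, 1) matches only the direct children of the parent folder.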
    FileObject[] allFoldersInParent = parentFolder.findFiles(new FileDepthSelector(1, 1));
    for (FileObject folderInParent : allFoldersInParent) {
        final String folderInParentName = folderInParent.getName().getBaseName();
        if (folderInParent.getType().equals(FileType.FOLDER)
                && (!folderInParentName.equals(target.getName().getBaseName()))
                && (!candidateBlacklist.contains(folderInParentName))) {
            versionFolders.add(folderInParent);
    }
    }

    List<FileObject> validatedVersionFolders = validateVersionFolders(versionFolders);

    if (!validatedVersionFolders.isEmpty()) {
        List<String> versions = new ArrayList<>();
        for (FileObject validatedVersionFolder : validatedVersionFolders) {
            String baseName = validatedVersionFolder.getName().getBaseName();
            versions.add(baseName);
        }

        final Comparator<String> comp = new VersionComparator();
        String latestVersion = Collections.max(versions, comp);
        FileObject latestVersionFolder = null;
        for (FileObject validatedVersionFolder : validatedVersionFolders) {
            if (validatedVersionFolder.getName().getBaseName().equals(latestVersion)) {
                latestVersionFolder = validatedVersionFolder;
            }
        }
        return latestVersionFolder;
    } else {
        return null;
    }
}

From source file: org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Move files from the source folder to the destination folder, overwriting any files that may already exist there.
 *
 * @param fs           File system to write to
 * @param dest         Destination to move source file/folder into
 * @param pluginFolder Big Data plugin folder
 * @throws KettleFileException
 * @throws IOException
 */
private void stageBigDataPlugin(FileSystem fs, Path dest, FileObject pluginFolder)
        throws KettleFileException, IOException {
    Path pluginsDir = new Path(dest, PATH_PLUGINS);
    Path libDir = new Path(dest, PATH_LIB);
    Path bigDataPluginDir = new Path(pluginsDir, pluginFolder.getName().getBaseName());

    // Stage everything except the hadoop-configurations and pmr libraries
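    // FileDepthSelector(1, 1) limits the scan to the plugin folder's direct children.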
    for (FileObject f : pluginFolder.findFiles(new FileDepthSelector(1, 1))) {
        if (!"hadoop-configurations".equals(f.getName().getBaseName())
                && !"pentaho-mapreduce-libraries.zip".equals(f.getName().getBaseName())) {
            stageForCache(f, fs, new Path(bigDataPluginDir, f.getName().getBaseName()), true, false);
        }
    }

    // Stage the current Hadoop configuration without its client-only or pmr libraries (these will be copied into the
    // lib dir)
    Path hadoopConfigDir = new Path(new Path(bigDataPluginDir, "hadoop-configurations"),
            configuration.getIdentifier());
    for (FileObject f : configuration.getLocation().findFiles(new FileSelector() {
        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return FileType.FILE.equals(info.getFile().getType());
        }

        @Override
        public boolean traverseDescendents(FileSelectInfo info) throws Exception {
            String name = info.getFile().getName().getBaseName();
            return !((PATH_PMR.equals(name) || PATH_CLIENT.equals(name))
                    && PATH_LIB.equals(info.getFile().getParent().getName().getBaseName()));
        }
    })) {
        // Create relative path to write to
        String relPath = configuration.getLocation().getName().getRelativeName(f.getName());
        stageForCache(f, fs, new Path(hadoopConfigDir, relPath), true, false);
    }

    // Stage all pmr libraries for the Hadoop configuration into the root library path for the Kettle environment
    for (FileObject f : configuration.getLocation().resolveFile(PATH_LIB).resolveFile(PATH_PMR)
            .findFiles(new FileTypeSelector(FileType.FILE))) {
        stageForCache(f, fs, new Path(libDir, f.getName().getBaseName()), true, false);
    }
}

From source file: org.pentaho.metaverse.impl.VfsLineageCollector.java

@Override
public List<String> listArtifacts(final String startingDate, final String endingDate)
        throws IllegalArgumentException {
    List<String> paths = new ArrayList<>();
    try {
        FileSystemOptions opts = new FileSystemOptions();
        FileObject lineageRootFolder = KettleVFS.getFileObject(getOutputFolder(), opts);

        FileSelector dateRangeFilter = new VfsDateRangeFilter(format, startingDate, endingDate);
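        // Accept entries from 1 up to 256 levels below the folder being searched.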
        FileSelector depthFilter = new FileDepthSelector(1, 256);

        if (lineageRootFolder.exists() && lineageRootFolder.getType() == FileType.FOLDER) {
            // get the folders that come on or after the startingDate
            FileObject[] dayFolders = lineageRootFolder.findFiles(dateRangeFilter);
            for (FileObject dayFolder : dayFolders) {
                FileObject[] listThisFolder = dayFolder.findFiles(depthFilter);
                for (FileObject currentFile : listThisFolder) {
                    if (currentFile.getType() == FileType.FILE) {
                        paths.add(currentFile.getName().getPath());
                    }
                }
            }
        }
        return paths;
    } catch (Exception e) {
        throw new IllegalArgumentException(e);
    }
}

From source file: org.pentaho.metaverse.impl.VfsLineageCollector.java

@Override
public List<String> listArtifactsForFile(String pathToArtifact, String startingDate, String endingDate)
        throws IllegalArgumentException {
    List<String> paths = new ArrayList<>();

    try {
        FileSystemOptions opts = new FileSystemOptions();
        FileObject lineageRootFolder = KettleVFS.getFileObject(getOutputFolder(), opts);

        FileSelector dateRangeFilter = new VfsDateRangeFilter(format, startingDate, endingDate);
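        // As above, accept entries from 1 up to 256 levels below the searched folder.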
        FileSelector depthFilter = new FileDepthSelector(1, 256);

        if (lineageRootFolder.exists() && lineageRootFolder.getType() == FileType.FOLDER) {

            // get all of the date folders of lineage we have
            FileObject[] dayFolders = lineageRootFolder.findFiles(dateRangeFilter);
            for (FileObject dayFolder : dayFolders) {
                FileObject[] listThisFolder = dayFolder.findFiles(depthFilter);
                for (FileObject currentFile : listThisFolder) {
                    FileObject requested = currentFile.resolveFile(pathToArtifact);
                    if (requested.exists() && requested.getType() == FileType.FOLDER) {
                        FileObject[] requestedChildren = requested.getChildren();
                        for (FileObject requestedChild : requestedChildren) {
                            if (requestedChild.getType() == FileType.FILE) {
                                paths.add(requestedChild.getName().getPath());
                            }
                        }
                    }
                }
            }
        }
        return paths;
    } catch (Exception e) {
        throw new IllegalArgumentException(e);
    }
}

From source file: sf.net.experimaestro.manager.js.JSFileObject.java

@JSFunction
public void copy_to(@JSArgument(name = "destination") JSFileObject destination) throws FileSystemException {
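    // Depth range (0, 0) selects only the source file itself, so exactly one file is copied.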
    destination.file.copyFrom(file, new FileDepthSelector(0, 0));
}