Example usage for org.apache.commons.vfs2 FileTypeSelector FileTypeSelector

List of usage examples for org.apache.commons.vfs2 FileTypeSelector FileTypeSelector

Introduction

On this page you can find example usages of the org.apache.commons.vfs2 FileTypeSelector constructor, FileTypeSelector(FileType).

Prototype

public FileTypeSelector(final FileType type) 

Source Link

Document

Creates a new selector for the given file type.

Usage

From source file:de.unioninvestment.portal.explorer.view.vfs.VFSMainView.java

/**
 * Scans the remote location reachable via {@code ftpconn} and populates {@code tree}
 * with every folder found, wiring each node to its parent and deriving a short
 * caption from the part of the path below the parent.
 *
 * @param fsManager VFS manager used to resolve the connection root
 * @param opts      file-system options (credentials, passive mode, ...) for the resolve
 * @param ftpconn   connection URI of the root to scan
 * @throws IOException declared for callers; resolution failures are caught and logged below
 */
public void scanDirectory(FileSystemManager fsManager, FileSystemOptions opts, String ftpconn)
        throws IOException {
    try {
        FileObject fileObject = fsManager.resolveFile(ftpconn, opts);
        // Collect every folder beneath (and including) the connection root.
        FileObject[] files = fileObject.findFiles(new FileTypeSelector(FileType.FOLDER));
        Map<String, String> parentMap = new HashMap<String, String>();
        for (FileObject fo : files) {
            String objectName = fo.getName().toString();
            tree.addItem(objectName);
            tree.setItemIcon(objectName, FOLDER);
            if (fo.getParent() != null) {
                // Remember child -> parent so the hierarchy can be wired after all
                // nodes exist in the tree.
                parentMap.put(objectName, fo.getParent().getName().toString());
            } else {
                // The root has no parent; give it a fixed caption.
                tree.setItemCaption(objectName, "/");
            }
        }

        // Wire up parent/child links and caption each node with the path suffix
        // below its parent (keys and values are full URI strings, so the child
        // always starts with the parent's name).
        logger.log(Level.INFO, "parentMap " + parentMap.size());
        for (Map.Entry<String, String> entry : parentMap.entrySet()) {
            tree.setParent(entry.getKey(), entry.getValue());
            String caption = entry.getKey().substring(entry.getValue().length());
            tree.setItemCaption(entry.getKey(), removeSlash(caption));
        }
    } catch (FileSystemException e) {
        // Log with the failing connection instead of dumping to stderr; the
        // method's contract (no VFS exception propagated) is unchanged.
        logger.log(Level.SEVERE, "Unable to scan directory " + ftpconn, e);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Stages the Big Data plugin folder into the destination path on the given file system,
 * overwriting any files that may already exist there. Three passes are made:
 * the plugin's top-level entries (minus hadoop-configurations and the pmr zip),
 * the active Hadoop configuration (minus its client/pmr libraries), and finally
 * the configuration's pmr libraries into the shared lib directory.
 *
 * @param fs           file system to write to
 * @param dest         destination directory the plugin/lib layout is created under
 * @param pluginFolder Big Data plugin folder to stage
 * @throws KettleFileException if staging a file for the cache fails
 * @throws IOException         if a file-system operation fails
 */
private void stageBigDataPlugin(FileSystem fs, Path dest, FileObject pluginFolder)
        throws KettleFileException, IOException {
    Path pluginsDir = new Path(dest, PATH_PLUGINS);
    Path libDir = new Path(dest, PATH_LIB);
    Path bigDataPluginDir = new Path(pluginsDir, pluginFolder.getName().getBaseName());

    // Stage everything except the hadoop-configurations and pmr libraries.
    // FileDepthSelector(1, 1) selects only the plugin folder's direct children.
    for (FileObject f : pluginFolder.findFiles(new FileDepthSelector(1, 1))) {
        if (!"hadoop-configurations".equals(f.getName().getBaseName())
                && !"pentaho-mapreduce-libraries.zip".equals(f.getName().getBaseName())) {
            stageForCache(f, fs, new Path(bigDataPluginDir, f.getName().getBaseName()), true, false);
        }
    }

    // Stage the current Hadoop configuration without its client-only or pmr libraries (these will be copied into the
    // lib dir)
    Path hadoopConfigDir = new Path(new Path(bigDataPluginDir, "hadoop-configurations"),
            configuration.getIdentifier());
    for (FileObject f : configuration.getLocation().findFiles(new FileSelector() {
        // Select regular files only; directory structure is recreated via relPath below.
        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return FileType.FILE.equals(info.getFile().getType());
        }

        // Skip descending into lib/pmr and lib/client so their contents are excluded.
        @Override
        public boolean traverseDescendents(FileSelectInfo info) throws Exception {
            String name = info.getFile().getName().getBaseName();
            return !((PATH_PMR.equals(name) || PATH_CLIENT.equals(name))
                    && PATH_LIB.equals(info.getFile().getParent().getName().getBaseName()));
        }
    })) {
        // Create relative path to write to, preserving the configuration's layout.
        String relPath = configuration.getLocation().getName().getRelativeName(f.getName());
        stageForCache(f, fs, new Path(hadoopConfigDir, relPath), true, false);
    }

    // Stage all pmr libraries for the Hadoop configuration into the root library path for the Kettle environment
    for (FileObject f : configuration.getLocation().resolveFile(PATH_LIB).resolveFile(PATH_PMR)
            .findFiles(new FileTypeSelector(FileType.FILE))) {
        stageForCache(f, fs, new Path(libDir, f.getName().getBaseName()), true, false);
    }
}
}