Example usage for org.apache.commons.vfs2 FileObject findFiles

List of usage examples for org.apache.commons.vfs2 FileObject findFiles

Introduction

On this page you can find example usage for org.apache.commons.vfs2 FileObject findFiles.

Prototype

FileObject[] findFiles(FileSelector selector) throws FileSystemException;

Source Link

Document

Finds the set of matching descendants of this file, in depthwise order.

Usage

From source file:org.fuin.vfs2.filter.examples.WildcardFileFilterExample.java

public static void main(String[] args) throws Exception {

    // Demonstrates WildcardFileFilter: print every Java file in the current
    // directory whose name contains the word "test".
    final FileSystemManager manager = VFS.getManager();
    final FileObject currentDir = manager.toFileObject(new File("."));
    final FileObject[] matches = currentDir.findFiles(new FileFilterSelector(new WildcardFileFilter("*test*.java")));
    for (final FileObject match : matches) {
        System.out.println(match);
    }

}

From source file:org.kalypso.commons.io.VFSUtilities.java

/**
 * Moves the complete content of one directory into another.
 *
 * @throws IOException/*ww  w. j av a  2 s. co  m*/
 *           If the move failed.
 */
public static void moveContents(final File sourceDir, final File dest) throws IOException {
    final FileSystemManager vfsManager = VFSUtilities.getManager();
    final FileObject source = vfsManager.toFileObject(sourceDir);
    final FileObject destDir = vfsManager.toFileObject(dest);

    final FileObject[] findFiles = source.findFiles(new AllFileSelector());
    // Might happen, if source does not exists... shouldn't we check this?
    if (findFiles == null)
        return;

    for (final FileObject fileObject : findFiles) {
        if (FileType.FILE.equals(fileObject.getType())) {
            final String relPath = source.getName().getRelativeName(fileObject.getName());
            final FileObject destFile = destDir.resolveFile(relPath, NameScope.DESCENDENT_OR_SELF);
            final FileObject folder = destFile.getParent();
            folder.createFolder();
            fileObject.moveTo(destFile);
        }
    }
}

From source file:org.obiba.opal.shell.commands.ImportCommand.java

/**
 * Lists all ZIP files contained (directly or in any sub-folder) in the given folder.
 *
 * @param file folder to search; all descendants are traversed
 * @return the matching files in depthwise order; empty if none were found
 * @throws FileSystemException if listing the folder fails
 */
private Collection<FileObject> getFilesInFolder(FileObject file) throws FileSystemException {
    FileObject[] filesInDir = file.findFiles(new FileSelector() {
        @Override
        public boolean traverseDescendents(FileSelectInfo file) throws Exception {
            return true;
        }

        @Override
        public boolean includeFile(FileSelectInfo file) throws Exception {
            // Keep regular files whose extension is "zip" (case-insensitive).
            return file.getFile().getType() == FileType.FILE
                    && "zip".equals(file.getFile().getName().getExtension().toLowerCase());
        }
    });
    // findFiles may return null (e.g. non-existent folder); Arrays.asList(null) would NPE.
    if (filesInDir == null) {
        return Arrays.<FileObject>asList();
    }
    return Arrays.asList(filesInDir);
}

From source file:org.onehippo.forge.content.exim.core.impl.AbstractContentMigrationTask.java

/**
 * Finds files (type of {@link FileObject}) under {@code baseFolder} whose names match the
 * regular expression {@code nameRegex}, restricted to descendants whose depth lies in the
 * range [{@code minDepth}, {@code maxDepth}].
 * @param baseFolder base folder to search from
 * @param nameRegex file name pattern regular expression
 * @param minDepth minimum depth of a descendant file
 * @param maxDepth maximum depth of a descendant file
 * @return array of files (type of {@link FileObject}) found
 * @throws FileSystemException if any file system exception occurs
 */
public FileObject[] findFilesByNamePattern(FileObject baseFolder, String nameRegex, int minDepth, int maxDepth)
        throws FileSystemException {
    final Pattern namePattern = Pattern.compile(nameRegex);
    return baseFolder.findFiles(
            new FileFilterDepthSelector(new NamePatternFileFilter(namePattern), minDepth, maxDepth));
}

From source file:org.ow2.proactive.scheduler.smartproxy.SmartProxyImpl.java

/**
 * Downloads, or schedules the download of, the output files of the awaited task
 * {@code t_name} of job {@code jobId} from the job's pull URL into {@code localFolder}.
 * In automatic-transfer mode the copy is handed to a background
 * {@code DataTransferProcessor}; otherwise the files are copied synchronously.
 *
 * @param awaitedjob awaited job providing the pull URL and transfer settings
 * @param jobId identifier of the job the task belongs to
 * @param t_name name of the task whose output files are transferred
 * @param localFolder local folder (resolved through the job tracker) to copy into
 * @throws IllegalArgumentException if the task does not belong to the job
 * @throws Exception if resolving the folders or the synchronous copy fails
 */
@Override
protected void downloadTaskOutputFiles(AwaitedJob awaitedjob, String jobId, String t_name, String localFolder)
        throws Exception {
    AwaitedTask atask = awaitedjob.getAwaitedTask(t_name);
    if (atask == null) {
        throw new IllegalArgumentException(
                "The task " + t_name + " does not belong to job " + jobId + " or has already been removed");
    }
    // Do not start a second transfer for a task that is already transferring its output.
    if (atask.isTransferring()) {
        log.warn("The task " + t_name + " of job " + jobId + " is already transferring its output");
        return;
    }
    String pull_URL = awaitedjob.getPullURL();

    if (awaitedjob.isIsolateTaskOutputs()) {
        // Isolated outputs live in a per-task-id sub-directory of the task-id folder.
        pull_URL = pull_URL.replace(SchedulerConstants.TASKID_DIR_DEFAULT_NAME,
                SchedulerConstants.TASKID_DIR_DEFAULT_NAME + "/" + atask.getTaskId());
    }

    FileObject remotePullFolderFO;
    FileObject localfolderFO;

    try {
        remotePullFolderFO = jobTracker.resolveFile(pull_URL);

        localfolderFO = jobTracker.resolveFile(localFolder);
    } catch (FileSystemException e) {
        log.error("Could not retrieve data for job " + jobId, e);
        throw new IllegalStateException("Could not retrieve data for job " + jobId, e);
    }

    String sourceUrl = remotePullFolderFO.getURL().toString();
    String destUrl = localfolderFO.getURL().toString();

    // Merge the include/exclude patterns of all the task's output selectors into one selector.
    org.objectweb.proactive.extensions.dataspaces.vfs.selector.FileSelector fileSelector = new org.objectweb.proactive.extensions.dataspaces.vfs.selector.FileSelector();

    List<OutputSelector> ouputFileSelectors = atask.getOutputSelectors();
    for (OutputSelector os : ouputFileSelectors) {
        org.objectweb.proactive.extensions.dataspaces.vfs.selector.FileSelector fs = os.getOutputFiles();
        if (!fs.getIncludes().isEmpty()) {
            fileSelector.addIncludes(fs.getIncludes());
        }

        if (!fs.getExcludes().isEmpty()) {
            fileSelector.addExcludes(fs.getExcludes());
        }
    }

    if (log.isDebugEnabled()) {
        // NOTE(review): this polling loop (up to 50 tries, 100 ms apart) runs only when debug
        // logging is enabled and is purely diagnostic -- the transfer below does not depend on
        // it. Confirm the retry was not intended to gate the copy itself.
        log.debug("Looking at files in " + sourceUrl + " with " + fileSelector.getIncludes() + "-"
                + fileSelector.getExcludes());
        boolean goon = true;
        int cpt = 0;
        FileObject[] fos = null;
        while (goon) {
            fos = remotePullFolderFO.findFiles(fileSelector);
            goon = cpt < 50 && (fos == null || fos.length == 0);
            cpt++;
            if (goon) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag; the loop continues until files appear or 50 tries.
                    Thread.currentThread().interrupt();
                }
            }
        }

        if (fos != null && fos.length > 0) {
            for (FileObject fo : fos) {
                log.debug("Found " + fo.getName());
            }
        } else {
            log.warn("Couldn't find " + fileSelector.getIncludes() + "-" + fileSelector.getExcludes() + " in "
                    + sourceUrl);
        }
    }
    if (awaitedjob.isAutomaticTransfer()) {
        // Automatic mode: submit the copy to the thread pool; the task is marked as
        // transferring here (presumably cleared by DataTransferProcessor -- confirm).
        DataTransferProcessor dtp = new DataTransferProcessor(remotePullFolderFO, localfolderFO, jobId, t_name,
                fileSelector);
        jobTracker.setTaskTransferring(jobId, t_name, true);
        threadPool.submit((Runnable) dtp);
    } else {
        log.debug("Copying files from " + sourceUrl + " to " + destUrl);

        try {
            localfolderFO.copyFrom(remotePullFolderFO, fileSelector);
        } catch (FileSystemException e) {
            log.error(e);
            throw e;
        } finally {
            // Synchronous mode: whether the copy succeeded or failed, the task is no longer
            // transferring and is removed from the awaited set.
            jobTracker.setTaskTransferring(jobId, t_name, false);
            jobTracker.removeAwaitedTask(jobId, t_name);
        }

        log.debug("Finished copying files from " + sourceUrl + " to " + destUrl);
        // ok we can remove the task
    }
}

From source file:org.pentaho.di.bigdata.ShimDependentJobEntryPluginType.java

/**
 * Returns the single plugin folder this plugin type scans: the "plugins/" directory next
 * to the jar (or class location) containing this class. Its jar lookup optionally skips
 * "lib" directories.
 */
@Override
public List<PluginFolderInterface> getPluginFolders() {
    final String pluginsUri = new File(ShimDependentJobEntryPluginType.class.getProtectionDomain()
            .getCodeSource().getLocation().getPath()).getParentFile().toURI().toString() + "plugins/";
    final PluginFolderInterface pluginFolder = new PluginFolder(pluginsUri, false, true) {
        @Override
        public FileObject[] findJarFiles(final boolean includeLibJars) throws KettleFileException {
            try {
                // Collect every jar beneath the folder, optionally skipping "lib" directories.
                final FileObject root = KettleVFS.getFileObject(this.getFolder());
                return root.findFiles(new FileSelector() {
                    @Override
                    public boolean traverseDescendents(FileSelectInfo info) throws Exception {
                        final String baseName = info.getFile().getName().getBaseName();
                        return includeLibJars || !"lib".equals(baseName);
                    }

                    @Override
                    public boolean includeFile(FileSelectInfo info) throws Exception {
                        return info.getFile().toString().endsWith(".jar");
                    }
                });
            } catch (Exception e) {
                throw new KettleFileException(
                        "Unable to list jar files in plugin folder '" + toString() + "'", e);
            }
        }
    };
    return Arrays.asList(pluginFolder);
}

From source file:org.pentaho.di.repository.KettleDatabaseRepositoryIT.java

/**
 * Loads every sample job ({@code *.kjb}) under {@code samples/jobs/}, saves each into the
 * repository and checks that a save/load round trip yields identical job XML. The first
 * sample (by path order) is additionally used to verify that job attributes survive the
 * round trip. Finally checks that the repository lists as many jobs as files were stored.
 *
 * @param samplesDirectory repository directory the samples are stored into
 * @throws Exception if loading, saving or verification fails
 */
protected void verifyJobSamples(RepositoryDirectoryInterface samplesDirectory) throws Exception {
    FileObject jobSamplesFolder = KettleVFS.getFileObject("samples/jobs/");
    FileObject[] files = jobSamplesFolder.findFiles(new FileSelector() {

        @Override
        public boolean traverseDescendents(FileSelectInfo arg0) throws Exception {
            return true;
        }

        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return info.getFile().getName().getExtension().equalsIgnoreCase("kjb");
        }
    });

    // Sort by path so the samples are processed in a deterministic order.
    // (Arrays.asList is fixed-size, but in-place sorting is allowed.)
    List<FileObject> filesList = Arrays.asList(files);
    Collections.sort(filesList, new Comparator<FileObject>() {
        @Override
        public int compare(FileObject o1, FileObject o2) {
            return o1.getName().getPath().compareTo(o2.getName().getPath());
        }
    });

    // test the storage of jobMeta attributes in the Kettle DB Repo
    if (filesList.size() > 0) {
        FileObject file = filesList.get(0);
        String jobFilename = file.getName().getPath();
        System.out.println("Storing/Loading/validating job attributes");

        // Load the JobMeta object...
        //
        JobMeta jobMeta = new JobMeta(jobFilename, repository);
        // set some attributes
        jobMeta.setAttribute("group", "key", "value");
        jobMeta.setAttribute("test-group", "test-key-1", "test-value");
        jobMeta.setAttribute("test-group", "test-key-2", "test-value");
        jobMeta.setAttribute("test-group", "test-key-3", "test-value-3");

        // Save it in the repository in the samples folder
        //
        jobMeta.setRepositoryDirectory(samplesDirectory);
        repository.save(jobMeta, "unit testing");
        assertNotNull(jobMeta.getObjectId());

        // Load it back up again and verify each attribute survived the round trip...
        //
        JobMeta repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String value = repJobMeta.getAttribute("group", "key");
        String value1 = repJobMeta.getAttribute("test-group", "test-key-1");
        String value2 = repJobMeta.getAttribute("test-group", "test-key-2");
        String value3 = repJobMeta.getAttribute("test-group", "test-key-3");
        assertEquals("value", value);
        assertEquals("test-value", value1);
        assertEquals("test-value", value2);
        assertEquals("test-value-3", value3);
    }

    for (FileObject file : filesList) {
        String jobFilename = file.getName().getPath();
        System.out.println("Storing/Loading/validating job '" + jobFilename + "'");

        // Load the JobMeta object...
        //
        JobMeta jobMeta = new JobMeta(jobFilename, repository);
        if (Utils.isEmpty(jobMeta.getName())) {
            // Some sample files carry no name; derive one from the file's base name.
            jobMeta.setName(Const.createName(file.getName().getBaseName()));
        }

        // Save it in the repository in the samples folder
        //
        jobMeta.setRepositoryDirectory(samplesDirectory);
        repository.save(jobMeta, "unit testing");
        assertNotNull(jobMeta.getObjectId());

        // Load it back up again...
        //
        JobMeta repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String oneXml = repJobMeta.getXML();

        // Save & load it again
        //
        repository.save(jobMeta, "unit testing");
        repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String twoXml = repJobMeta.getXML();

        // The XML needs to be identical after loading
        //
        // storeFile(oneXml, "/tmp/one.ktr");
        // storeFile(twoXml, "/tmp/two.ktr");
        //
        assertEquals(oneXml, twoXml);
    }

    // Verify the number of stored files, see if we can find them all again.
    //
    System.out.println("Stored " + files.length + " job samples in folder " + samplesDirectory.getPath());
    String[] jobNames = repository.getJobNames(samplesDirectory.getObjectId(), false);
    assertEquals(files.length, jobNames.length);
}

From source file:org.pentaho.di.repository.KettleFileRepositoryIT.java

/**
 * Loads every sample job ({@code *.kjb}) under {@code samples/jobs/}, saves each into the
 * file repository and checks that a save/load round trip yields identical job XML.
 * Finally checks that the repository lists as many jobs as files were stored.
 *
 * @param samplesDirectory repository directory the samples are stored into
 * @throws Exception if loading, saving or verification fails
 */
private void verifyJobSamples(RepositoryDirectoryInterface samplesDirectory) throws Exception {
    FileObject jobSamplesFolder = KettleVFS.getFileObject("samples/jobs/");
    FileObject[] files = jobSamplesFolder.findFiles(new FileSelector() {

        @Override
        public boolean traverseDescendents(FileSelectInfo arg0) throws Exception {
            return true;
        }

        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return info.getFile().getName().getExtension().equalsIgnoreCase("kjb");
        }
    });

    // Sort by path so the samples are processed in a deterministic order.
    List<FileObject> filesList = Arrays.asList(files);
    Collections.sort(filesList, new Comparator<FileObject>() {
        @Override
        public int compare(FileObject o1, FileObject o2) {
            return o1.getName().getPath().compareTo(o2.getName().getPath());
        }
    });

    for (FileObject file : filesList) {
        String jobFilename = file.getName().getPath();
        System.out.println("Storing/Loading/validating job '" + jobFilename + "'");

        // Load the JobMeta object...
        //
        JobMeta jobMeta = new JobMeta(jobFilename, repository);
        jobMeta.setFilename(null);

        // The name is sometimes empty in the file, duplicates are present too...
        // Replaces slashes and the like as well...
        // (The former conditional re-normalization below this was a no-op duplicate of
        // these two lines and has been removed.)
        //
        jobMeta.setName(Const.createName(file.getName().getBaseName()));
        jobMeta.setName(jobMeta.getName().replace('/', '-'));

        // Save it in the repository in the samples folder
        //
        jobMeta.setRepositoryDirectory(samplesDirectory);
        repository.save(jobMeta, "unit testing");
        assertNotNull(jobMeta.getObjectId());

        // Load it back up again...
        //
        JobMeta repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String oneXml = repJobMeta.getXML();

        // Save & load it again
        //
        repository.save(jobMeta, "unit testing");
        repJobMeta = repository.loadJob(jobMeta.getObjectId(), null);
        String twoXml = repJobMeta.getXML();

        // The XML needs to be identical after loading
        //
        assertEquals(oneXml, twoXml);
    }

    // Verify the number of stored files, see if we can find them all again.
    //
    System.out.println("Stored " + files.length + " job samples in folder " + samplesDirectory.getPath());
    String[] jobNames = repository.getJobNames(samplesDirectory.getObjectId(), false);
    assertEquals(files.length, jobNames.length);
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Stages the Big Data plugin folder into the distributed cache layout on the given file
 * system, in three passes: (1) the plugin folder's direct children, except the
 * "hadoop-configurations" folder and "pentaho-mapreduce-libraries.zip"; (2) the active
 * Hadoop configuration, skipping its pmr and client-only sub-folders under "lib"; (3) the
 * configuration's pmr libraries, flattened into the root library path.
 *
 * @param fs           File system to write to
 * @param dest         Destination to stage the plugin folder into
 * @param pluginFolder Big Data plugin folder
 * @throws KettleFileException
 * @throws IOException
 */
private void stageBigDataPlugin(FileSystem fs, Path dest, FileObject pluginFolder)
        throws KettleFileException, IOException {
    Path pluginsDir = new Path(dest, PATH_PLUGINS);
    Path libDir = new Path(dest, PATH_LIB);
    Path bigDataPluginDir = new Path(pluginsDir, pluginFolder.getName().getBaseName());

    // Stage everything except the hadoop-configurations and pmr libraries
    // (FileDepthSelector(1, 1) limits the scan to the folder's direct children).
    for (FileObject f : pluginFolder.findFiles(new FileDepthSelector(1, 1))) {
        if (!"hadoop-configurations".equals(f.getName().getBaseName())
                && !"pentaho-mapreduce-libraries.zip".equals(f.getName().getBaseName())) {
            stageForCache(f, fs, new Path(bigDataPluginDir, f.getName().getBaseName()), true, false);
        }
    }

    // Stage the current Hadoop configuration without its client-only or pmr libraries (these will be copied into the
    // lib dir)
    Path hadoopConfigDir = new Path(new Path(bigDataPluginDir, "hadoop-configurations"),
            configuration.getIdentifier());
    for (FileObject f : configuration.getLocation().findFiles(new FileSelector() {
        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            return FileType.FILE.equals(info.getFile().getType());
        }

        @Override
        public boolean traverseDescendents(FileSelectInfo info) throws Exception {
            // Skip the pmr and client folders only when they are direct children of a "lib" folder.
            String name = info.getFile().getName().getBaseName();
            return !((PATH_PMR.equals(name) || PATH_CLIENT.equals(name))
                    && PATH_LIB.equals(info.getFile().getParent().getName().getBaseName()));
        }
    })) {
        // Create relative path to write to, preserving the configuration's folder structure.
        String relPath = configuration.getLocation().getName().getRelativeName(f.getName());
        stageForCache(f, fs, new Path(hadoopConfigDir, relPath), true, false);
    }

    // Stage all pmr libraries for the Hadoop configuration into the root library path for the Kettle environment
    for (FileObject f : configuration.getLocation().resolveFile(PATH_LIB).resolveFile(PATH_PMR)
            .findFiles(new FileTypeSelector(FileType.FILE))) {
        stageForCache(f, fs, new Path(libDir, f.getName().getBaseName()), true, false);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Recursively searches for all files starting at the directory provided with the extension
 * provided. If no extension is provided all files will be returned.
 *
 * @param root      Directory to start the search for files in
 * @param extension File extension to search for. If null all files will be returned.
 * @return List of absolute path names to all files found in {@code root} and its subdirectories.
 * @throws FileSystemException if the traversal fails or a file URL cannot be converted to a URI
 */
public List<String> findFiles(FileObject root, final String extension) throws FileSystemException {
    FileObject[] files = root.findFiles(new FileSelector() {
        @Override
        public boolean includeFile(FileSelectInfo fileSelectInfo) throws Exception {
            return extension == null || extension.equals(fileSelectInfo.getFile().getName().getExtension());
        }

        @Override
        public boolean traverseDescendents(FileSelectInfo fileSelectInfo) throws Exception {
            return FileType.FOLDER.equals(fileSelectInfo.getFile().getType());
        }
    });

    if (files == null) {
        // Type-safe empty list; the raw Collections.EMPTY_LIST needed an unchecked suppression.
        return Collections.emptyList();
    }

    List<String> paths = new ArrayList<String>();
    for (FileObject file : files) {
        try {
            paths.add(file.getURL().toURI().getPath());
        } catch (URISyntaxException ex) {
            // Preserve the original exception as the cause instead of discarding it.
            throw new FileSystemException("Error getting URI of file: " + file.getURL().getPath(), ex);
        }
    }
    return paths;
}