Example usage for the org.apache.commons.vfs.AllFileSelector constructor

Introduction

This page collects example usages of org.apache.commons.vfs.AllFileSelector, a FileSelector that matches every file and folder in a hierarchy, including the base folder itself.

Prototype

AllFileSelector()
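
Below is a minimal sketch of typical usage (Commons VFS 1.x; in VFS 2.x the package is org.apache.commons.vfs2). The folder path is a placeholder for this sketch: the selector is created with its no-argument constructor and passed to FileObject methods such as findFiles(...) and delete(...).

import org.apache.commons.vfs.AllFileSelector;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemException;
import org.apache.commons.vfs.VFS;

public class AllFileSelectorSketch {
    public static void main(String[] args) throws FileSystemException {
        // Resolve a folder; the path here is a placeholder
        FileObject folder = VFS.getManager().resolveFile("file:///tmp/example");

        // findFiles with AllFileSelector returns every file and folder in the
        // hierarchy, including the base folder itself
        FileObject[] entries = folder.findFiles(new AllFileSelector());
        System.out.println("Entries found: " + entries.length);

        // delete with AllFileSelector removes the folder and all of its
        // contents, returning the number of entries deleted
        int deleted = folder.delete(new AllFileSelector());
        System.out.println("Entries deleted: " + deleted);
    }
}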

Usage

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtil.java

/**
 * Delete a directory and all of its contents
 *
 * @param dir Directory to delete
 * @return True if the directory was deleted successfully
 */
public boolean deleteDirectory(FileObject dir) throws FileSystemException {
    dir.delete(new AllFileSelector());
    return !dir.exists();
}
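
Note that FileObject.delete(FileSelector) returns the number of entries removed; because AllFileSelector also selects the base folder, the directory itself is deleted, which the !dir.exists() check above confirms.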

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void extractToTemp() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    FileObject archive = KettleVFS.getFileObject("test-res/pentaho-mapreduce-sample.jar");
    FileObject extracted = ch.extractToTemp(archive);

    assertNotNull(extracted);
    assertTrue(extracted.exists());
    try {
        // 3 files and 5 directories, plus the root folder itself: 9 entries in total
        assertEquals(9, extracted.findFiles(new AllFileSelector()).length);
    } finally {
        // clean up after ourselves
        ch.deleteDirectory(extracted);
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void findFiles_vfs() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    FileObject testFolder = createTestFolderWithContent();

    try {
        // Simply test we can find the jar files in our test folder
        List<String> jars = ch.findFiles(testFolder, "jar");
        assertEquals(2, jars.size());

        // Look for all files and folders
        List<String> all = ch.findFiles(testFolder, null);
        assertEquals(5, all.size());
    } finally {
        testFolder.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void findFiles_vfs_hdfs() throws Exception {

    // Stage files then make sure we can find them in HDFS
    DistributedCacheUtil ch = new DistributedCacheUtil();
    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);
    HDFSFileSystem.setMockHDFSFileSystem(fs);

    // Must use absolute paths so the HDFS VFS FileSystem can resolve the URL properly (can't do relative paths when
    // using KettleVFS.getFileObject() within HDFS)
    Path root = new Path(KettleVFS.getFileObject(".").getURL().getPath() + "/bin/test/findFiles_hdfs");
    Path dest = new Path(root, "org/pentaho/mapreduce/");

    FileObject hdfsDest = KettleVFS.getFileObject("hdfs://localhost/" + dest.toString());

    // Copy the contents of test folder
    FileObject source = createTestFolderWithContent();

    try {
        try {
            ch.stageForCache(source, fs, dest, true);

            List<String> files = ch.findFiles(hdfsDest, null);
            assertEquals(5, files.size());
        } finally {
            fs.delete(root, true);
        }
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void findFiles_hdfs_native() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    // Copy the contents of test folder
    FileObject source = createTestFolderWithContent();
    Path root = new Path("bin/test/stageArchiveForCacheTest");
    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);
    Path dest = new Path(root, "org/pentaho/mapreduce/");
    try {
        try {
            ch.stageForCache(source, fs, dest, true);

            List<Path> files = ch.findFiles(fs, dest, null);
            assertEquals(3, files.size());

            files = ch.findFiles(fs, dest, Pattern.compile(".*jar$"));
            assertEquals(2, files.size());

            files = ch.findFiles(fs, dest, Pattern.compile(".*folder$"));
            assertEquals(1, files.size());
        } finally {
            fs.delete(root, true);
        }
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void stageForCache() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    // Copy the contents of test folder
    FileObject source = createTestFolderWithContent();

    try {
        Path root = new Path("bin/test/stageArchiveForCacheTest");
        Path dest = new Path(root, "org/pentaho/mapreduce/");

        Configuration conf = new Configuration();
        org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

        stageForCacheTester(ch, source, fs, root, dest, 3, 2);
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void stageForCache_destination_no_overwrite() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    FileObject source = createTestFolderWithContent();
    try {
        Path root = new Path("bin/test/stageForCache_destination_exists");
        Path dest = new Path(root, "dest");

        fs.mkdirs(dest);
        assertTrue(fs.exists(dest));
        assertTrue(fs.getFileStatus(dest).isDir());
        try {
            ch.stageForCache(source, fs, dest, false);
            // Reaching this line means no exception was thrown, which is a failure
            fail("Expected a KettleFileException: destination exists and overwrite is false");
        } catch (KettleFileException ex) {
            assertTrue(ex.getMessage().contains("Destination exists"));
        } finally {
            fs.delete(root, true);
        }
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void stageForCache_destination_exists() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    FileObject source = createTestFolderWithContent();
    try {
        Path root = new Path("bin/test/stageForCache_destination_exists");
        Path dest = new Path(root, "dest");

        fs.mkdirs(dest);
        assertTrue(fs.exists(dest));
        assertTrue(fs.getFileStatus(dest).isDir());

        stageForCacheTester(ch, source, fs, root, dest, 3, 2);
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void installKettleEnvironment() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject("test-res/empty-pmr.zip");

    FileObject bigDataPluginDir = createTestFolderWithContent(
            DistributedCacheUtil.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, null);
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void installKettleEnvironment_additional_plugins() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject("test-res/empty-pmr.zip");

    FileObject bigDataPluginDir = createTestFolderWithContent(
            DistributedCacheUtil.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);
    FileObject samplePluginDir = createTestFolderWithContent("sample-plugin");

    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, Arrays.asList(samplePluginDir));
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));
        assertTrue(fs.exists(new Path(root, "plugins/sample-plugin")));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        samplePluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}