Example usage for org.apache.commons.vfs FileObject delete

List of usage examples for org.apache.commons.vfs FileObject delete

Introduction

On this page you can find example usages of org.apache.commons.vfs FileObject delete.

Prototype

public int delete(FileSelector selector) throws FileSystemException;

Document

Deletes all descendants of this file that match a selector and returns the number of deleted objects.
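
Before the project-specific tests below, here is a minimal, self-contained sketch of the call. It assumes a local file:// URI (the path is only a placeholder) and uses AllFileSelector to match every descendant:

import org.apache.commons.vfs.AllFileSelector;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemManager;
import org.apache.commons.vfs.VFS;

public class DeleteExample {
    public static void main(String[] args) throws Exception {
        FileSystemManager fsManager = VFS.getManager();

        // Placeholder location; point this at a folder you actually want to clear.
        FileObject folder = fsManager.resolveFile("file:///tmp/vfs-example");

        // delete(FileSelector) removes all matching descendants and returns
        // the number of objects that were deleted.
        int deleted = folder.delete(new AllFileSelector());
        System.out.println("Deleted " + deleted + " file(s)/folder(s)");
    }
}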

Usage

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void findFiles_hdfs_native() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    // Copy the contents of test folder
    FileObject source = createTestFolderWithContent();
    Path root = new Path("bin/test/stageArchiveForCacheTest");
    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);
    Path dest = new Path(root, "org/pentaho/mapreduce/");
    try {
        try {
            ch.stageForCache(source, fs, dest, true);

            List<Path> files = ch.findFiles(fs, dest, null);
            assertEquals(3, files.size());

            files = ch.findFiles(fs, dest, Pattern.compile(".*jar$"));
            assertEquals(2, files.size());

            files = ch.findFiles(fs, dest, Pattern.compile(".*folder$"));
            assertEquals(1, files.size());
        } finally {
            fs.delete(root, true);
        }
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void stageForCache() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    // Copy the contents of test folder
    FileObject source = createTestFolderWithContent();

    try {
        Path root = new Path("bin/test/stageArchiveForCacheTest");
        Path dest = new Path(root, "org/pentaho/mapreduce/");

        Configuration conf = new Configuration();
        org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

        stageForCacheTester(ch, source, fs, root, dest, 3, 2);
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void stageForCache_destination_no_overwrite() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    FileObject source = createTestFolderWithContent();
    try {
        Path root = new Path("bin/test/stageForCache_destination_exists");
        Path dest = new Path(root, "dest");

        fs.mkdirs(dest);
        assertTrue(fs.exists(dest));
        assertTrue(fs.getFileStatus(dest).isDir());
        try {
            ch.stageForCache(source, fs, dest, false);
        } catch (KettleFileException ex) {
            assertTrue(ex.getMessage().contains("Destination exists"));
        } finally {
            fs.delete(root, true);
        }
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void stageForCache_destination_exists() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    FileObject source = createTestFolderWithContent();
    try {
        Path root = new Path("bin/test/stageForCache_destination_exists");
        Path dest = new Path(root, "dest");

        fs.mkdirs(dest);
        assertTrue(fs.exists(dest));
        assertTrue(fs.getFileStatus(dest).isDir());

        stageForCacheTester(ch, source, fs, root, dest, 3, 2);
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void installKettleEnvironment() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject("test-res/empty-pmr.zip");

    FileObject bigDataPluginDir = createTestFolderWithContent(
            DistributedCacheUtil.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, null);
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void installKettleEnvironment_additional_plugins() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject("test-res/empty-pmr.zip");

    FileObject bigDataPluginDir = createTestFolderWithContent(
            DistributedCacheUtil.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);
    FileObject samplePluginDir = createTestFolderWithContent("sample-plugin");

    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, Arrays.asList(samplePluginDir));
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));
        assertTrue(fs.exists(new Path(root, "plugins/sample-plugin")));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        samplePluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void stagePluginsForCache() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    Path pluginsDir = new Path("bin/test/plugins-installation-dir");

    FileObject pluginDir = createTestFolderWithContent();

    try {
        ch.stagePluginsForCache(fs, pluginsDir, true, Arrays.asList(pluginDir));
        Path pluginInstallPath = new Path(pluginsDir, pluginDir.getURL().toURI().getPath());
        assertTrue(fs.exists(pluginInstallPath));
        ContentSummary summary = fs.getContentSummary(pluginInstallPath);
        assertEquals(3, summary.getFileCount());
        assertEquals(2, summary.getDirectoryCount());
    } finally {
        pluginDir.delete(new AllFileSelector());
        fs.delete(pluginsDir, true);
    }
}

From source file:org.pentaho.di.job.entries.hadooptransjobexecutor.DistributedCacheUtilTest.java

@Test
public void configureWithpmr() throws Exception {
    DistributedCacheUtil ch = new DistributedCacheUtil();

    Configuration conf = new Configuration();
    org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.getLocal(conf);

    // This "empty pmr" contains a lib/ folder and some empty kettle-*.jar files but no actual content
    FileObject pmrArchive = KettleVFS.getFileObject("test-res/empty-pmr.zip");

    FileObject bigDataPluginDir = createTestFolderWithContent(
            DistributedCacheUtil.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, null);
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));

        ch.configureWithKettleEnvironment(conf, fs, root);

        // Make sure our libraries are on the classpath
        assertTrue(conf.get("mapred.cache.files").contains("lib/kettle-core.jar"));
        assertTrue(conf.get("mapred.cache.files").contains("lib/kettle-engine.jar"));
        assertTrue(conf.get("mapred.job.classpath.files").contains("lib/kettle-core.jar"));
        assertTrue(conf.get("mapred.job.classpath.files").contains("lib/kettle-engine.jar"));

        // Make sure our plugins folder is registered
        assertTrue(conf.get("mapred.cache.files").contains("#plugins"));

        // Make sure our libraries aren't included twice
        assertFalse(conf.get("mapred.cache.files").contains("#lib"));

        // We should not have individual files registered
        assertFalse(conf.get("mapred.cache.files").contains("pentaho-big-data-plugin/jar1.jar"));
        assertFalse(conf.get("mapred.cache.files").contains("pentaho-big-data-plugin/jar2.jar"));
        assertFalse(conf.get("mapred.cache.files").contains("pentaho-big-data-plugin/folder/file.txt"));

    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}

From source file:org.pentaho.hadoop.mapreduce.test.TestSubmitMapReduceJob.java

@BeforeClass
public static void beforeClass() throws IOException {
    fsManager = VFS.getManager();
    Properties settings = new Properties();
    settings.load(TestSubmitMapReduceJob.class.getResourceAsStream("/test-settings.properties"));
    hostname = settings.getProperty("hostname", hostname);
    hdfsPort = settings.getProperty("hdfsPort", hdfsPort);
    trackerPort = settings.getProperty("trackerPort", trackerPort);
    username = settings.getProperty("username", username);
    password = settings.getProperty("password", password);

    // file management
    // first delete any existing resources that will conflict
    FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/wordcount/output"));
    file.delete(new FileSelector() {
        public boolean includeFile(FileSelectInfo arg0) throws Exception {
            return true;
        }

        public boolean traverseDescendents(FileSelectInfo arg0) throws Exception {
            return true;
        }
    });
}

From source file:org.pentaho.hadoop.mapreduce.test.TransMapReduceJobTestFIXME.java

@BeforeClass
public static void beforeClass() throws IOException {
    fsManager = VFS.getManager();
    Properties settings = new Properties();
    settings.load(TransMapReduceJobTestFIXME.class.getResourceAsStream("/test-settings.properties"));
    hostname = settings.getProperty("hostname", hostname);
    hdfsPort = settings.getProperty("hdfsPort", hdfsPort);
    trackerPort = settings.getProperty("trackerPort", trackerPort);
    username = settings.getProperty("username", username);
    password = settings.getProperty("password", password);

    // file management
    // first delete any existing resources that will conflict
    FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/wordcount/output"));
    file.delete(new FileSelector() {
        public boolean includeFile(FileSelectInfo arg0) throws Exception {
            return true;
        }

        public boolean traverseDescendents(FileSelectInfo arg0) throws Exception {
            return true;
        }
    });
}