Example usage for org.apache.commons.vfs2 FileObject delete

List of usage examples for org.apache.commons.vfs2 FileObject delete

Introduction

In this page you can find the example usage for org.apache.commons.vfs2 FileObject delete.

Prototype

int delete(FileSelector selector) throws FileSystemException;

Source Link

Document

Deletes all descendants of this file that match a selector.

Usage

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

/**
 * Verifies that the contents of a local VFS test folder can be staged into the
 * (local) Hadoop file system cache: expects 6 files and 6 directories staged.
 * The generated test folder is always deleted, even on failure.
 */
@Test
public void stageForCache() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    // Local VFS folder holding the sample content to be staged
    FileObject testFolder = DistributedCacheTestUtil.createTestFolderWithContent();

    try {
        Configuration conf = new Configuration();
        FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

        Path root = new Path("bin/test/stageArchiveForCacheTest");
        Path dest = new Path(root, "org/pentaho/mapreduce/");

        DistributedCacheTestUtil.stageForCacheTester(cacheUtil, testFolder, fs, root, dest, 6, 6);
    } finally {
        // Recursively remove the temporary folder and all descendants
        testFolder.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

/**
 * Verifies staging succeeds when the destination directory already exists.
 * A destination dir is created up front, then the standard stage-for-cache
 * assertions (6 files, 6 directories) are run against it.
 */
@Test
public void stageForCache_destination_exists() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    FileObject testFolder = DistributedCacheTestUtil.createTestFolderWithContent();
    try {
        Path root = new Path("bin/test/stageForCache_destination_exists");
        Path dest = new Path(root, "dest");

        // Pre-create the destination so the staging code hits the "already exists" path
        fs.mkdirs(dest);
        assertTrue(fs.exists(dest));
        assertTrue(fs.getFileStatus(dest).isDir());

        DistributedCacheTestUtil.stageForCacheTester(cacheUtil, testFolder, fs, root, dest, 6, 6);
    } finally {
        // Recursively clean up the generated VFS test folder
        testFolder.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

/**
 * Verifies that a plugin folder is staged into the plugins installation
 * directory and that its full content (6 files, 6 directories) arrives.
 * Both the VFS source folder and the HDFS-side plugins dir are cleaned up.
 */
@Test
public void stagePluginsForCache() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    Path pluginsDir = new Path("bin/test/plugins-installation-dir");
    FileObject pluginFolder = DistributedCacheTestUtil.createTestFolderWithContent();

    try {
        cacheUtil.stagePluginsForCache(fs, pluginsDir, "bin/test/sample-folder");

        // The plugin must land under the plugins dir at its relative path
        Path installedPlugin = new Path(pluginsDir, "bin/test/sample-folder");
        assertTrue(fs.exists(installedPlugin));

        ContentSummary summary = fs.getContentSummary(installedPlugin);
        assertEquals(6, summary.getFileCount());
        assertEquals(6, summary.getDirectoryCount());
    } finally {
        pluginFolder.delete(new AllFileSelector());
        fs.delete(pluginsDir, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

/**
 * Verifies findFiles against a natively-staged directory tree:
 * all entries (5), jar files only (2), and folder names only (1).
 * Nested finally blocks guarantee both the HDFS root and the VFS
 * source folder are removed.
 */
@Test
public void findFiles_hdfs_native() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    // Local VFS folder holding the sample content to stage
    FileObject testFolder = DistributedCacheTestUtil.createTestFolderWithContent();

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);
    Path root = new Path("bin/test/stageArchiveForCacheTest");
    Path dest = new Path(root, "org/pentaho/mapreduce/");

    try {
        try {
            cacheUtil.stageForCache(testFolder, fs, dest, true);

            // No pattern: every staged file/folder is returned
            List<Path> found = cacheUtil.findFiles(fs, dest, null);
            assertEquals(5, found.size());

            // Only the jar files
            found = cacheUtil.findFiles(fs, dest, Pattern.compile(".*jar$"));
            assertEquals(2, found.size());

            // Only the folder entry
            found = cacheUtil.findFiles(fs, dest, Pattern.compile(".*folder$"));
            assertEquals(1, found.size());
        } finally {
            fs.delete(root, true);
        }
    } finally {
        testFolder.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

/**
 * Verifies that installing the Kettle environment from an empty PMR archive
 * plus a big-data plugin folder produces an installation that
 * isKettleEnvironmentInstalledAt recognizes. Cleans up both the plugin
 * folder and the install root.
 */
@Test
public void installKettleEnvironment() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject(getClass().getResource("/empty-pmr.zip").toURI().getPath());

    FileObject bigDataPluginDir = DistributedCacheTestUtil
            .createTestFolderWithContent(DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    Path installRoot = new Path("bin/test/installKettleEnvironment");
    try {
        // No additional plugins (null) for the basic installation case
        cacheUtil.installKettleEnvironment(pmrArchive, fs, installRoot, bigDataPluginDir, null);
        assertTrue(cacheUtil.isKettleEnvironmentInstalledAt(fs, installRoot));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        fs.delete(installRoot, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

/**
 * Verifies that installKettleEnvironment also stages an additional plugin
 * directory, and that the plugin shows up under plugins/ in the install root.
 * All generated folders (both VFS and HDFS-side) are removed afterwards.
 */
@Test
public void installKettleEnvironment_additional_plugins() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject(getClass().getResource("/empty-pmr.zip").toURI().getPath());
    FileObject bigDataPluginDir = DistributedCacheTestUtil
            .createTestFolderWithContent(DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    String pluginName = "additional-plugin";
    FileObject extraPluginDir = DistributedCacheTestUtil.createTestFolderWithContent(pluginName);
    Path installRoot = new Path("bin/test/installKettleEnvironment");

    try {
        cacheUtil.installKettleEnvironment(pmrArchive, fs, installRoot, bigDataPluginDir, "bin/test/" + pluginName);

        assertTrue(cacheUtil.isKettleEnvironmentInstalledAt(fs, installRoot));
        // The extra plugin must be staged under plugins/ at its relative path
        assertTrue(fs.exists(new Path(installRoot, "plugins/bin/test/" + pluginName)));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        extraPluginDir.delete(new AllFileSelector());
        fs.delete(installRoot, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

/**
 * Installs a Kettle environment, configures a job Configuration from it, and
 * checks the resulting mapred.cache.files / mapred.job.classpath.files entries:
 * kettle jars and the configuration-specific jar are present, the plugins
 * folder is registered as an archive, and neither the lib folder nor
 * individual plugin files are registered separately.
 */
@Test
public void configureWithPmr() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    // This "empty pmr" contains a lib/ folder and some empty kettle-*.jar files but no actual content
    FileObject pmrArchive = KettleVFS.getFileObject(getClass().getResource("/empty-pmr.zip").toURI().getPath());

    FileObject bigDataPluginDir = DistributedCacheTestUtil
            .createTestFolderWithContent(DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    Path installRoot = new Path("bin/test/installKettleEnvironment");
    try {
        cacheUtil.installKettleEnvironment(pmrArchive, fs, installRoot, bigDataPluginDir, null);
        assertTrue(cacheUtil.isKettleEnvironmentInstalledAt(fs, installRoot));

        cacheUtil.configureWithKettleEnvironment(conf, fs, installRoot);

        String cacheFiles = conf.get("mapred.cache.files");
        String classpathFiles = conf.get("mapred.job.classpath.files");

        // Make sure our libraries are on the classpath
        assertTrue(cacheFiles.contains("lib/kettle-core.jar"));
        assertTrue(cacheFiles.contains("lib/kettle-engine.jar"));
        assertTrue(classpathFiles.contains("lib/kettle-core.jar"));
        assertTrue(classpathFiles.contains("lib/kettle-engine.jar"));

        // Make sure the configuration specific jar made it!
        assertTrue(cacheFiles.contains("lib/configuration-specific.jar"));

        // Make sure our plugins folder is registered
        assertTrue(cacheFiles.contains("#plugins"));

        // Make sure our libraries aren't included twice
        assertFalse(cacheFiles.contains("#lib"));

        // We should not have individual files registered
        assertFalse(cacheFiles.contains("pentaho-big-data-plugin/jar1.jar"));
        assertFalse(cacheFiles.contains("pentaho-big-data-plugin/jar2.jar"));
        assertFalse(cacheFiles.contains("pentaho-big-data-plugin/folder/file.txt"));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        fs.delete(installRoot, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

/**
 * Verifies VFS-based findFiles over the generated test folder: 4 jar files
 * when filtering by "jar", and 12 total entries with no filter.
 * The temporary folder is deleted regardless of the outcome.
 */
@Test
public void findFiles_vfs() throws Exception {
    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    FileObject testFolder = DistributedCacheTestUtil.createTestFolderWithContent();
    try {
        // Simply test we can find the jar files in our test folder
        List<String> jarFiles = cacheUtil.findFiles(testFolder, "jar");
        assertEquals(4, jarFiles.size());

        // Look for all files and folders
        List<String> everything = cacheUtil.findFiles(testFolder, null);
        assertEquals(12, everything.size());
    } finally {
        // Recursively remove the temp folder and all of its descendants
        testFolder.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

/**
 * Mock-based variant of findFiles over an HDFS-backed destination: stubs a
 * FileObject tree of 12 entries and verifies findFiles reports all of them.
 * Stage/delete calls on the mocked FileSystem are stubbed to no-ops so no
 * real I/O occurs.
 */
@Test
public void findFiles_vfs_hdfs() throws Exception {

    DistributedCacheUtilImpl cacheUtil = new DistributedCacheUtilImpl(TEST_CONFIG);

    URL sourceUrl = new URL("http://localhost:8020/path/to/file");
    Configuration conf = mock(Configuration.class);
    FileSystem fs = mock(FileSystem.class);
    FileObject source = mock(FileObject.class);
    Path dest = mock(Path.class);
    FileObject hdfsDest = mock(FileObject.class);
    Path root = mock(Path.class);

    // Build 12 mocked children, each resolving to a distinct URL
    FileObject[] children = new FileObject[12];
    for (int i = 0; i < children.length; i++) {
        FileObject child = mock(FileObject.class);
        doReturn(new URL("http://localhost:8020/path/to/file/" + i)).when(child).getURL();
        children[i] = child;
    }

    doReturn(sourceUrl).when(source).getURL();
    doReturn(conf).when(fs).getConf();
    doReturn(0).when(conf).getInt(any(String.class), anyInt());
    doReturn(true).when(source).exists();
    doReturn(children).when(hdfsDest).findFiles(any(FileSelector.class));
    doReturn(true).when(fs).delete(root, true);
    doReturn(children.length).when(source).delete(any(AllFileSelector.class));
    doNothing().when(fs).copyFromLocalFile(any(Path.class), any(Path.class));
    doNothing().when(fs).setPermission(any(Path.class), any(FsPermission.class));
    doReturn(true).when(fs).setReplication(any(Path.class), anyShort());

    try {
        try {
            cacheUtil.stageForCache(source, fs, dest, true);

            // All 12 stubbed children should be reported
            List<String> found = cacheUtil.findFiles(hdfsDest, null);
            assertEquals(12, found.size());
        } finally {
            fs.delete(root, true);
        }
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

/**
 * Verifies that staging into an existing destination with overwrite=false
 * fails with a KettleFileException whose message mentions "Destination exists".
 *
 * Bug fix: the original test passed silently if stageForCache did NOT throw;
 * a {@code thrown} flag is now asserted after the call so a missing exception
 * fails the test.
 */
@Test
public void stageForCache_destination_no_overwrite() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();
    try {
        Path root = new Path("bin/test/stageForCache_destination_exists");
        Path dest = new Path(root, "dest");

        // Pre-create the destination so the no-overwrite path is exercised
        fs.mkdirs(dest);
        assertTrue(fs.exists(dest));
        assertTrue(fs.getFileStatus(dest).isDir());

        boolean thrown = false;
        try {
            ch.stageForCache(source, fs, dest, false);
        } catch (KettleFileException ex) {
            thrown = true;
            assertTrue(ex.getMessage(), ex.getMessage().contains("Destination exists"));
        } finally {
            fs.delete(root, true);
        }
        // The exception is the expected outcome; its absence is a failure
        assertTrue("Expected KettleFileException when destination exists and overwrite is false", thrown);
    } finally {
        // Recursively clean up the generated VFS test folder
        source.delete(new AllFileSelector());
    }
}