Example usage for org.apache.commons.vfs2 AllFileSelector AllFileSelector

Introduction

This page collects example usages of the org.apache.commons.vfs2 AllFileSelector constructor, drawn from real-world source files.

Prototype

public AllFileSelector()
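
The selector takes no arguments and matches every file and folder it is applied to, which is why the tests below pass it to FileObject.delete(FileSelector) in their finally blocks to tear down whole folder trees. A minimal self-contained sketch of that cleanup idiom, using the in-memory ram:// provider (the URI and file names are illustrative):

import org.apache.commons.vfs2.AllFileSelector;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.VFS;

public class RecursiveDeleteSketch {
    public static void main(String[] args) throws Exception {
        // ram:// keeps the sketch self-contained; any VFS URI works here
        FileObject folder = VFS.getManager().resolveFile("ram://demo/folder");
        // createFile() also creates any missing ancestor folders
        folder.resolveFile("nested/file.txt").createFile();

        // AllFileSelector matches everything, so delete(FileSelector)
        // removes the folder and all of its contents in one call
        int deleted = folder.delete(new AllFileSelector());
        System.out.println("Deleted " + deleted + " entries");
    }
}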

Usage

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

@Test
public void stageForCache() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    // Copy the contents of test folder
    FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();

    try {
        Path root = new Path("bin/test/stageArchiveForCacheTest");
        Path dest = new Path(root, "org/pentaho/mapreduce/");

        Configuration conf = new Configuration();
        FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

        DistributedCacheTestUtil.stageForCacheTester(ch, source, fs, root, dest, 6, 6);
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

@Test
public void stageForCache_destination_exists() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();
    try {
        Path root = new Path("bin/test/stageForCache_destination_exists");
        Path dest = new Path(root, "dest");

        fs.mkdirs(dest);
        assertTrue(fs.exists(dest));
        assertTrue(fs.getFileStatus(dest).isDir());

        DistributedCacheTestUtil.stageForCacheTester(ch, source, fs, root, dest, 6, 6);
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

@Test
public void stagePluginsForCache() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    Path pluginsDir = new Path("bin/test/plugins-installation-dir");

    FileObject pluginDir = DistributedCacheTestUtil.createTestFolderWithContent();

    try {
        ch.stagePluginsForCache(fs, pluginsDir, "bin/test/sample-folder");
        Path pluginInstallPath = new Path(pluginsDir, "bin/test/sample-folder");
        assertTrue(fs.exists(pluginInstallPath));
        ContentSummary summary = fs.getContentSummary(pluginInstallPath);
        assertEquals(6, summary.getFileCount());
        assertEquals(6, summary.getDirectoryCount());
    } finally {
        pluginDir.delete(new AllFileSelector());
        fs.delete(pluginsDir, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

@Test
public void findFiles_hdfs_native() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    // Copy the contents of test folder
    FileObject source = DistributedCacheTestUtil.createTestFolderWithContent();
    Path root = new Path("bin/test/stageArchiveForCacheTest");
    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);
    Path dest = new Path(root, "org/pentaho/mapreduce/");
    try {
        try {
            ch.stageForCache(source, fs, dest, true);

            List<Path> files = ch.findFiles(fs, dest, null);
            assertEquals(5, files.size());

            files = ch.findFiles(fs, dest, Pattern.compile(".*jar$"));
            assertEquals(2, files.size());

            files = ch.findFiles(fs, dest, Pattern.compile(".*folder$"));
            assertEquals(1, files.size());
        } finally {
            fs.delete(root, true);
        }
    } finally {
        source.delete(new AllFileSelector());
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

@Test
public void installKettleEnvironment() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject(getClass().getResource("/empty-pmr.zip").toURI().getPath());

    FileObject bigDataPluginDir = DistributedCacheTestUtil
            .createTestFolderWithContent(DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, null);
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

@Test
public void installKettleEnvironment_additional_plugins() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    // This "empty pmr" contains a lib/ folder but with no content
    FileObject pmrArchive = KettleVFS.getFileObject(getClass().getResource("/empty-pmr.zip").toURI().getPath());
    FileObject bigDataPluginDir = DistributedCacheTestUtil
            .createTestFolderWithContent(DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    String pluginName = "additional-plugin";
    FileObject additionalPluginDir = DistributedCacheTestUtil.createTestFolderWithContent(pluginName);
    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, "bin/test/" + pluginName);
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));
        assertTrue(fs.exists(new Path(root, "plugins/bin/test/" + pluginName)));
    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        additionalPluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplOSDependentTest.java

@Test
public void configureWithPmr() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    Configuration conf = new Configuration();
    FileSystem fs = DistributedCacheTestUtil.getLocalFileSystem(conf);

    // This "empty pmr" contains a lib/ folder and some empty kettle-*.jar files but no actual content
    FileObject pmrArchive = KettleVFS.getFileObject(getClass().getResource("/empty-pmr.zip").toURI().getPath());

    FileObject bigDataPluginDir = DistributedCacheTestUtil
            .createTestFolderWithContent(DistributedCacheUtilImpl.PENTAHO_BIG_DATA_PLUGIN_FOLDER_NAME);

    Path root = new Path("bin/test/installKettleEnvironment");
    try {
        ch.installKettleEnvironment(pmrArchive, fs, root, bigDataPluginDir, null);
        assertTrue(ch.isKettleEnvironmentInstalledAt(fs, root));

        ch.configureWithKettleEnvironment(conf, fs, root);

        // Make sure our libraries are on the classpath
        assertTrue(conf.get("mapred.cache.files").contains("lib/kettle-core.jar"));
        assertTrue(conf.get("mapred.cache.files").contains("lib/kettle-engine.jar"));
        assertTrue(conf.get("mapred.job.classpath.files").contains("lib/kettle-core.jar"));
        assertTrue(conf.get("mapred.job.classpath.files").contains("lib/kettle-engine.jar"));

        // Make sure the configuration specific jar made it!
        assertTrue(conf.get("mapred.cache.files").contains("lib/configuration-specific.jar"));

        // Make sure our plugins folder is registered
        assertTrue(conf.get("mapred.cache.files").contains("#plugins"));

        // Make sure our libraries aren't included twice
        assertFalse(conf.get("mapred.cache.files").contains("#lib"));

        // We should not have individual files registered
        assertFalse(conf.get("mapred.cache.files").contains("pentaho-big-data-plugin/jar1.jar"));
        assertFalse(conf.get("mapred.cache.files").contains("pentaho-big-data-plugin/jar2.jar"));
        assertFalse(conf.get("mapred.cache.files").contains("pentaho-big-data-plugin/folder/file.txt"));

    } finally {
        bigDataPluginDir.delete(new AllFileSelector());
        fs.delete(root, true);
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void extractToTemp() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    FileObject archive = KettleVFS
            .getFileObject(getClass().getResource("/pentaho-mapreduce-sample.jar").toURI().getPath());
    FileObject extracted = ch.extractToTemp(archive);

    assertNotNull(extracted);
    assertTrue(extracted.exists());
    try {
        // 3 files and 5 directories, plus the root folder itself: 9 entries in all
        assertEquals(9, extracted.findFiles(new AllFileSelector()).length);
    } finally {
        // clean up after ourselves
        ch.deleteDirectory(extracted);
    }
}
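
As the assertion above shows, findFiles(new AllFileSelector()) counts the base folder itself along with its descendants (3 files + 5 directories + the root = 9). A minimal sketch of that counting behavior, again using the illustrative ram:// provider:

import org.apache.commons.vfs2.AllFileSelector;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.VFS;

public class FindFilesCountSketch {
    public static void main(String[] args) throws Exception {
        FileObject root = VFS.getManager().resolveFile("ram://count-demo");
        root.resolveFile("sub/a.txt").createFile(); // creates sub/ implicitly

        // root + sub + a.txt = 3 entries, because the selector also matches the base
        System.out.println(root.findFiles(new AllFileSelector()).length);

        root.delete(new AllFileSelector()); // clean up the in-memory tree
    }
}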

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void extractToTempZipEntriesMixed() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    File dest = File.createTempFile("entriesMixed", ".zip");
    ZipOutputStream outputStream = new ZipOutputStream(new FileOutputStream(dest));
    ZipEntry e = new ZipEntry("zipEntriesMixed" + "/" + "someFile.txt");
    outputStream.putNextEntry(e);
    byte[] data = "someOutString".getBytes();
    outputStream.write(data, 0, data.length);
    outputStream.closeEntry();
    e = new ZipEntry("zipEntriesMixed" + "/");
    outputStream.putNextEntry(e);
    outputStream.closeEntry();
    outputStream.close();

    FileObject archive = KettleVFS.getFileObject(dest.getAbsolutePath());

    FileObject extracted = null;
    try {
        extracted = ch.extractToTemp(archive);
    } catch (IOException | KettleFileException e1) {
        e1.printStackTrace();
        fail("Exception not expected in this case");
    }

    assertNotNull(extracted);
    assertTrue(extracted.exists());
    try {
        // The archive holds one file entry and one directory entry; extraction yields
        // 3 entries in all: the root folder, the zipEntriesMixed folder, and someFile.txt
        assertEquals(3, extracted.findFiles(new AllFileSelector()).length);
    } finally {
        // clean up after ourselves
        ch.deleteDirectory(extracted);
        dest.delete();
    }
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

@Test
public void findFiles_vfs() throws Exception {
    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);

    FileObject testFolder = DistributedCacheTestUtil.createTestFolderWithContent();

    try {
        // Simply test we can find the jar files in our test folder
        List<String> jars = ch.findFiles(testFolder, "jar");
        assertEquals(4, jars.size());

        // Look for all files and folders
        List<String> all = ch.findFiles(testFolder, null);
        assertEquals(12, all.size());
    } finally {
        testFolder.delete(new AllFileSelector());
    }
}