Example usage for org.apache.commons.vfs2 FileObject createFolder

List of usage examples for org.apache.commons.vfs2 FileObject createFolder

Introduction

On this page you can find example usage for org.apache.commons.vfs2 FileObject createFolder.

Prototype

void createFolder() throws FileSystemException;

Source Link

Document

Creates this folder, if it does not exist.

Usage

From source file:org.pentaho.di.job.entries.googledrive.JobEntryGoogleDriveExport.java

/**
 * Verifies that the given folder exists and really is a folder, optionally
 * creating it when missing.
 *
 * @param folder      the VFS folder to check; {@code null} is silently accepted
 * @param createIfNot when {@code true}, a missing folder is created instead of failing
 * @throws KettleException if the folder is missing (and not created), exists but
 *                         is not a folder, or a VFS error occurs
 */
protected void checkFolderExists(FileObject folder, boolean createIfNot) throws KettleException {
    if (folder == null) {
        return;
    }
    try {
        // Missing folder + permission to create it: create and we are done.
        if (!folder.exists() && createIfNot) {
            if (log.isDetailed()) {
                log.logDetailed(BaseMessages.getString(PKG, "GoogleDriveExport.Log.CreatingTargetFolder"));
            }
            folder.createFolder();
            return;
        }
        // Otherwise the folder must already exist and must actually be a folder.
        if (!folder.exists()) {
            throw new KettleException(BaseMessages.getString(PKG, "GoogleDriveExport.Error.FolderNotExist",
                    folder.getName()));
        }
        if (!folder.isFolder()) {
            throw new KettleException(
                    BaseMessages.getString(PKG, "GoogleDriveExport.Error.NotAFolder", folder.getName()));
        }
    } catch (FileSystemException e) {
        throw new KettleException(e);
    }
}

From source file:org.pentaho.di.job.entries.hadoopjobexecutor.JarUtilityTest.java

/** Wipes any leftovers from a previous run and recreates the test directory. */
@BeforeClass
public static void setup() throws Exception {
    FileObject testDir = VFS.getManager().resolveFile(TEST_PATH);
    testDir.delete(new AllFileSelector());
    testDir.createFolder();
}

From source file:org.pentaho.di.plugins.fileopensave.providers.vfs.VFSFileProvider.java

/**
 * @param folder/*from www  .  j  a  v a  2 s. c o  m*/
 * @return
 */
@Override
public VFSFile add(VFSFile folder) {
    try {
        FileObject fileObject = KettleVFS.getFileObject(folder.getPath(), new Variables(),
                VFSHelper.getOpts(folder.getPath(), folder.getConnection()));
        fileObject.createFolder();
        String parent = folder.getPath().substring(0, folder.getPath().length() - 1);
        return VFSDirectory.create(parent, fileObject, folder.getConnection());
    } catch (KettleFileException | FileSystemException kfe) {
        // TODO: Do something smart here
    }
    return null;
}

From source file:org.pentaho.di.trans.steps.enhanced.jsonoutput.JsonOutput.java

/**
 * Ensures the parent folder of {@code filename} exists, creating it when the
 * step meta is configured with "create parent folder".
 *
 * @param filename the output file whose parent folder must exist
 * @throws KettleStepException if the parent folder cannot be resolved or created
 */
private void createParentFolder(String filename) throws KettleStepException {
    if (!meta.isCreateParentFolder()) {
        return;
    }
    // Check for parent folder
    FileObject parentfolder = null;
    try {
        // Get parent folder
        parentfolder = KettleVFS.getFileObject(filename, getTransMeta()).getParent();
        if (!parentfolder.exists()) {
            if (log.isDebug()) {
                logDebug(BaseMessages.getString(PKG, "JsonOutput.Error.ParentFolderNotExist",
                        parentfolder.getName()));
            }
            parentfolder.createFolder();
            if (log.isDebug()) {
                logDebug(BaseMessages.getString(PKG, "JsonOutput.Log.ParentFolderCreated"));
            }
        }
    } catch (Exception e) {
        // BUG FIX: parentfolder is null if resolution itself threw, so calling
        // getName() here used to raise an NPE that masked the real error.
        String folderName = parentfolder != null ? String.valueOf(parentfolder.getName()) : filename;
        // BUG FIX: preserve the original exception as the cause (it was dropped).
        throw new KettleStepException(
                BaseMessages.getString(PKG, "JsonOutput.Error.ErrorCreatingParentFolder", folderName), e);
    } finally {
        if (parentfolder != null) {
            try {
                parentfolder.close();
            } catch (Exception ignored) {
                // Best-effort close; nothing useful to do on failure.
            }
        }
    }
}

From source file:org.pentaho.googledrive.vfs.test.GoogleDriveFileObjectTest.java

/**
 * Exercises the basic FileObject contract (createFolder, isFolder, exists,
 * delete, getChildren) against a mocked GoogleDriveFileObject resolved
 * through a mocked FileSystemManager.
 */
@Test
public void testFileObject() throws Exception {
    FileSystemManager vfsManager = mock(FileSystemManager.class);
    GoogleDriveFileObject driveFile = mock(GoogleDriveFileObject.class);
    when(vfsManager.resolveFile(FOLDER)).thenReturn(driveFile);
    when(driveFile.isFolder()).thenReturn(true);
    when(driveFile.exists()).thenReturn(true);
    when(driveFile.delete()).thenReturn(true);

    FileObject resolved = vfsManager.resolveFile(FOLDER);
    resolved.createFolder();
    assertTrue(resolved.isFolder());
    assertTrue(resolved.exists());
    assertTrue(resolved.delete());
    // getChildren() was never stubbed, so the mock returns null by default.
    assertNull(resolved.getChildren());
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheTestUtil.java

/**
 * Builds a minimal fake hadoop configuration tree under {@code rootFolderName}:
 * {@code .../hadoop-configurations/test-config/lib/required.jar} and
 * {@code .../lib/pmr/configuration-specific.jar}.
 *
 * @param rootFolderName root under which the layout is created
 * @return the test-config folder
 */
static FileObject createTestHadoopConfiguration(String rootFolderName) throws Exception {
    FileObject location = KettleVFS.getFileObject(rootFolderName + "/hadoop-configurations/test-config");

    FileObject libDir = location.resolveFile("lib");
    libDir.createFolder();
    libDir.resolveFile("required.jar").createFile();

    FileObject pmrDir = libDir.resolveFile("pmr");
    pmrDir.createFolder();
    pmrDir.resolveFile("configuration-specific.jar").createFile();

    return location;
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java

/**
 * Extract a zip archive to a directory.
 *
 * @param archive Zip archive to extract
 * @param dest    Destination directory. This must not exist!
 * @return Directory the zip was extracted into
 * @throws IllegalArgumentException when the archive file does not exist or the destination directory already exists
 * @throws IOException              on stream errors while reading the archive
 * @throws KettleFileException      on extraction failure or an illegal entry path
 */
public FileObject extract(FileObject archive, FileObject dest) throws IOException, KettleFileException {
    if (!archive.exists()) {
        throw new IllegalArgumentException("archive does not exist: " + archive.getURL().getPath());
    }

    if (dest.exists()) {
        throw new IllegalArgumentException("destination already exists");
    }
    dest.createFolder();

    try {
        byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
        // try-with-resources guarantees both streams are closed on every path
        // (the original leaked them if an exception fired between open and close).
        try (ZipInputStream zis = new ZipInputStream(archive.getContent().getInputStream())) {
            ZipEntry ze;
            while ((ze = zis.getNextEntry()) != null) {
                String entryName = ze.getName();
                // "Zip slip" guard: reject entries whose path climbs out of dest
                // via ".." segments (archive contents are untrusted input).
                for (String part : entryName.split("[/\\\\]")) {
                    if ("..".equals(part)) {
                        throw new KettleFileException("illegal zip entry path: " + entryName);
                    }
                }
                FileObject entry = KettleVFS.getFileObject(dest + Const.FILE_SEPARATOR + entryName);
                FileObject parent = entry.getParent();
                if (parent != null) {
                    parent.createFolder();
                }
                if (ze.isDirectory()) {
                    entry.createFolder();
                    continue;
                }

                try (OutputStream os = KettleVFS.getOutputStream(entry, false)) {
                    int len;
                    while ((len = zis.read(buffer)) > 0) {
                        os.write(buffer, 0, len);
                    }
                }
            }
        }
    } catch (Exception ex) {
        // Try to clean up the temp directory and all files
        if (!deleteDirectory(dest)) {
            throw new KettleFileException("Could not clean up temp dir after error extracting", ex);
        }
        throw new KettleFileException("error extracting archive", ex);
    }

    return dest;
}

From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImplTest.java

/** Verifies that deleteDirectory removes a folder previously created through VFS. */
@Test
public void deleteDirectory() throws Exception {
    final String path = "bin/test/deleteDirectoryTest";
    FileObject dir = KettleVFS.getFileObject(path);
    dir.createFolder();

    DistributedCacheUtilImpl ch = new DistributedCacheUtilImpl(TEST_CONFIG);
    ch.deleteDirectory(dir);
    try {
        assertFalse(dir.exists());
    } finally {
        // Delete the directory with java.io.File if it wasn't removed
        File leftover = new File(path);
        if (leftover.exists() && !leftover.delete()) {
            throw new IOException("unable to delete test directory: " + leftover.getAbsolutePath());
        }
    }
}

From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocatorTest.java

/**
 * Creates a RAM-based test hadoop configuration "a": a config.properties file
 * describing the configuration and an implementation jar containing a mock shim.
 */
@BeforeClass
public static void setup() throws Exception {
    // Create a test hadoop configuration "a"
    FileObject ramRoot = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    FileObject aConfigFolder = ramRoot.resolveFile("a");
    if (aConfigFolder.exists()) {
        aConfigFolder.delete(new AllFileSelector());
    }
    aConfigFolder.createFolder();

    assertEquals(FileType.FOLDER, aConfigFolder.getType());

    // Create the properties file for the configuration as hadoop-configurations/a/config.properties
    configFile = aConfigFolder.resolveFile("config.properties");
    Properties p = new Properties();
    p.setProperty("name", "Test Configuration A");
    p.setProperty("classpath", "");
    p.setProperty("ignore.classes", "");
    p.setProperty("library.path", "");
    p.setProperty("required.classes", HadoopConfigurationLocatorTest.class.getName());
    p.store(configFile.getContent().getOutputStream(), "Test Configuration A");
    // close() flushes and releases the output stream opened for p.store() above.
    configFile.close();

    // Create the implementation jar
    FileObject implJar = aConfigFolder.resolveFile("a-config.jar");
    implJar.createFile();

    // Use ShrinkWrap to create the jar and write it out to VFS
    JavaArchive archive = ShrinkWrap.create(JavaArchive.class, "a-configuration.jar")
            .addAsServiceProvider(HadoopShim.class, MockHadoopShim.class).addClass(MockHadoopShim.class);
    archive.as(ZipExporter.class).exportTo(implJar.getContent().getOutputStream());
    // BUG FIX: the jar's output stream was never closed, which can leave the
    // archive truncated or the file locked; closing flushes and releases it.
    implJar.close();
}

From source file:org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

/**
 * Seeds a clean RAM-based VFS folder with empty jar files whose names mimic
 * the libraries the shim classpath logic is expected to filter.
 */
@BeforeClass
public static void setup() throws Exception {
    // Start from an empty RAM folder for the fake hadoop configuration.
    FileObject ramRoot = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    if (ramRoot.exists()) {
        ramRoot.delete(new AllFileSelector());
    }
    ramRoot.createFolder();

    // Create the (empty) implementation jars by name only.
    String[] jarNames = {
            "xercesImpl-2.9.1.jar",
            "xml-apis-1.3.04.jar",
            "xml-apis-ext-1.3.04.jar",
            "xerces-version-1.8.0.jar",
            "xercesImpl2-2.9.1.jar",
            "pentaho-hadoop-shims-api-61.2016.04.01-196.jar",
            "commands-3.3.0-I20070605-0010.jar",
            "postgresql-9.3-1102-jdbc4.jar",
            "trilead-ssh2-build213.jar",
            "trilead-ssh2-build215.jar" };
    for (String jarName : jarNames) {
        ramRoot.resolveFile(jarName).createFile();
    }
}