Example usage for org.apache.commons.vfs2 FileObject resolveFile

List of usage examples for org.apache.commons.vfs2 FileObject resolveFile

Introduction

On this page you can find example usage for org.apache.commons.vfs2 FileObject resolveFile.

Prototype

FileObject resolveFile(String path) throws FileSystemException;

Source Link

Document

Finds a file, relative to this file.

Usage

From source file:org.pentaho.hadoop.shim.common.DistributedCacheTestUtil.java

/**
 * Builds a skeleton Hadoop test configuration directory under {@code rootFolderName},
 * containing {@code lib/required.jar} and {@code lib/pmr/configuration-specific.jar}.
 *
 * @param rootFolderName root folder beneath which the test configuration is created
 * @return the FileObject for the created {@code hadoop-configurations/test-config} directory
 * @throws Exception if any VFS operation fails
 */
static FileObject createTestHadoopConfiguration(String rootFolderName) throws Exception {
    FileObject configRoot = KettleVFS.getFileObject(rootFolderName + "/hadoop-configurations/test-config");

    // lib/ with a plain jar
    FileObject libFolder = configRoot.resolveFile("lib");
    libFolder.createFolder();
    libFolder.resolveFile("required.jar").createFile();

    // lib/pmr/ with the configuration-specific jar
    FileObject pmrFolder = libFolder.resolveFile("pmr");
    pmrFolder.createFolder();
    pmrFolder.resolveFile("configuration-specific.jar").createFile();

    return configRoot;
}

From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocator.java

/**
 * Parse a set of URLs from a comma-separated list of URLs. If the URL points to a directory all jar files within that
 * directory will be returned as well./*from   ww  w. j  a v a2s  .  co  m*/
 *
 * @param urlString Comma-separated list of URLs (relative or absolute)
 * @return List of URLs resolved from {@code urlString}
 */
protected List<URL> parseURLs(FileObject root, String urlString) {
    if (urlString == null || urlString.trim().isEmpty()) {
        return Collections.emptyList();
    }
    String[] paths = urlString.split(",");
    List<URL> urls = new ArrayList<URL>();
    for (String path : paths) {
        try {
            FileObject file = root.resolveFile(path.trim());
            if (!file.exists()) {
                file = defaultFsm.resolveFile(path.trim());
            }
            if (FileType.FOLDER.equals(file.getType())) {
                // Add directories with a trailing / so the URL ClassLoader interprets
                // them as directories
                urls.add(new URL(file.getURL().toExternalForm() + "/"));
                // Also add all jars within this directory
                urls.addAll(findJarsIn(file, 1, new HashSet<String>()));
            } else {
                urls.add(file.getURL());
            }
        } catch (Exception e) {
            // Log invalid path
            logger.error(BaseMessages.getString(PKG, "Error.InvalidClasspathEntry", path));
        }
    }
    return urls;
}

From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocatorTest.java

@BeforeClass
public static void setup() throws Exception {
    // Create a test hadoop configuration "a"
    FileObject ramRoot = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    FileObject aConfigFolder = ramRoot.resolveFile("a");
    if (aConfigFolder.exists()) {
        aConfigFolder.delete(new AllFileSelector());
    }
    aConfigFolder.createFolder();

    assertEquals(FileType.FOLDER, aConfigFolder.getType());

    // Create the properties file for the configuration as hadoop-configurations/a/config.properties
    configFile = aConfigFolder.resolveFile("config.properties");
    Properties p = new Properties();
    p.setProperty("name", "Test Configuration A");
    p.setProperty("classpath", "");
    p.setProperty("ignore.classes", "");
    p.setProperty("library.path", "");
    p.setProperty("required.classes", HadoopConfigurationLocatorTest.class.getName());
    p.store(configFile.getContent().getOutputStream(), "Test Configuration A");
    // close() flushes and releases the output stream opened above
    configFile.close();

    // Create the implementation jar
    FileObject implJar = aConfigFolder.resolveFile("a-config.jar");
    implJar.createFile();

    // Use ShrinkWrap to create the jar and write it out to VFS
    JavaArchive archive = ShrinkWrap.create(JavaArchive.class, "a-configuration.jar")
            .addAsServiceProvider(HadoopShim.class, MockHadoopShim.class).addClass(MockHadoopShim.class);
    archive.as(ZipExporter.class).exportTo(implJar.getContent().getOutputStream());
    // Close the file to flush and release the output stream opened for the export;
    // the original leaked it, which can leave a truncated jar in the RAM file system
    implJar.close();
}

From source file:org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

@BeforeClass
public static void setup() throws Exception {
    // Start from a clean RAM file system root for the test hadoop configuration
    FileObject ramRoot = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    if (ramRoot.exists()) {
        ramRoot.delete(new AllFileSelector());
    }
    ramRoot.createFolder();

    // Create empty stand-in implementation jars used by the exclusion tests
    String[] jarNames = {
            "xercesImpl-2.9.1.jar",
            "xml-apis-1.3.04.jar",
            "xml-apis-ext-1.3.04.jar",
            "xerces-version-1.8.0.jar",
            "xercesImpl2-2.9.1.jar",
            "pentaho-hadoop-shims-api-61.2016.04.01-196.jar",
            "commands-3.3.0-I20070605-0010.jar",
            "postgresql-9.3-1102-jdbc4.jar",
            "trilead-ssh2-build213.jar",
            "trilead-ssh2-build215.jar" };
    for (String jarName : jarNames) {
        ramRoot.resolveFile(jarName).createFile();
    }
}

From source file:org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

@Test
public void filterJars_arg_urls_containsOnlyExcludedJars() throws Exception {
    HadoopConfigurationLocator locator = new HadoopConfigurationLocator();
    FileObject root = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);

    // Collect every URL under the configuration root, then drop the folder entries
    // so only jar-file URLs remain
    List<URL> urls = locator.parseURLs(root, root.toString());
    Iterator<URL> urlIterator = urls.listIterator();
    while (urlIterator.hasNext()) {
        URL candidate = urlIterator.next();
        boolean isFolder = FileType.FOLDER.equals(root.resolveFile(candidate.toString().trim()).getType());
        if (isFolder) {
            urlIterator.remove();
        }
    }

    // Every remaining jar is named in the exclusion list, so filtering must leave nothing
    String excludedJars = "xercesImpl,xml-apis-1.3.04.jar,xml-apis-ext-1.3.04,xerces-version-1.8.0,xercesImpl2-2.9.1,"
            + "pentaho-hadoop-shims-api-61.2016.04.01-196,commands-3.3.0-I20070605-0010,postgresql,trilead-ssh2-build213"
            + ".jar,trilead-ssh2-build215.jar";
    List<URL> filtered = locator.filterJars(urls, excludedJars);
    assertEquals(0, filtered.size());
}

From source file:org.pentaho.hadoop.shim.HadoopRunningOnClusterTest.java

@BeforeClass
public static void setup() throws Exception {
    // Start from a clean VFS root for the test hadoop configuration
    FileObject ramRoot = VFS.getManager().resolveFile(CONFIG_PROPERTY_CLASSPATH);
    if (ramRoot.exists()) {
        ramRoot.delete(new AllFileSelector());
    }
    ramRoot.createFolder();

    // Create empty stand-in mapreduce client jars
    String[] clientJars = {
            "hadoop-mapreduce-client-app-2.7.0-mapr-1602.jar",
            "hadoop-mapreduce-client-common-2.7.0-mapr-1602.jar",
            "hadoop-mapreduce-client-contrib-2.7.0-mapr-1602.jar",
            "hadoop-mapreduce-client-core-2.7.0-mapr-1602.jar",
            "hadoop-mapreduce-client-hs-2.7.0-mapr-1602.jar" };
    for (String jar : clientJars) {
        ramRoot.resolveFile(jar).createFile();
    }

    // Stage the pmr properties test resource into a temporary "pmr" folder
    pmrFolder = tempFolder.newFolder("pmr");
    urlTestResources = Thread.currentThread().getContextClassLoader().getResource(PMR_PROPERTIES);
    Files.copy(Paths.get(urlTestResources.toURI()), Paths.get(pmrFolder.getAbsolutePath(), PMR_PROPERTIES));
}

From source file:org.pentaho.metaverse.impl.VfsLineageCollector.java

@Override
public List<String> listArtifactsForFile(String pathToArtifact, String startingDate, String endingDate)
        throws IllegalArgumentException {
    List<String> artifactPaths = new ArrayList<>();

    try {
        FileSystemOptions opts = new FileSystemOptions();
        FileObject lineageRoot = KettleVFS.getFileObject(getOutputFolder(), opts);

        // Restrict day folders to the requested date range; descend up to 256 levels
        FileSelector withinDateRange = new VfsDateRangeFilter(format, startingDate, endingDate);
        FileSelector upToDepth = new FileDepthSelector(1, 256);

        if (lineageRoot.exists() && lineageRoot.getType() == FileType.FOLDER) {

            // Walk each date folder of lineage output we have
            for (FileObject dayFolder : lineageRoot.findFiles(withinDateRange)) {
                for (FileObject entry : dayFolder.findFiles(upToDepth)) {
                    FileObject artifactFolder = entry.resolveFile(pathToArtifact);
                    if (artifactFolder.exists() && artifactFolder.getType() == FileType.FOLDER) {
                        // Collect the plain files directly under the artifact folder
                        for (FileObject child : artifactFolder.getChildren()) {
                            if (child.getType() == FileType.FILE) {
                                artifactPaths.add(child.getName().getPath());
                            }
                        }
                    }
                }
            }
        }
        return artifactPaths;
    } catch (Exception e) {
        // Surface any VFS failure as the interface-declared IllegalArgumentException
        throw new IllegalArgumentException(e);
    }
}

From source file:org.pentaho.metaverse.impl.VfsLineageWriter.java

/**
 * Opens an output stream for a lineage artifact named
 * {@code <startTimeMillis>_<profileName><extension>} inside the holder's output directory.
 *
 * @param holder    lineage holder supplying the execution profile and output directory
 * @param extension file extension appended to the artifact name
 * @return an open OutputStream, or {@code null} if {@code holder} is null or any step fails
 */
protected OutputStream createOutputStream(LineageHolder holder, String extension) {
    if (holder == null) {
        return null;
    }
    try {
        IExecutionProfile profile = holder.getExecutionProfile();
        String startMillis = Long.toString(profile.getExecutionData().getStartTime().getTime());
        FileObject destFolder = getOutputDirectoryAsFile(holder);
        String baseName = Const.NVL(profile.getName(), "unknown");
        FileObject artifact = destFolder.resolveFile(startMillis + "_" + baseName + extension);
        return artifact.getContent().getOutputStream();
    } catch (Exception e) {
        log.error(Messages.getErrorString("ERROR.CantCreateOutputStream"), e);
        return null;
    }
}

From source file:org.pentaho.metaverse.impl.VfsLineageWriter.java

/**
 * Resolves (and creates if needed) the per-artifact output directory for a lineage holder:
 * {@code <outputFolder>/<dateFolder>/<holderId>}.
 *
 * @param holder lineage holder whose id names the artifact subfolder
 * @return the output folder, or {@code null} if the artifact subfolder could not be created
 * @throws IllegalStateException if the date folder cannot be created, or if the resolved
 *         output path exists but is a regular file
 */
protected FileObject getOutputDirectoryAsFile(LineageHolder holder) {
    try {
        FileObject dateRootFolder = getDateFolder(holder);
        dateRootFolder.createFolder();
        String id = holder.getId() == null ? "unknown_artifact" : holder.getId();
        if (id.startsWith(File.separator)) { // For *nix
            // Strip the leading separator so the id resolves as a relative path
            id = id.substring(1);
        } else if (Const.isWindows() && id.length() > 1 && id.charAt(1) == ':') { // For windows
            // Length guard first: a one-character id would otherwise throw
            // StringIndexOutOfBoundsException on charAt(1)
            id = id.replaceFirst(Pattern.quote(":"), "");
        }
        try {
            FileObject folder = dateRootFolder.resolveFile(id);
            folder.createFolder();
            if (folder.isFile()) {
                // must be a folder
                throw new IllegalStateException(
                        Messages.getErrorString("ERROR.OutputFolderWrongType", folder.getName().getPath()));
            }
            return folder;
        } catch (Exception e) {
            log.error(Messages.getErrorString("ERROR.CouldNotCreateFile"), e);
            return null;
        }
    } catch (Exception e) {
        log.error(Messages.getErrorString("ERROR.CouldNotCreateFile"), e);
        throw new IllegalStateException(e);
    }
}

From source file:org.pentaho.metaverse.impl.VfsLineageWriter.java

/**
 * Resolves the date-stamped subfolder of the lineage output folder for a holder.
 * Uses the execution start date when an execution profile is available, otherwise "now".
 *
 * @param holder lineage holder, may be {@code null}
 * @return the date folder (not created here)
 * @throws KettleFileException if the output folder cannot be resolved
 * @throws FileSystemException if the date subfolder cannot be resolved
 */
protected FileObject getDateFolder(LineageHolder holder) throws KettleFileException, FileSystemException {
    Date folderDate;
    if (holder != null && holder.getExecutionProfile() != null) {
        IExecutionProfile profile = holder.getExecutionProfile();
        folderDate = profile.getExecutionData().getStartTime();
    } else {
        folderDate = new Date();
    }
    String dir = "" + dateFolderFormat.format(folderDate);
    FileObject lineageRootFolder = KettleVFS.getFileObject(getOutputFolder());
    return lineageRootFolder.resolveFile(dir);
}