Example usage for org.apache.commons.vfs2 FileObject getChild

Introduction

On this page you can find example usage for org.apache.commons.vfs2 FileObject getChild.

Prototype

FileObject getChild(String name) throws FileSystemException;

Document

Returns a child of this file.
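
For instance, a minimal sketch (the folder path and child name below are placeholders, not part of any of the examples that follow) showing that getChild returns the named child, or null when no such child exists:

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class GetChildExample {
    public static void main(String[] args) throws FileSystemException {
        FileSystemManager fsManager = VFS.getManager();
        // Resolve a folder; the path is only a placeholder for this sketch
        FileObject folder = fsManager.resolveFile("file:///tmp");
        // getChild returns the named child, or null if there is no such child
        FileObject child = folder.getChild("example.txt");
        if (child == null) {
            System.out.println("no such child");
        } else {
            System.out.println("found: " + child.getName());
        }
    }
}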

Usage

From source file: de.innovationgate.wga.common.DesignDirectory.java

public static FileObject getDesignDefinitionFile(FileObject parent) throws FileSystemException {

    // Prefer the design definition file; fall back to the sync info file
    FileObject defFile = parent.getChild(DESIGN_DEFINITION_FILE);
    if (defFile == null) {
        defFile = parent.getChild(SYNCINFO_FILE);
    }
    return defFile;

}

From source file: edu.byu.nlp.data.docs.VectorDocumentDatasetBuilder.java

/**
 * See class description.
 * 
 * @throws FileSystemException if there is a problem finding the specified directories on the
 *     filesystem.
 */
public VectorDocumentDatasetBuilder(String basedir, String dataset, String split) throws FileSystemException {
    // TODO: consider taking the FileObject as a parameter
    FileSystemManager fsManager = VFS.getManager();
    if (fsManager instanceof DefaultFileSystemManager) {
        ((DefaultFileSystemManager) fsManager).setBaseFile(new File("."));
    }
    this.basedir = fsManager.resolveFile(basedir);
    Preconditions.checkNotNull(this.basedir, "%s cannot be resolved", basedir);
    Preconditions.checkArgument(this.basedir.getType() == FileType.FOLDER);
    FileObject indices = this.basedir.getChild("indices");
    Preconditions.checkNotNull(indices, "cannot find indices directory in %s", basedir);
    FileObject datasetDir = indices.getChild(dataset);
    Preconditions.checkNotNull(datasetDir, "cannot find index for dataset %s", dataset);
    this.indexDirectory = datasetDir.getChild(split);
    Preconditions.checkNotNull(indexDirectory, "cannot find split %s", split);
    Preconditions.checkArgument(indexDirectory.getType() == FileType.FOLDER);
}

From source file: edu.byu.nlp.data.docs.DocumentDatasetBuilder.java

/**
 * See class description.
 * 
 * @throws FileSystemException if there is a problem finding the specified directories on the
 *     filesystem.
 */
public DocumentDatasetBuilder(String basedir, String dataset, String split,
        @Nullable Function<String, String> docTransform,
        @Nullable Function<String, Iterable<String>> sentenceSplitter,
        @Nullable Function<String, Iterable<String>> tokenizer,
        @Nullable Function<String, String> tokenTransform, FeatureSelectorFactory featureSelectorFactory,
        @Nullable Integer featureNormalizationConstant) throws FileSystemException {
    // TODO: consider taking the FileObject as a parameter
    FileSystemManager fsManager = VFS.getManager();
    if (fsManager instanceof DefaultFileSystemManager) {
        ((DefaultFileSystemManager) fsManager).setBaseFile(new File("."));
    }
    this.basedir = fsManager.resolveFile(basedir);
    Preconditions.checkNotNull(this.basedir, "%s cannot be resolved", basedir);
    Preconditions.checkArgument(this.basedir.getType() == FileType.FOLDER, this.basedir + " must be a folder");
    FileObject indices = this.basedir.getChild("indices");
    Preconditions.checkNotNull(indices, "cannot find indices directory in %s", basedir);
    FileObject datasetDir = indices.getChild(dataset);
    Preconditions.checkNotNull(datasetDir, "cannot find index for dataset %s", dataset);
    this.indexDirectory = datasetDir.getChild(split);
    Preconditions.checkNotNull(indexDirectory, "cannot find split %s", split);
    Preconditions.checkArgument(indexDirectory.getType() == FileType.FOLDER);
    this.docTransform = docTransform;
    this.sentenceSplitter = sentenceSplitter;
    this.tokenizer = tokenizer;
    this.tokenTransform = tokenTransform;
    this.featureSelectorFactory = featureSelectorFactory;
    this.featureNormalizationConstant = featureNormalizationConstant;
}

From source file: org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap.java

public synchronized List<HadoopConfigurationInfo> getHadoopConfigurationInfos()
        throws KettleException, ConfigurationException, IOException {
    List<HadoopConfigurationInfo> result = new ArrayList<>();
    FileObject hadoopConfigurationsDir = resolveHadoopConfigurationsDirectory();
    // If the folder doesn't exist, return an empty list
    if (hadoopConfigurationsDir.exists()) {
        String activeId = getActiveConfigurationId();
        String willBeActiveId = getWillBeActiveConfigurationId();
        for (FileObject childFolder : hadoopConfigurationsDir.getChildren()) {
            if (childFolder.getType() == FileType.FOLDER) {
                String id = childFolder.getName().getBaseName();
                FileObject configPropertiesFile = childFolder.getChild(CONFIG_PROPERTIES);
                // getChild returns null when the child does not exist
                if (configPropertiesFile != null && configPropertiesFile.exists()) {
                    Properties properties = new Properties();
                    // Close the stream once the properties have been read
                    try (InputStream in = configPropertiesFile.getContent().getInputStream()) {
                        properties.load(in);
                    }
                    result.add(new HadoopConfigurationInfo(id, properties.getProperty("name", id),
                            id.equals(activeId), willBeActiveId.equals(id)));
                }
            }
        }
    }
    return result;
}

From source file: org.pentaho.hadoop.shim.HadoopConfigurationLocator.java

/**
 * Attempt to discover a valid Hadoop configuration from the provided folder.
 *
 * @param folder Folder that may represent a Hadoop configuration
 * @return A Hadoop configuration for the folder provided or null if none is found.
 * @throws ConfigurationException Error when loading the Hadoop configuration.
 */
protected HadoopConfiguration loadHadoopConfiguration(FileObject folder) throws ConfigurationException {
    ShimProperties configurationProperties = new ShimProperties();
    try {
        FileObject configFile = folder.getChild(CONFIG_PROPERTIES_FILE);
        if (configFile != null) {
            configurationProperties.putAll(loadProperties(configFile));
        }
    } catch (Exception ex) {
        throw new ConfigurationException(BaseMessages.getString(PKG,
                "Error.UnableToLoadConfigurationProperties", CONFIG_PROPERTIES_FILE));
    }

    for (Entry<String, String> entry : configurationProperties.getPrefixedProperties("java.system")
            .entrySet()) {
        System.setProperty(entry.getKey(), entry.getValue());
    }

    try {
        List<URL> classpathElements = null;
        if (!isRunningOnCluster()) {
            // Parse all URLs from an optional classpath from the configuration file
            classpathElements = parseURLs(folder,
                    configurationProperties.getProperty(CONFIG_PROPERTY_CLASSPATH));
        }

        // Allow external configuration of classes to ignore
        String ignoredClassesProperty = configurationProperties.getProperty(CONFIG_PROPERTY_IGNORE_CLASSES);
        String[] ignoredClasses = null;
        if (!StringUtil.isEmpty(ignoredClassesProperty)) {
            ignoredClasses = ignoredClassesProperty.split(",");
        }

        // Pass our class loader in as the parent of the configuration's class loader
        // so it can find the same API classes we're using
        ClassLoader cl = createConfigurationLoader(folder, getClass().getClassLoader(), classpathElements,
                configurationProperties, ignoredClasses);
        verifyClasses(cl, configurationProperties.getProperty("required.classes"),
                configurationProperties.getProperty("name"));

        // Treat the Hadoop shim special. It is absolutely required for a Hadoop configuration.
        HadoopShim hadoopShim = null;
        List<PentahoHadoopShim> shims = new ArrayList<PentahoHadoopShim>();
        // Attempt to locate a shim within this folder
        for (Class<? extends PentahoHadoopShim> shimType : SHIM_TYPES) {
            PentahoHadoopShim s = locateServiceImpl(cl, shimType);
            if (s == null && shimType.getAnnotation(Required.class) != null) {
                logger.warn(BaseMessages.getString(PKG, "Error.MissingRequiredShim", shimType.getSimpleName()));
                // Do not continue to load the configuration if we are missing a required shim
                return null;
            }
            if (HadoopShim.class.isAssignableFrom(shimType)) {
                hadoopShim = (HadoopShim) s;
            } else {
                shims.add(s);
            }
        }
        String id = folder.getName().getBaseName();
        String name = configurationProperties.getProperty(CONFIG_PROPERTY_NAME, id);

        HadoopConfiguration config = new HadoopConfiguration(configurationProperties, folder, id, name,
                hadoopShim, shims.toArray(EMPTY_SHIM_ARRAY));

        // Register native libraries after everything else has been loaded successfully
        registerNativeLibraryPaths(configurationProperties.getProperty(CONFIG_PROPERTY_LIBRARY_PATH));

        hadoopShim.onLoad(config, fsm);
        return config;
    } catch (Throwable t) {
        throw new ConfigurationException(
                BaseMessages.getString(PKG, "Error.LoadingConfiguration") + " " + t.toString(), t);
    }
}

From source file: org.pentaho.reporting.designer.extensions.pentaho.repository.model.RepositoryTreeModel.java

public static FileObject findNodeByName(final FileObject node, final String name) throws FileSystemException {
    if (node.getType() != FileType.FOLDER) {
        return null;
    }
    // getChild already returns null when there is no such child
    return node.getChild(name);
}