List of usage examples for org.apache.commons.vfs2 FileObject.getURL()
URL getURL() throws FileSystemException;
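Before the project-specific examples below, a minimal self-contained sketch of the call; the path is a placeholder, and any VFS-supported URI (ftp://, zip:, ...) resolves the same way:

FileSystemManager fsManager = VFS.getManager();
FileObject file = fsManager.resolveFile("/tmp/example.txt"); // placeholder path
// getURL() reports where the file lives as a java.net.URL
URL url = file.getURL();
System.out.println(url.toExternalForm());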
From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java
/**
 * Recursively searches for all files starting at the directory provided, matching the extension provided. If no
 * extension is provided, all files will be returned.
 *
 * @param root      Directory to start the search for files in
 * @param extension File extension to search for. If null, all files will be returned.
 * @return List of absolute path names to all files found in {@code root} and its subdirectories.
 * @throws FileSystemException
 */
public List<String> findFiles(FileObject root, final String extension) throws FileSystemException {
    FileObject[] files = root.findFiles(new FileSelector() {
        @Override
        public boolean includeFile(FileSelectInfo fileSelectInfo) throws Exception {
            return extension == null || extension.equals(fileSelectInfo.getFile().getName().getExtension());
        }

        @Override
        public boolean traverseDescendents(FileSelectInfo fileSelectInfo) throws Exception {
            // Only recurse into folders
            return FileType.FOLDER.equals(fileSelectInfo.getFile().getType());
        }
    });

    if (files == null) {
        return Collections.emptyList();
    }

    List<String> paths = new ArrayList<String>();
    for (FileObject file : files) {
        try {
            // toURI() decodes the URL so the result is a plain filesystem path
            paths.add(file.getURL().toURI().getPath());
        } catch (URISyntaxException ex) {
            // Preserve the original cause when rethrowing
            throw new FileSystemException("Error getting URI of file: " + file.getURL().getPath(), ex);
        }
    }
    return paths;
}
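The getURL().toURI().getPath() chain above is the key FileObject-to-path conversion; a minimal sketch of just that step, assuming a local file (the path is a placeholder):

FileObject file = VFS.getManager().resolveFile("/tmp/data/report.csv"); // placeholder
// toURI() may throw URISyntaxException; it decodes percent-escapes
// that getURL().getPath() would leave in place
String absolutePath = file.getURL().toURI().getPath();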
From source file:org.pentaho.hadoop.shim.common.DistributedCacheUtilImpl.java
/**
 * Extract a zip archive to a directory.
 *
 * @param archive Zip archive to extract
 * @param dest    Destination directory. This must not exist!
 * @return Directory the zip was extracted into
 * @throws IllegalArgumentException when the archive file does not exist or the destination directory already exists
 * @throws IOException
 * @throws KettleFileException
 */
public FileObject extract(FileObject archive, FileObject dest) throws IOException, KettleFileException {
    if (!archive.exists()) {
        throw new IllegalArgumentException("archive does not exist: " + archive.getURL().getPath());
    }
    if (dest.exists()) {
        throw new IllegalArgumentException("destination already exists");
    }
    dest.createFolder();

    try {
        byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
        int len = 0;
        ZipInputStream zis = new ZipInputStream(archive.getContent().getInputStream());
        try {
            ZipEntry ze;
            while ((ze = zis.getNextEntry()) != null) {
                FileObject entry = KettleVFS.getFileObject(dest + Const.FILE_SEPARATOR + ze.getName());
                FileObject parent = entry.getParent();
                if (parent != null) {
                    parent.createFolder();
                }
                if (ze.isDirectory()) {
                    entry.createFolder();
                    continue;
                }

                OutputStream os = KettleVFS.getOutputStream(entry, false);
                try {
                    while ((len = zis.read(buffer)) > 0) {
                        os.write(buffer, 0, len);
                    }
                } finally {
                    if (os != null) {
                        os.close();
                    }
                }
            }
        } finally {
            if (zis != null) {
                zis.close();
            }
        }
    } catch (Exception ex) {
        // Try to clean up the temp directory and all files
        if (!deleteDirectory(dest)) {
            throw new KettleFileException("Could not clean up temp dir after error extracting", ex);
        }
        throw new KettleFileException("error extracting archive", ex);
    }

    return dest;
}
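A hedged usage sketch for extract; util stands for an initialized DistributedCacheUtilImpl (its construction is omitted here and is an assumption), and both paths are placeholders:

FileObject archive = KettleVFS.getFileObject("/tmp/libs.zip");    // placeholder archive
FileObject dest = KettleVFS.getFileObject("/tmp/libs-extracted"); // must not exist yet
FileObject extracted = util.extract(archive, dest);               // 'util' is assumed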
From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocator.java
/**
 * Attempt to find any Hadoop configuration as a direct descendant of the provided directory.
 *
 * @param baseDir Directory to look for Hadoop configurations in
 * @throws ConfigurationException
 */
private void findHadoopConfigurations(FileObject baseDir, ActiveHadoopConfigurationLocator activeLocator)
        throws ConfigurationException {
    configurations = new HashMap<String, HadoopConfiguration>();
    try {
        if (!baseDir.exists()) {
            throw new ConfigurationException(BaseMessages.getString(PKG,
                    "Error.HadoopConfigurationDirectoryDoesNotExist", baseDir.getURL()));
        }
        for (FileObject f : baseDir.findFiles(new FileSelector() {
            @Override
            public boolean includeFile(FileSelectInfo info) throws Exception {
                return info.getDepth() == 1 && FileType.FOLDER.equals(info.getFile().getType());
            }

            @Override
            public boolean traverseDescendents(FileSelectInfo info) throws Exception {
                return info.getDepth() == 0;
            }
        })) {
            // Only load the specified configuration (ID should match the basename; we allow case-insensitivity)
            if (f.getName().getBaseName().equalsIgnoreCase(activeLocator.getActiveConfigurationId())) {
                HadoopConfiguration config = loadHadoopConfiguration(f);
                if (config != null) {
                    configurations.put(config.getIdentifier(), config);
                }
            }
        }
    } catch (FileSystemException ex) {
        throw new ConfigurationException(BaseMessages.getString(PKG, "Error.UnableToLoadConfigurations",
                baseDir.getName().getFriendlyURI()), ex);
    }
}
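The depth-limited FileSelector above is a reusable pattern for listing only direct subdirectories; a minimal standalone sketch (the base path is a placeholder):

FileObject baseDir = VFS.getManager().resolveFile("/opt/pentaho/hadoop-configurations"); // placeholder
FileObject[] subDirs = baseDir.findFiles(new FileSelector() {
    @Override
    public boolean includeFile(FileSelectInfo info) throws Exception {
        // depth 1 == a direct child of baseDir
        return info.getDepth() == 1 && FileType.FOLDER.equals(info.getFile().getType());
    }

    @Override
    public boolean traverseDescendents(FileSelectInfo info) throws Exception {
        // only descend into the base directory itself
        return info.getDepth() == 0;
    }
});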
From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocator.java
private List<URL> findJarsIn(FileObject path, final int maxdepth, final Set<String> paths)
        throws FileSystemException {
    FileObject[] jars = path.findFiles(new FileSelector() {
        @Override
        public boolean includeFile(FileSelectInfo info) throws Exception {
            for (String path : paths) {
                if (info.getFile().getURL().toString().endsWith(path)) {
                    return false;
                }
            }
            return info.getFile().getName().getBaseName().endsWith(JAR_EXTENSION);
        }

        @Override
        public boolean traverseDescendents(FileSelectInfo info) throws Exception {
            for (String path : paths) {
                if (info.getFile().getURL().toString().endsWith(path)) {
                    return false;
                }
            }
            return info.getDepth() <= maxdepth;
        }
    });

    List<URL> jarUrls = new ArrayList<URL>();
    for (FileObject jar : jars) {
        jarUrls.add(jar.getURL());
    }
    return jarUrls;
}
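The List<URL> built above is typically handed straight to a class loader. A sketch of that hand-off from inside the same class (findJarsIn is private); shimDir is hypothetical, and a plain URLClassLoader stands in for the project's HadoopConfigurationClassLoader:

List<URL> jarUrls = findJarsIn(shimDir, 3, new HashSet<String>()); // shimDir is hypothetical
URLClassLoader loader = new URLClassLoader(jarUrls.toArray(new URL[0]),
        Thread.currentThread().getContextClassLoader());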
From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocator.java
/**
 * Create a ClassLoader to load resources for a {@code HadoopConfiguration}.
 *
 * @param root                    Configuration root directory
 * @param parent                  Parent class loader to delegate to if resources cannot be found in the
 *                                configuration's directory or provided classpath
 * @param classpathUrls           Additional URLs to add to the class loader. These will be added before any
 *                                internal resources.
 * @param configurationProperties Shim properties, consulted for ignored classpath entries and excluded jars
 * @param ignoredClasses          Classes (or packages) that should not be loaded by the class loader
 * @return A class loader capable of loading a Hadoop configuration located at {@code root}.
 * @throws ConfigurationException Error creating a class loader for the Hadoop configuration located at {@code root}
 */
protected ClassLoader createConfigurationLoader(FileObject root, ClassLoader parent, List<URL> classpathUrls,
        ShimProperties configurationProperties, String... ignoredClasses) throws ConfigurationException {
    try {
        if (root == null || !FileType.FOLDER.equals(root.getType())) {
            throw new IllegalArgumentException("root must be a folder: " + root);
        }

        // Find all jar files in the configuration, at most 2 folders deep
        List<URL> jars = findJarsIn(root, 3, configurationProperties.getConfigSet(SHIM_CLASSPATH_IGNORE));

        // Add the root of the configuration
        jars.add(0, new URL(root.getURL().toExternalForm() + "/"));

        // Inject any overriding URLs before all other paths
        if (classpathUrls != null) {
            jars.addAll(0, classpathUrls);
        }

        // Exclude jars listed in the exclude.jars property of config.properties
        jars = filterJars(jars, configurationProperties.getProperty(CONFIG_PROPERTY_EXCLUDE_JARS));

        return new HadoopConfigurationClassLoader(jars.toArray(EMPTY_URL_ARRAY), parent, ignoredClasses);
    } catch (Exception ex) {
        throw new ConfigurationException(BaseMessages.getString(PKG, "Error.CreatingClassLoader"), ex);
    }
}
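One detail worth isolating: a URLClassLoader-style loader only treats a URL as a directory of classes and resources when it ends with "/", which is why the configuration root above is added with an explicit trailing slash:

// Without the trailing "/", the URL would be treated as a jar file
URL configRoot = new URL(root.getURL().toExternalForm() + "/");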
From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocator.java
/**
 * Parse a set of URLs from a comma-separated list of URLs. If a URL points to a directory, all jar files within
 * that directory will be returned as well.
 *
 * @param urlString Comma-separated list of URLs (relative or absolute)
 * @return List of URLs resolved from {@code urlString}
 */
protected List<URL> parseURLs(FileObject root, String urlString) {
    if (urlString == null || urlString.trim().isEmpty()) {
        return Collections.emptyList();
    }
    String[] paths = urlString.split(",");
    List<URL> urls = new ArrayList<URL>();
    for (String path : paths) {
        try {
            FileObject file = root.resolveFile(path.trim());
            if (!file.exists()) {
                file = defaultFsm.resolveFile(path.trim());
            }
            if (FileType.FOLDER.equals(file.getType())) {
                // Add directories with a trailing / so the URLClassLoader interprets
                // them as directories
                urls.add(new URL(file.getURL().toExternalForm() + "/"));
                // Also add all jars within this directory
                urls.addAll(findJarsIn(file, 1, new HashSet<String>()));
            } else {
                urls.add(file.getURL());
            }
        } catch (Exception e) {
            // Log invalid path
            logger.error(BaseMessages.getString(PKG, "Error.InvalidClasspathEntry", path));
        }
    }
    return urls;
}
From source file:org.pentaho.hadoop.shim.HadoopConfigurationLocatorTest.java
@Test
public void parseURLs() throws Exception {
    HadoopConfigurationLocator locator = new HadoopConfigurationLocator();
    FileObject root = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    List<URL> urls = locator.parseURLs(root, "a,b");
    assertEquals(2, urls.size());
    assertEquals(root.getURL().toURI().resolve("hadoop-configurations/a/"), urls.get(0).toURI());
    assertEquals(root.getURL().toURI().resolve("hadoop-configurations/a/a-config.jar"), urls.get(1).toURI());
}
From source file:org.pentaho.metaverse.api.analyzer.kettle.KettleAnalyzerUtil.java
/**
 * Utility method for normalizing file paths used in Metaverse Id generation. It will convert a valid path into a
 * consistent path regardless of URI notation or filesystem absolute path.
 *
 * @param filePath full path to normalize
 * @return the normalized path
 */
public static String normalizeFilePath(String filePath) throws MetaverseException {
    try {
        String path = filePath;
        FileObject fo = KettleVFS.getFileObject(filePath);
        try {
            path = fo.getURL().getPath();
        } catch (Throwable t) {
            // Something went wrong with VFS, just try the filePath
        }
        File f = new File(path);
        return f.getAbsolutePath();
    } catch (Exception e) {
        throw new MetaverseException(e);
    }
}
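A hedged sketch of the normalization above on concrete inputs (both are placeholders); a VFS URI and a plain filesystem path should collapse to the same absolute path:

// Both calls are expected to yield "/tmp/reports/summary.txt" on a Unix-like system
String fromUri = KettleAnalyzerUtil.normalizeFilePath("file:///tmp/reports/summary.txt");
String fromPath = KettleAnalyzerUtil.normalizeFilePath("/tmp/reports/summary.txt");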
From source file:org.renjin.primitives.packaging.Namespaces.java
@Internal("find.package") public static StringVector findPackage(@Current Context context, final AtomicVector packageNames) throws FileSystemException { StringArrayVector.Builder result = new StringArrayVector.Builder(); for (int i = 0; i < packageNames.length(); i++) { String packageName = packageNames.getElementAsString(i); Namespace namespace = context.getNamespaceRegistry().getNamespace(context, packageName); FileObject fileObject = namespace.getPackage().resolvePackageRoot(context.getFileSystemManager()); result.add(fileObject.getURL().toString()); }//w ww. ja v a 2 s .co m return result.build(); }
From source file:org.renjin.primitives.System.java
@Internal("system") public static SEXP system(@Current Context context, String command, int flag, SEXP stdin, SEXP stdout, SEXP stderr) throws IOException, InterruptedException { boolean invisible = (flag >= 20 && flag < 29); boolean minimized = (flag >= 10 && flag < 19); List<String> args = parseArgs(command); ProcessBuilder builder = new ProcessBuilder(args); FileObject workingDir = context.getSession().getWorkingDirectory(); if (workingDir instanceof LocalFile) { File localDir = new File(workingDir.getURL().getFile()); builder.directory(localDir);/*from ww w . j a v a 2s. co m*/ } Process process = builder.start(); process.waitFor(); int exitValue = process.exitValue(); return new IntArrayVector(exitValue); }