List of usage examples for org.apache.commons.vfs2.FileObject.toString()

public String toString()
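For orientation before the project examples, here is a minimal, self-contained sketch (not taken from the sources below) of what toString() yields on a FileObject: the file's URI as a string. The class name and local path are hypothetical placeholders.

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.VFS;

public class FileObjectToStringExample {
    public static void main(String[] args) throws FileSystemException {
        // Resolve a file through the default VFS manager; the path is a placeholder.
        FileObject file = VFS.getManager().resolveFile("/tmp/example.txt");
        // toString() returns the file's URI, e.g. "file:///tmp/example.txt".
        System.out.println(file.toString());
        file.close();
    }
}

This is why the examples below pass fileObject.toString() directly into log messages, map keys, and exceptions: it is a convenient way to get the full URI of the file.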
From source file: org.luwrain.app.commander.InfoAndProperties.java

void fillDirInfo(FileObject fileObj, MutableLines lines) {
    NullCheck.notNull(fileObj, "fileObj");
    NullCheck.notNull(lines, "lines");
    lines.addLine("URL: " + fileObj.toString());
    lines.addLine("");
}
From source file: org.metaborg.intellij.idea.projects.ModuleBuilderUtils.java

/**
 * Gets a list of source paths.
 *
 * @return A list of (path, packagePrefix) pairs.
 * @throws ConfigurationException
 */
@Nullable
public static List<Pair<String, String>> getSourcePaths(final LanguageIdentifier languageIdentifier,
        final FileObject contentEntry) throws ConfigurationException {
    final SpoofaxLangSpecCommonPaths paths = new SpoofaxLangSpecCommonPaths(contentEntry);
    final List<Pair<String, String>> sourcePaths = new ArrayList<>();
    for (final FileObject javaSrcDir : paths.javaSrcDirs(languageIdentifier.id)) {
        sourcePaths.add(Pair.create(javaSrcDir.toString(), ""));
    }
    return sourcePaths;
}
From source file: org.pentaho.di.trans.steps.enhanced.jsoninput.JsonInput.java

@Override
protected void fillFileAdditionalFields(JsonInputData data, FileObject file) throws FileSystemException {
    super.fillFileAdditionalFields(data, file);
    data.filename = KettleVFS.getFilename(file);
    data.filenr++;
    if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JsonInput.Log.OpeningFile", file.toString()));
    }
    addFileToResultFilesname(file);
}
From source file: org.pentaho.di.trans.steps.file.BaseFileInputStep.java

/**
 * Read files from previous step.
 */
private RowMetaInterface[] filesFromPreviousStep() throws KettleException {
    RowMetaInterface[] infoStep = null;

    data.files.getFiles().clear();

    int idx = -1;
    RowSet rowSet = findInputRowSet(meta.inputFiles.acceptingStepName);

    Object[] fileRow = getRowFrom(rowSet);
    while (fileRow != null) {
        RowMetaInterface prevInfoFields = rowSet.getRowMeta();
        if (idx < 0) {
            if (meta.inputFiles.passingThruFields) {
                data.passThruFields = new HashMap<String, Object[]>();
                infoStep = new RowMetaInterface[] { prevInfoFields };
                data.nrPassThruFields = prevInfoFields.size();
            }
            idx = prevInfoFields.indexOfValue(meta.inputFiles.acceptingField);
            if (idx < 0) {
                logError(BaseMessages.getString(PKG, "BaseFileInputStep.Log.Error.UnableToFindFilenameField",
                        meta.inputFiles.acceptingField));
                setErrors(getErrors() + 1);
                stopAll();
                return null;
            }
        }
        String fileValue = prevInfoFields.getString(fileRow, idx);
        try {
            FileObject fileObject = KettleVFS.getFileObject(fileValue, getTransMeta());
            data.files.addFile(fileObject);
            if (meta.inputFiles.passingThruFields) {
                StringBuilder sb = new StringBuilder();
                sb.append(data.files.nrOfFiles() > 0 ? data.files.nrOfFiles() - 1 : 0).append("_")
                        .append(fileObject.toString());
                data.passThruFields.put(sb.toString(), fileRow);
            }
        } catch (KettleFileException e) {
            logError(BaseMessages.getString(PKG, "BaseFileInputStep.Log.Error.UnableToCreateFileObject",
                    fileValue), e);
        }
        // Grab another row
        fileRow = getRowFrom(rowSet);
    }

    if (data.files.nrOfFiles() == 0) {
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "BaseFileInputStep.Log.Error.NoFilesSpecified"));
        }
        return null;
    }

    return infoStep;
}
From source file: org.pentaho.hadoop.PluginPropertiesUtil.java

/**
 * Loads a properties file from the plugin directory for the plugin interface provided.
 *
 * @param plugin
 * @param relativeName
 * @return
 * @throws KettleFileException
 * @throws IOException
 */
protected Properties loadProperties(PluginInterface plugin, String relativeName)
        throws KettleFileException, IOException {
    if (plugin == null) {
        throw new NullPointerException();
    }
    FileObject propFile = KettleVFS
            .getFileObject(plugin.getPluginDirectory().getPath() + Const.FILE_SEPARATOR + relativeName);
    if (!propFile.exists()) {
        throw new FileNotFoundException(propFile.toString());
    }
    try {
        return new PropertiesConfigurationProperties(propFile);
    } catch (Exception e) {
        // Do not catch ConfigurationException. Different shims will use different
        // packages for this exception.
        throw new IOException(e);
    }
}
From source file: org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

@Test
public void filterJars_null_arg_excludedJarsProperty() throws Exception {
    HadoopConfigurationLocator locator = new HadoopConfigurationLocator();
    FileObject root = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    List<URL> urls = locator.parseURLs(root, root.toString());
    count = urls.size();
    List<URL> list = locator.filterJars(urls, null);
    assertEquals(count, list.size());
}
From source file: org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

@Test
public void filterJars_arg_excludedJarsProperty_emptyString() throws Exception {
    HadoopConfigurationLocator locator = new HadoopConfigurationLocator();
    FileObject root = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    List<URL> urls = locator.parseURLs(root, root.toString());
    count = urls.size();
    List<URL> list = locator.filterJars(urls, "");
    assertEquals(count, list.size());
}
From source file: org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

@Test
public void filterJars_arg_urls_containsOnlyExcludedJars() throws Exception {
    HadoopConfigurationLocator locator = new HadoopConfigurationLocator();
    FileObject root = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    List<URL> urls = locator.parseURLs(root, root.toString());
    Iterator<URL> iterator = urls.listIterator();
    while (iterator.hasNext()) {
        URL url = iterator.next();
        if (FileType.FOLDER.equals(root.resolveFile(url.toString().trim()).getType())) {
            iterator.remove();
        }
    }
    List<URL> list = locator.filterJars(urls,
            "xercesImpl,xml-apis-1.3.04.jar,xml-apis-ext-1.3.04,xerces-version-1.8.0,xercesImpl2-2.9.1,"
                    + "pentaho-hadoop-shims-api-61.2016.04.01-196,commands-3.3.0-I20070605-0010,postgresql,trilead-ssh2-build213"
                    + ".jar,trilead-ssh2-build215.jar");
    assertEquals(0, list.size());
}
From source file: org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

@Test
public void filterJars_removeOnlyXercesImpl() throws Exception {
    HadoopConfigurationLocator locator = new HadoopConfigurationLocator();
    FileObject root = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    List<URL> urls = locator.parseURLs(root, root.toString());
    count = urls.size();
    List<URL> list = locator.filterJars(urls, "xercesImpl");
    assertEquals(count - 1, list.size());
}
From source file: org.pentaho.hadoop.shim.HadoopExcludeJarsTest.java

@Test
public void filterJars_removeOnlyByArtifactIdTemplate() throws Exception {
    HadoopConfigurationLocator locator = new HadoopConfigurationLocator();
    FileObject root = VFS.getManager().resolveFile(HADOOP_CONFIGURATIONS_PATH);
    List<URL> urls = locator.parseURLs(root, root.toString());
    count = urls.size();
    List<URL> list = locator.filterJars(urls, "pentaho-hadoop-shims-api");
    assertEquals(count - 1, list.size());
}