List of usage examples for org.apache.commons.vfs FileObject getName
public FileName getName();
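All of the examples below come from the Pentaho Data Integration (Kettle) code base. getName() does not return a plain path string; it returns the file's abstract FileName, from which the base name, extension, parent and full URI can be read without any string parsing. As a minimal, self-contained sketch of that API (the path /tmp/example.txt is a made-up placeholder, not taken from the sources below):

import org.apache.commons.vfs.FileName;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemManager;
import org.apache.commons.vfs.VFS;

public class FileNameDemo {
  public static void main(String[] args) throws Exception {
    FileSystemManager fsManager = VFS.getManager();
    // Resolve any supported URI (local path, sftp://, zip://, ...) to a FileObject.
    FileObject file = fsManager.resolveFile("/tmp/example.txt");
    try {
      FileName name = file.getName();                // abstract, scheme-aware name
      System.out.println(name.getURI());             // e.g. file:///tmp/example.txt
      System.out.println(name.getBaseName());        // example.txt
      System.out.println(name.getExtension());       // txt
      System.out.println(name.getParent().getURI()); // file:///tmp
    } finally {
      file.close();
    }
  }
}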
From source file:org.pentaho.di.trans.steps.sftpput.SFTPPut.java
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  meta = (SFTPPutMeta) smi;
  data = (SFTPPutData) sdi;

  boolean sendToErrorRow = false;
  String errorMessage = null;

  Object[] r = getRow(); // get row, set busy!
  if (r == null) {
    // no more input to be expected...
    setOutputDone();
    return false;
  }

  if (first) {
    // Only executed for the first row received
    first = false;

    try {
      // String substitution..
      String realServerName = environmentSubstitute(meta.getServerName());
      String realServerPort = environmentSubstitute(meta.getServerPort());
      String realUsername = environmentSubstitute(meta.getUserName());
      String realPassword = Encr.decryptPasswordOptionallyEncrypted(environmentSubstitute(meta.getPassword()));
      String realKeyFilename = null;
      String realPassPhrase = null;

      if (meta.isUseKeyFile()) {
        // We must have a private key filename here
        realKeyFilename = environmentSubstitute(meta.getKeyFilename());
        if (Const.isEmpty(realKeyFilename)) {
          // Error.. missing key file
          logError(BaseMessages.getString(PKG, "SFTPPut.Error.KeyFileMissing"));
          return false;
        }
        if (!KettleVFS.fileExists(realKeyFilename)) {
          // Error.. can not reach the key file
          logError(BaseMessages.getString(PKG, "SFTPPut.Error.KeyFileNotFound", realKeyFilename));
          return false;
        }
        realPassPhrase = environmentSubstitute(meta.getKeyPassPhrase());
      }

      // Let's try to establish an SFTP connection: create an SFTP client for the host...
      data.sftpclient = new SFTPClient(InetAddress.getByName(realServerName), Const.toInt(realServerPort, 22),
        realUsername, realKeyFilename, realPassPhrase);

      // connection successfully established
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "SFTPPUT.Log.OpenedConnection", realServerName, realServerPort,
          realUsername));
      }

      // Set compression
      data.sftpclient.setCompression(meta.getCompression());

      // Set proxy?
      String realProxyHost = environmentSubstitute(meta.getProxyHost());
      if (!Const.isEmpty(realProxyHost)) {
        // Set proxy
        data.sftpclient.setProxy(realProxyHost, environmentSubstitute(meta.getProxyPort()),
          environmentSubstitute(meta.getProxyUsername()), environmentSubstitute(meta.getProxyPassword()),
          meta.getProxyType());
      }

      // log in to the SFTP host...
      data.sftpclient.login(realPassword);
    } catch (Exception e) {
      throw new KettleException(BaseMessages.getString(PKG, "SFTPPUT.Error.Connection"), e);
    }

    // Let's perform some checks

    // Source filename field
    String sourceFilenameFieldName = environmentSubstitute(meta.getSourceFileFieldName());
    if (Const.isEmpty(sourceFilenameFieldName)) {
      // source filename field is missing
      throw new KettleStepException(BaseMessages.getString(PKG, "SFTPPut.Error.SourceFileNameFieldMissing"));
    }
    data.indexOfSourceFileFieldName = getInputRowMeta().indexOfValue(sourceFilenameFieldName);
    if (data.indexOfSourceFileFieldName < 0) {
      // source filename field can not be found in the input row
      throw new KettleStepException(
        BaseMessages.getString(PKG, "SFTPPut.Error.CanNotFindField", sourceFilenameFieldName));
    }

    // Remote folder field name
    String remoteFoldernameFieldName = environmentSubstitute(meta.getRemoteDirectoryFieldName());
    if (Const.isEmpty(remoteFoldernameFieldName)) {
      // remote folder field is missing
      throw new KettleStepException(BaseMessages.getString(PKG, "SFTPPut.Error.RemoteFolderNameFieldMissing"));
    }
    data.indexOfRemoteDirectory = getInputRowMeta().indexOfValue(remoteFoldernameFieldName);
    if (data.indexOfRemoteDirectory < 0) {
      // remote folder name field can not be found in the input row
      throw new KettleStepException(
        BaseMessages.getString(PKG, "SFTPPut.Error.CanNotFindField", remoteFoldernameFieldName));
    }

    // Move to folder
    if (meta.getAfterFTPS() == JobEntrySFTPPUT.AFTER_FTPSPUT_MOVE) {
      String realDestinationFoldernameFieldName = environmentSubstitute(meta.getDestinationFolderFieldName());
      if (Const.isEmpty(realDestinationFoldernameFieldName)) {
        throw new KettleStepException(
          BaseMessages.getString(PKG, "SFTPPut.Log.DestinatFolderNameFieldNameMissing"));
      }
      data.indexOfMoveToFolderFieldName = getInputRowMeta().indexOfValue(realDestinationFoldernameFieldName);
      if (data.indexOfMoveToFolderFieldName < 0) {
        // move-to-folder field can not be found in the input row
        throw new KettleStepException(
          BaseMessages.getString(PKG, "SFTPPut.Error.CanNotFindField", realDestinationFoldernameFieldName));
      }
    }
  }

  // Read the data to upload
  String sourceData = getInputRowMeta().getString(r, data.indexOfSourceFileFieldName);

  InputStream inputStream = null;
  FileObject destinationFolder = null;
  FileObject file = null;
  try {
    if (Const.isEmpty(sourceData)) {
      // Source data is empty
      throw new KettleStepException(BaseMessages.getString(PKG, "SFTPPut.Error.SourceDataEmpty"));
    }

    if (meta.isInputStream()) {
      // Source data is a stream
      inputStream = new ByteArrayInputStream(sourceData.getBytes());
    } else {
      // Source data is a file, so check that the file exists
      file = KettleVFS.getFileObject(sourceData);
      if (!file.exists()) {
        // We can not find the file
        throw new KettleStepException(BaseMessages.getString(PKG, "SFTPPut.Error.CanNotFindField", sourceData));
      }
      // get a stream from the file
      inputStream = KettleVFS.getInputStream(file);
    }

    if (file != null) {
      if (meta.getAfterFTPS() == JobEntrySFTPPUT.AFTER_FTPSPUT_MOVE) {
        String realDestinationFolder = getInputRowMeta().getString(r, data.indexOfMoveToFolderFieldName);
        if (Const.isEmpty(realDestinationFolder)) {
          // Move-to destination folder is empty
          throw new KettleStepException(
            BaseMessages.getString(PKG, "SFTPPut.Error.MoveToDestinationFolderIsEmpty"));
        }
        destinationFolder = KettleVFS.getFileObject(realDestinationFolder);
        if (!destinationFolder.exists()) {
          // We can not find the folder
          throw new KettleStepException(
            BaseMessages.getString(PKG, "SFTPPut.Error.CanNotFindFolder", realDestinationFolder));
        }
      }
    }

    // move to the spool dir ...
    setSFTPDirectory(getInputRowMeta().getString(r, data.indexOfRemoteDirectory));

    // Destination filename: the base name of the source FileObject
    String destinationFilename = file.getName().getBaseName();

    // Upload a stream
    data.sftpclient.put(inputStream, destinationFilename);

    if (file != null) {
      // We successfully uploaded the file... what's next?
      finishTheJob(file, sourceData, destinationFolder);
    }

    putRow(getInputRowMeta(), r); // copy row to possible alternate rowset(s).

    if (checkFeedback(getLinesRead())) {
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "SFTPPut.Log.LineNumber") + getLinesRead());
      }
    }
  } catch (Exception e) {
    if (getStepMeta().isDoingErrorHandling()) {
      sendToErrorRow = true;
      errorMessage = e.toString();
    } else {
      logError(BaseMessages.getString(PKG, "SFTPPut.Log.ErrorInStep"), e);
      setErrors(1);
      stopAll();
      setOutputDone(); // signal end to receiver(s)
      return false;
    }
    if (sendToErrorRow) {
      // Simply add this row to the error rows
      putError(getInputRowMeta(), r, 1, errorMessage, null, "SFTPPUT001");
    }
  } finally {
    try {
      if (destinationFolder != null) {
        destinationFolder.close();
      }
      if (file != null) {
        file.close();
      }
      if (inputStream != null) {
        inputStream.close();
      }
    } catch (Exception e) {
      // ignore this
    }
  }

  return true;
}
From source file:org.pentaho.di.trans.steps.sftpput.SFTPPut.java
protected void finishTheJob(FileObject file, String sourceData, FileObject destinationFolder)
  throws KettleException {
  try {
    switch (meta.getAfterFTPS()) {
      case JobEntrySFTPPUT.AFTER_FTPSPUT_DELETE:
        // Delete the source file
        if (file.exists()) {
          file.delete();
          if (isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "SFTPPut.Log.DeletedFile", sourceData));
          }
        }
        break;
      case JobEntrySFTPPUT.AFTER_FTPSPUT_MOVE:
        // Move the source file
        FileObject destination = null;
        try {
          destination = KettleVFS.getFileObject(destinationFolder.getName().getBaseName()
            + Const.FILE_SEPARATOR + file.getName().getBaseName(), this);
          file.moveTo(destination);
          if (isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "SFTPPut.Log.FileMoved", file, destination));
          }
        } finally {
          if (destination != null) {
            destination.close();
          }
        }
        break;
      default:
        if (meta.isAddFilenameResut()) {
          // Add this to the result file names...
          ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, file, getTransMeta().getName(),
            getStepname());
          resultFile.setComment(BaseMessages.getString(PKG, "SFTPPut.Log.FilenameAddedToResultFilenames"));
          addResultFile(resultFile);
          if (isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "SFTPPut.Log.FilenameAddedToResultFilenames", sourceData));
          }
        }
        break;
    }
  } catch (Exception e) {
    throw new KettleException(e);
  }
}
From source file:org.pentaho.di.trans.steps.sqlfileoutput.SQLFileOutput.java
public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
  meta = (SQLFileOutputMeta) smi;
  data = (SQLFileOutputData) sdi;

  if (super.init(smi, sdi)) {
    try {
      if (meta.getDatabaseMeta() == null) {
        throw new KettleStepException("The connection is not defined (empty)");
      }
      if (meta.getDatabaseMeta() == null) {
        logError(BaseMessages.getString(PKG, "SQLFileOutput.Init.ConnectionMissing", getStepname()));
        return false;
      }
      data.db = new Database(this, meta.getDatabaseMeta());
      data.db.shareVariablesWith(this);

      logBasic("Connected to database [" + meta.getDatabaseMeta() + "]");

      if (meta.isCreateParentFolder()) {
        // Check for the parent folder
        FileObject parentfolder = null;
        try {
          // Get the parent folder of the output file
          String filename = environmentSubstitute(meta.getFileName());
          parentfolder = KettleVFS.getFileObject(filename, getTransMeta()).getParent();
          if (!parentfolder.exists()) {
            log.logBasic("Folder parent", "Folder parent " + parentfolder.getName() + " does not exist!");
            parentfolder.createFolder();
            log.logBasic("Folder parent", "Folder parent was created.");
          }
        } catch (Exception e) {
          logError("Couldn't create parent folder " + parentfolder.getName());
          setErrors(1L);
          stopAll();
        } finally {
          if (parentfolder != null) {
            try {
              parentfolder.close();
            } catch (Exception ex) {
              /* Ignore */
            }
          }
        }
      }

      if (!meta.isDoNotOpenNewFileInit()) {
        if (!openNewFile()) {
          logError("Couldn't open file [" + buildFilename() + "]");
          setErrors(1L);
          stopAll();
        }
      }

      tableName = environmentSubstitute(meta.getTablename());
      schemaName = environmentSubstitute(meta.getSchemaName());

      if (Const.isEmpty(tableName)) {
        throw new KettleStepException("The tablename is not defined (empty)");
      }

      schemaTable = data.db.getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);

    } catch (Exception e) {
      logError("An error occurred initializing this step: " + e.getMessage());
      stopAll();
      setErrors(1);
    }

    return true;
  }
  return false;
}
From source file:org.pentaho.di.trans.steps.symmetriccrypto.symmetricalgorithm.SymmetricCrypto.java
public void setSecretKeyFromFile(String filename) throws CryptoKeyException {
  FileObject file = null;
  try {
    file = KettleVFS.getFileObject(filename);
    if (!file.exists()) {
      throw new CryptoException(BaseMessages.getString(PKG, "SymmetricCrypto.CanNotFindFile", file.getName()));
    }

    // Read the whole key file into a byte array and use it as the secret key
    byte[] keyBytes = new byte[(int) file.getContent().getSize()];
    DataInputStream in = new DataInputStream(file.getContent().getInputStream());
    try {
      in.readFully(keyBytes);
    } finally {
      in.close();
    }
    setSecretKey(keyBytes);
  } catch (Exception e) {
    throw new CryptoKeyException(e);
  } finally {
    if (file != null) {
      try {
        file.close();
      } catch (Exception e) {
        /* Ignore */
      }
    }
  }
}
From source file:org.pentaho.di.trans.steps.textfileoutput.TextFileOutput.java
private void createParentFolder(String filename) throws Exception {
  // Check for the parent folder
  FileObject parentfolder = null;
  try {
    // Get the parent folder
    parentfolder = getFileObject(filename).getParent();

    if (parentfolder.exists()) {
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "TextFileOutput.Log.ParentFolderExist", parentfolder.getName()));
      }
    } else {
      if (isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "TextFileOutput.Log.ParentFolderNotExist",
          parentfolder.getName()));
      }
      if (meta.isCreateParentFolder()) {
        parentfolder.createFolder();
        if (isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "TextFileOutput.Log.ParentFolderCreated",
            parentfolder.getName()));
        }
      } else {
        throw new KettleException(BaseMessages.getString(PKG, "TextFileOutput.Log.ParentFolderNotExistCreateIt",
          parentfolder.getName(), filename));
      }
    }
  } finally {
    if (parentfolder != null) {
      try {
        parentfolder.close();
      } catch (Exception ex) {
        // Ignore
      }
    }
  }
}
From source file:org.pentaho.di.trans.Trans.java
/**
 * Send the transformation for execution to a Carte slave server.
 *
 * @param transMeta
 *          the transformation meta-data
 * @param executionConfiguration
 *          the transformation execution configuration
 * @param repository
 *          the repository
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside
 * @return The Carte object ID on the server.
 * @throws KettleException
 *           if any errors occur during the dispatch to the slave server
 */
public static String sendToSlaveServer(TransMeta transMeta, TransExecutionConfiguration executionConfiguration,
  Repository repository, IMetaStore metaStore) throws KettleException {
  String carteObjectId;
  SlaveServer slaveServer = executionConfiguration.getRemoteServer();

  if (slaveServer == null) {
    throw new KettleException("No slave server specified");
  }
  if (Const.isEmpty(transMeta.getName())) {
    throw new KettleException("The transformation needs a name to uniquely identify it by on the remote server.");
  }

  try {
    // Inject certain internal variables to make it more intuitive.
    //
    Map<String, String> vars = new HashMap<String, String>();

    for (String var : Const.INTERNAL_TRANS_VARIABLES) {
      vars.put(var, transMeta.getVariable(var));
    }
    for (String var : Const.INTERNAL_JOB_VARIABLES) {
      vars.put(var, transMeta.getVariable(var));
    }

    executionConfiguration.getVariables().putAll(vars);
    slaveServer.injectVariables(executionConfiguration.getVariables());

    slaveServer.getLogChannel().setLogLevel(executionConfiguration.getLogLevel());

    if (executionConfiguration.isPassingExport()) {

      // First export the transformation...
      //
      FileObject tempFile = KettleVFS.createTempFile("transExport", ".zip",
        System.getProperty("java.io.tmpdir"), transMeta);

      TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(
        tempFile.getName().toString(), transMeta, transMeta, repository, metaStore,
        executionConfiguration.getXML(), CONFIGURATION_IN_EXPORT_FILENAME);

      // Send the zip file over to the slave server...
      //
      String result = slaveServer.sendExport(topLevelResource.getArchiveName(), AddExportServlet.TYPE_TRANS,
        topLevelResource.getBaseResourceName());
      WebResult webResult = WebResult.fromXMLString(result);
      if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
        throw new KettleException("There was an error passing the exported transformation to the remote server: "
          + Const.CR + webResult.getMessage());
      }
      carteObjectId = webResult.getId();
    } else {

      // Now send it off to the remote server...
      //
      String xml = new TransConfiguration(transMeta, executionConfiguration).getXML();
      String reply = slaveServer.sendXML(xml, AddTransServlet.CONTEXT_PATH + "/?xml=Y");
      WebResult webResult = WebResult.fromXMLString(reply);
      if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
        throw new KettleException("There was an error posting the transformation on the remote server: "
          + Const.CR + webResult.getMessage());
      }
      carteObjectId = webResult.getId();
    }

    // Prepare the transformation
    //
    String reply = slaveServer.execService(PrepareExecutionTransServlet.CONTEXT_PATH + "/?name="
      + URLEncoder.encode(transMeta.getName(), "UTF-8") + "&xml=Y&id=" + carteObjectId);
    WebResult webResult = WebResult.fromXMLString(reply);
    if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
      throw new KettleException("There was an error preparing the transformation for execution on the remote server: "
        + Const.CR + webResult.getMessage());
    }

    // Start the transformation
    //
    reply = slaveServer.execService(StartExecutionTransServlet.CONTEXT_PATH + "/?name="
      + URLEncoder.encode(transMeta.getName(), "UTF-8") + "&xml=Y&id=" + carteObjectId);
    webResult = WebResult.fromXMLString(reply);
    if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
      throw new KettleException("There was an error starting the transformation on the remote server: "
        + Const.CR + webResult.getMessage());
    }

    return carteObjectId;
  } catch (KettleException ke) {
    throw ke;
  } catch (Exception e) {
    throw new KettleException(e);
  }
}
From source file:org.pentaho.di.trans.Trans.java
/**
 * Sets the internal kettle variables.
 *
 * @param var
 *          the new internal kettle variables
 */
public void setInternalKettleVariables(VariableSpace var) {
  if (transMeta != null && !Const.isEmpty(transMeta.getFilename())) {
    // we have a filename that's defined
    try {
      FileObject fileObject = KettleVFS.getFileObject(transMeta.getFilename(), var);
      FileName fileName = fileObject.getName();

      // The filename of the transformation
      variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, fileName.getBaseName());

      // The directory of the transformation
      FileName fileDir = fileName.getParent();
      variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, fileDir.getURI());
    } catch (KettleFileException e) {
      variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, "");
      variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, "");
    }
  } else {
    variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, "");
    variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, "");
  }

  // The name of the transformation
  variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_NAME, Const.NVL(transMeta.getName(), ""));

  // TODO PUT THIS INSIDE OF THE "IF"
  // The name of the directory in the repository
  variables.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY,
    transMeta.getRepositoryDirectory() != null ? transMeta.getRepositoryDirectory().getPath() : "");

  // Here we don't clear the definition of the job-specific parameters, as they may come in handy.
  // A transformation can be called from a job and may inherit the job's internal variables,
  // but the other way around is not possible.
}
From source file:org.pentaho.di.trans.TransMeta.java
/**
 * Sets the internal filename kettle variables.
 *
 * @param var
 *          the new internal filename kettle variables
 */
protected void setInternalFilenameKettleVariables(VariableSpace var) {
  // If we have a filename that's defined, set the variables. If not, clear them.
  //
  if (!Const.isEmpty(filename)) {
    try {
      FileObject fileObject = KettleVFS.getFileObject(filename, var);
      FileName fileName = fileObject.getName();

      // The filename of the transformation
      var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, fileName.getBaseName());

      // The directory of the transformation
      FileName fileDir = fileName.getParent();
      var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, fileDir.getURI());
    } catch (KettleFileException e) {
      log.logError("Unexpected error setting internal filename variables!", e);

      var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, "");
      var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, "");
    }
  } else {
    var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, "");
    var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, "");
  }
}
From source file:org.pentaho.di.trans.TransMeta.java
/**
 * Exports the specified objects to a flat-file system, adding content with filename keys to a set of
 * definitions. The supplied resource naming interface allows the object to be named appropriately without
 * worrying about implementation-specific details.
 *
 * @param space
 *          the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside
 *
 * @return the filename of the exported resource
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore)
  throws KettleException {
  try {
    // Handle naming for both repository and XML-based resources...
    //
    String baseName;
    String originalPath;
    String fullname;
    String extension = "ktr";
    if (Const.isEmpty(getFilename())) {
      // Assume repository...
      //
      originalPath = directory.getPath();
      baseName = getName();
      fullname = directory.getPath()
        + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? ""
          : RepositoryDirectory.DIRECTORY_SEPARATOR)
        + getName() + "." + extension;
    } else {
      // Assume file
      //
      FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
      originalPath = fileObject.getParent().getURL().toString();
      baseName = fileObject.getName().getBaseName();
      fullname = fileObject.getURL().toString();
    }

    String exportFileName = resourceNamingInterface.nameResource(baseName, originalPath, extension,
      ResourceNamingInterface.FileNamingType.TRANSFORMATION);
    ResourceDefinition definition = definitions.get(exportFileName);
    if (definition == null) {
      // If we do this once, it will be plenty :-)
      //
      TransMeta transMeta = (TransMeta) this.realClone(false);

      // transMeta.copyVariablesFrom(space);

      // Add used resources, modify transMeta accordingly.
      // Go through the list of steps, etc.
      // These critters change the steps in the cloned TransMeta.
      // At the end we make a new XML version of it in "exported" format...

      // Loop over the steps; databases will be exported to XML anyway.
      //
      for (StepMeta stepMeta : transMeta.getSteps()) {
        stepMeta.exportResources(space, definitions, resourceNamingInterface, repository, metaStore);
      }

      // Change the filename; calling this sets internal variables inside of the transformation.
      //
      transMeta.setFilename(exportFileName);

      // All objects get re-located to the root folder
      //
      transMeta.setRepositoryDirectory(new RepositoryDirectory());

      // Set a number of parameters for all the data files referenced so far...
      //
      Map<String, String> directoryMap = resourceNamingInterface.getDirectoryMap();
      if (directoryMap != null) {
        for (String directory : directoryMap.keySet()) {
          String parameterName = directoryMap.get(directory);
          transMeta.addParameterDefinition(parameterName, directory, "Data file path discovered during export");
        }
      }

      // At the end, add ourselves to the map...
      //
      String transMetaContent = transMeta.getXML();

      definition = new ResourceDefinition(exportFileName, transMetaContent);

      // Also remember the original filename (if any), including variables etc.
      //
      if (Const.isEmpty(this.getFilename())) {
        // Repository
        definition.setOrigin(fullname);
      } else {
        definition.setOrigin(this.getFilename());
      }

      definitions.put(fullname, definition);
    }

    return exportFileName;
  } catch (FileSystemException e) {
    throw new KettleException(BaseMessages.getString(PKG,
      "TransMeta.Exception.ErrorOpeningOrValidatingTheXMLFile", getFilename()), e);
  } catch (KettleFileException e) {
    throw new KettleException(BaseMessages.getString(PKG,
      "TransMeta.Exception.ErrorOpeningOrValidatingTheXMLFile", getFilename()), e);
  }
}
From source file:org.pentaho.di.ui.i18n.MessagesSourceCrawler.java
public void crawl() throws Exception {
  for (final String sourceDirectory : sourceDirectories) {
    FileObject folder = KettleVFS.getFileObject(sourceDirectory);
    FileObject[] javaFiles = folder.findFiles(new FileSelector() {
      @Override
      public boolean traverseDescendents(FileSelectInfo info) throws Exception {
        return true;
      }

      @Override
      public boolean includeFile(FileSelectInfo info) throws Exception {
        return info.getFile().getName().getExtension().equals("java");
      }
    });

    for (FileObject javaFile : javaFiles) {
      // We don't want the Messages.java files; there is nothing in there for us.
      boolean skip = false;
      for (String filename : filesToAvoid) {
        if (javaFile.getName().getBaseName().equals(filename)) {
          skip = true;
        }
      }
      if (skip) {
        continue; // don't process this file.
      }

      // For each of these files we look for keys...
      //
      lookForOccurrencesInFile(sourceDirectory, javaFile);
    }
  }

  // Also search for keys in the XUL files...
  //
  for (SourceCrawlerXMLFolder xmlFolder : xmlFolders) {
    String[] xmlDirs = { xmlFolder.getFolder(), };
    String[] xmlMasks = { xmlFolder.getWildcard(), };
    String[] xmlReq = { "N", };
    boolean[] xmlSubdirs = { true, }; // search sub-folders too

    FileInputList xulFileInputList = FileInputList.createFileList(new Variables(), xmlDirs, xmlMasks, xmlReq,
      xmlSubdirs);
    for (FileObject fileObject : xulFileInputList.getFiles()) {
      try {
        Document doc = XMLHandler.loadXMLFile(fileObject);

        // Scan for elements and tags in this file...
        //
        for (SourceCrawlerXMLElement xmlElement : xmlFolder.getElements()) {
          addLabelOccurrences(xmlFolder.getDefaultSourceFolder(), fileObject,
            doc.getElementsByTagName(xmlElement.getSearchElement()), xmlFolder.getKeyPrefix(),
            xmlElement.getKeyTag(), xmlElement.getKeyAttribute(), xmlFolder.getDefaultPackage(),
            xmlFolder.getPackageExceptions());
        }
      } catch (KettleXMLException e) {
        log.logError("Unable to open XUL / XML document: " + fileObject);
      }
    }
  }
}
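The crawler above is the most compact illustration of the recurring pattern on this page: findFiles() with a FileSelector whose decisions are driven entirely by FileObject.getName(). A stripped-down sketch of that pattern, assuming a hypothetical source tree at /tmp/src, might look like this:

import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSelectInfo;
import org.apache.commons.vfs.FileSelector;
import org.apache.commons.vfs.VFS;

public class JavaFileFinder {
  public static void main(String[] args) throws Exception {
    FileObject folder = VFS.getManager().resolveFile("/tmp/src");
    FileObject[] javaFiles = folder.findFiles(new FileSelector() {
      public boolean traverseDescendents(FileSelectInfo info) {
        return true; // descend into every sub-folder
      }

      public boolean includeFile(FileSelectInfo info) {
        // Keep only *.java files, skipping generated Messages.java files.
        return "java".equals(info.getFile().getName().getExtension())
          && !"Messages.java".equals(info.getFile().getName().getBaseName());
      }
    });
    for (FileObject javaFile : javaFiles) {
      System.out.println(javaFile.getName().getURI());
    }
  }
}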