List of usage examples for org.apache.commons.vfs FileObject getName
public FileName getName();
From source file:org.pentaho.di.job.entries.unzip.JobEntryUnZip.java
private boolean takeThisFile(FileObject sourceFile, String destinationFile) throws FileSystemException { boolean retval = false; File destination = new File(destinationFile); if (!destination.exists()) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.CanNotFindFile", destinationFile)); }//from w ww. j a v a 2s. com return true; } if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileExists", destinationFile)); } if (iffileexist == IF_FILE_EXISTS_SKIP) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileSkip", destinationFile)); } return false; } if (iffileexist == IF_FILE_EXISTS_FAIL) { updateErrors(); logError(BaseMessages.getString(PKG, "JobUnZip.Log.FileError", destinationFile, "" + NrErrors)); return false; } if (iffileexist == IF_FILE_EXISTS_OVERWRITE) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileOverwrite", destinationFile)); } return true; } Long entrySize = sourceFile.getContent().getSize(); Long destinationSize = destination.length(); if (iffileexist == IF_FILE_EXISTS_OVERWRITE_DIFF_SIZE) { if (entrySize != destinationSize) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileDiffSize.Diff", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return true; } else { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileDiffSize.Same", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return false; } } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_EQUAL_SIZE) { if (entrySize == destinationSize) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileEqualSize.Same", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return true; } else { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileEqualSize.Diff", sourceFile.getName().getURI(), "" + entrySize, 
destinationFile, "" + destinationSize)); } return false; } } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_BIG) { if (entrySize > destinationSize) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileBigSize.Big", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return true; } else { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileBigSize.Small", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return false; } } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_BIG_EQUAL) { if (entrySize >= destinationSize) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileBigEqualSize.Big", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return true; } else { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileBigEqualSize.Small", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return false; } } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_SMALL) { if (entrySize < destinationSize) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileSmallSize.Small", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return true; } else { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileSmallSize.Big", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return false; } } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_SMALL_EQUAL) { if (entrySize <= destinationSize) { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileSmallEqualSize.Small", sourceFile.getName().getURI(), "" + entrySize, destinationFile, "" + destinationSize)); } return true; } else { if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.FileSmallEqualSize.Big", sourceFile.getName().getURI(), 
"" + entrySize, destinationFile, "" + destinationSize)); } return false; } } if (iffileexist == IF_FILE_EXISTS_UNIQ) { // Create file with unique name return true; } return retval; }
From source file:org.pentaho.di.job.entries.unzip.JobEntryUnZip.java
/**
 * Computes the target (unzipped) file name for the given source file, optionally
 * appending a date and/or time suffix between the base name and the extension.
 *
 * @param file the source file the target name is derived from
 * @return the calculated target filename (full VFS path)
 * @throws FileSystemException if the file type or last-modified time cannot be read
 */
protected String getTargetFilename(FileObject file) throws FileSystemException {

    String retval = "";
    String filename = file.getName().getPath();

    if (filename != null) {
        retval = filename;
    }

    // Folders and other non-plain files keep their name unchanged.
    if (file.getType() != FileType.FILE) {
        return retval;
    }

    // No date/time suffix requested: return the plain path as-is.
    if (!SpecifyFormat && !adddate && !addtime) {
        return retval;
    }

    // Split off the extension; when there is no dot, lastindexOfDot == lenstring
    // and the "extension" re-attached at the end is the empty string.
    int lenstring = retval.length();
    int lastindexOfDot = retval.lastIndexOf('.');
    if (lastindexOfDot == -1) {
        lastindexOfDot = lenstring;
    }

    retval = retval.substring(0, lastindexOfDot);

    // Lazily create the shared formatter; its pattern is applied below only until
    // dateFormatSet flips to true at the end of the first call.
    if (daf == null) {
        daf = new SimpleDateFormat();
    }

    // Either "now" or the source file's original last-modified time.
    Date timestamp = new Date();
    if (addOriginalTimestamp) {
        timestamp = new Date(file.getContent().getLastModifiedTime());
    }

    if (SpecifyFormat && !Const.isEmpty(date_time_format)) {
        if (!dateFormatSet) {
            daf.applyPattern(date_time_format);
        }
        String dt = daf.format(timestamp);
        retval += dt;
    } else {
        // NOTE(review): when both adddate and addtime are enabled, dateFormatSet
        // only becomes true after this method finishes, so on subsequent calls the
        // formatter retains the LAST pattern applied during the first call
        // ("HHmmssSSS"), which is then used for the date part too — confirm this
        // caching behavior is intended.
        if (adddate) {
            if (!dateFormatSet) {
                daf.applyPattern("yyyyMMdd");
            }
            String d = daf.format(timestamp);
            retval += "_" + d;
        }
        if (addtime) {
            if (!dateFormatSet) {
                daf.applyPattern("HHmmssSSS");
            }
            String t = daf.format(timestamp);
            retval += "_" + t;
        }
    }

    if (daf != null) {
        dateFormatSet = true;
    }

    // Re-attach the original extension (empty when there was no dot).
    retval += filename.substring(lastindexOfDot, lenstring);

    return retval;
}
From source file:org.pentaho.di.job.entries.writetofile.JobEntryWriteToFile.java
private void createParentFolder(String realFilename) throws KettleException { FileObject parent = null; try {//from w ww . ja va 2 s . c o m parent = KettleVFS.getFileObject(realFilename).getParent(); if (!parent.exists()) { if (isCreateParentFolder()) { if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobWriteToFile.Log.ParentFoldetNotExist", parent.getName().toString())); } parent.createFolder(); if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobWriteToFile.Log.ParentFolderCreated", parent.getName().toString())); } } else { throw new KettleException(BaseMessages.getString(PKG, "JobWriteToFile.Log.ParentFoldetNotExist", parent.getName().toString())); } } } catch (Exception e) { throw new KettleException( BaseMessages.getString(PKG, "JobWriteToFile.Error.CheckingParentFolder", realFilename), e); } finally { if (parent != null) { try { parent.close(); } catch (Exception e) { /* Ignore */ } } } }
From source file:org.pentaho.di.job.entries.zipfile.JobEntryZipFile.java
private boolean createParentFolder(String filename) { // Check for parent folder FileObject parentfolder = null; boolean result = false; try {/* w w w . j av a 2s . c o m*/ // Get parent folder parentfolder = KettleVFS.getFileObject(filename, this).getParent(); if (!parentfolder.exists()) { if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryZipFile.CanNotFindFolder", "" + parentfolder.getName())); } parentfolder.createFolder(); if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryZipFile.FolderCreated", "" + parentfolder.getName())); } } else { if (log.isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobEntryZipFile.FolderExists", "" + parentfolder.getName())); } } result = true; } catch (Exception e) { logError(BaseMessages.getString(PKG, "JobEntryZipFile.CanNotCreateFolder", "" + parentfolder.getName()), e); } finally { if (parentfolder != null) { try { parentfolder.close(); parentfolder = null; } catch (Exception ex) { // Ignore } } } return result; }
From source file:org.pentaho.di.job.entries.zipfile.JobEntryZipFile.java
public boolean processRowFile(Job parentJob, Result result, String realZipfilename, String realWildcard, String realWildcardExclude, String realSourceDirectoryOrFile, String realMovetodirectory, boolean createparentfolder) { boolean Fileexists = false; File tempFile = null;//from w ww .j ava 2 s . c o m File fileZip = null; boolean resultat = false; boolean renameOk = false; boolean orginExist = false; // Check if target file/folder exists! FileObject originFile = null; ZipInputStream zin = null; byte[] buffer = null; OutputStream dest = null; BufferedOutputStream buff = null; ZipOutputStream out = null; ZipEntry entry = null; String localSourceFilename = realSourceDirectoryOrFile; try { originFile = KettleVFS.getFileObject(realSourceDirectoryOrFile, this); localSourceFilename = KettleVFS.getFilename(originFile); orginExist = originFile.exists(); } catch (Exception e) { // Ignore errors } finally { if (originFile != null) { try { originFile.close(); } catch (IOException ex) { logError("Error closing file '" + originFile.toString() + "'", ex); } } } String localrealZipfilename = realZipfilename; if (realZipfilename != null && orginExist) { FileObject fileObject = null; try { fileObject = KettleVFS.getFileObject(localrealZipfilename, this); localrealZipfilename = KettleVFS.getFilename(fileObject); // Check if Zip File exists if (fileObject.exists()) { Fileexists = true; if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobZipFiles.Zip_FileExists1.Label") + localrealZipfilename + BaseMessages.getString(PKG, "JobZipFiles.Zip_FileExists2.Label")); } } // Let's see if we need to create parent folder of destination zip filename if (createparentfolder) { createParentFolder(localrealZipfilename); } // Let's start the process now if (ifZipFileExists == 3 && Fileexists) { // the zip file exists and user want to Fail resultat = false; } else if (ifZipFileExists == 2 && Fileexists) { // the zip file exists and user want to do nothing if (addFileToResult) { // Add file 
to result files name ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject, parentJob.getJobname(), toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } resultat = true; } else if (afterZip == 2 && realMovetodirectory == null) { // After Zip, Move files..User must give a destination Folder resultat = false; logError( BaseMessages.getString(PKG, "JobZipFiles.AfterZip_No_DestinationFolder_Defined.Label")); } else { // After Zip, Move files..User must give a destination Folder // Let's see if we deal with file or folder FileObject[] fileList = null; FileObject sourceFileOrFolder = KettleVFS.getFileObject(localSourceFilename); boolean isSourceDirectory = sourceFileOrFolder.getType().equals(FileType.FOLDER); final Pattern pattern; final Pattern patternexclude; if (isSourceDirectory) { // Let's prepare the pattern matcher for performance reasons. // We only do this if the target is a folder ! // if (!Const.isEmpty(realWildcard)) { pattern = Pattern.compile(realWildcard); } else { pattern = null; } if (!Const.isEmpty(realWildcardExclude)) { patternexclude = Pattern.compile(realWildcardExclude); } else { patternexclude = null; } // Target is a directory // Get all the files in the directory... // if (includingSubFolders) { fileList = sourceFileOrFolder.findFiles(new FileSelector() { public boolean traverseDescendents(FileSelectInfo fileInfo) throws Exception { return true; } public boolean includeFile(FileSelectInfo fileInfo) throws Exception { boolean include; // Only include files in the sub-folders... 
// When we include sub-folders we match the whole filename, not just the base-name // if (fileInfo.getFile().getType().equals(FileType.FILE)) { include = true; if (pattern != null) { String name = fileInfo.getFile().getName().getPath(); include = pattern.matcher(name).matches(); } if (include && patternexclude != null) { String name = fileInfo.getFile().getName().getPath(); include = !pattern.matcher(name).matches(); } } else { include = false; } return include; } }); } else { fileList = sourceFileOrFolder.getChildren(); } } else { pattern = null; patternexclude = null; // Target is a file fileList = new FileObject[] { sourceFileOrFolder }; } if (fileList.length == 0) { resultat = false; logError(BaseMessages.getString(PKG, "JobZipFiles.Log.FolderIsEmpty", localSourceFilename)); } else if (!checkContainsFile(localSourceFilename, fileList, isSourceDirectory)) { resultat = false; logError(BaseMessages.getString(PKG, "JobZipFiles.Log.NoFilesInFolder", localSourceFilename)); } else { if (ifZipFileExists == 0 && Fileexists) { // the zip file exists and user want to create new one with unique name // Format Date // do we have already a .zip at the end? if (localrealZipfilename.toLowerCase().endsWith(".zip")) { // strip this off localrealZipfilename = localrealZipfilename.substring(0, localrealZipfilename.length() - 4); } localrealZipfilename += "_" + StringUtil.getFormattedDateTimeNow(true) + ".zip"; if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobZipFiles.Zip_FileNameChange1.Label") + localrealZipfilename + BaseMessages.getString(PKG, "JobZipFiles.Zip_FileNameChange1.Label")); } } else if (ifZipFileExists == 1 && Fileexists) { // the zip file exists and user want to append // get a temp file fileZip = getFile(localrealZipfilename); tempFile = File.createTempFile(fileZip.getName(), null); // delete it, otherwise we cannot rename existing zip to it. 
tempFile.delete(); renameOk = fileZip.renameTo(tempFile); if (!renameOk) { logError(BaseMessages.getString(PKG, "JobZipFiles.Cant_Rename_Temp1.Label") + fileZip.getAbsolutePath() + BaseMessages.getString(PKG, "JobZipFiles.Cant_Rename_Temp2.Label") + tempFile.getAbsolutePath() + BaseMessages.getString(PKG, "JobZipFiles.Cant_Rename_Temp3.Label")); } if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobZipFiles.Zip_FileAppend1.Label") + localrealZipfilename + BaseMessages.getString(PKG, "JobZipFiles.Zip_FileAppend2.Label")); } } if (log.isDetailed()) { logDetailed( BaseMessages.getString(PKG, "JobZipFiles.Files_Found1.Label") + fileList.length + BaseMessages.getString(PKG, "JobZipFiles.Files_Found2.Label") + localSourceFilename + BaseMessages.getString(PKG, "JobZipFiles.Files_Found3.Label")); } // Prepare Zip File buffer = new byte[18024]; dest = KettleVFS.getOutputStream(localrealZipfilename, false); buff = new BufferedOutputStream(dest); out = new ZipOutputStream(buff); HashSet<String> fileSet = new HashSet<String>(); if (renameOk) { // User want to append files to existing Zip file // The idea is to rename the existing zip file to a temporary file // and then adds all entries in the existing zip along with the new files, // excluding the zip entries that have the same name as one of the new files. zin = new ZipInputStream(new FileInputStream(tempFile)); entry = zin.getNextEntry(); while (entry != null) { String name = entry.getName(); if (!fileSet.contains(name)) { // Add ZIP entry to output stream. 
out.putNextEntry(new ZipEntry(name)); // Transfer bytes from the ZIP file to the output file int len; while ((len = zin.read(buffer)) > 0) { out.write(buffer, 0, len); } fileSet.add(name); } entry = zin.getNextEntry(); } // Close the streams zin.close(); } // Set the method out.setMethod(ZipOutputStream.DEFLATED); // Set the compression level if (compressionRate == 0) { out.setLevel(Deflater.NO_COMPRESSION); } else if (compressionRate == 1) { out.setLevel(Deflater.DEFAULT_COMPRESSION); } if (compressionRate == 2) { out.setLevel(Deflater.BEST_COMPRESSION); } if (compressionRate == 3) { out.setLevel(Deflater.BEST_SPEED); } // Specify Zipped files (After that we will move,delete them...) FileObject[] zippedFiles = new FileObject[fileList.length]; int fileNum = 0; // Get the files in the list... for (int i = 0; i < fileList.length && !parentJob.isStopped(); i++) { boolean getIt = true; boolean getItexclude = false; // First see if the file matches the regular expression! // ..only if target is a folder ! if (isSourceDirectory) { // If we include sub-folders, we match on the whole name, not just the basename // String filename; if (includingSubFolders) { filename = fileList[i].getName().getPath(); } else { filename = fileList[i].getName().getBaseName(); } if (pattern != null) { // Matches the base name of the file (backward compatible!) 
// Matcher matcher = pattern.matcher(filename); getIt = matcher.matches(); } if (patternexclude != null) { Matcher matcherexclude = patternexclude.matcher(filename); getItexclude = matcherexclude.matches(); } } // Get processing File String targetFilename = KettleVFS.getFilename(fileList[i]); if (sourceFileOrFolder.getType().equals(FileType.FILE)) { targetFilename = localSourceFilename; } FileObject file = KettleVFS.getFileObject(targetFilename); boolean isTargetDirectory = file.exists() && file.getType().equals(FileType.FOLDER); if (getIt && !getItexclude && !isTargetDirectory && !fileSet.contains(targetFilename)) { // We can add the file to the Zip Archive if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobZipFiles.Add_FilesToZip1.Label") + fileList[i] + BaseMessages.getString(PKG, "JobZipFiles.Add_FilesToZip2.Label") + localSourceFilename + BaseMessages.getString(PKG, "JobZipFiles.Add_FilesToZip3.Label")); } // Associate a file input stream for the current file InputStream in = KettleVFS.getInputStream(file); // Add ZIP entry to output stream. 
// String relativeName; String fullName = fileList[i].getName().getPath(); String basePath = sourceFileOrFolder.getName().getPath(); if (isSourceDirectory) { if (fullName.startsWith(basePath)) { relativeName = fullName.substring(basePath.length() + 1); } else { relativeName = fullName; } } else if (isFromPrevious) { int depth = determineDepth(environmentSubstitute(storedSourcePathDepth)); relativeName = determineZipfilenameForDepth(fullName, depth); } else { relativeName = fileList[i].getName().getBaseName(); } out.putNextEntry(new ZipEntry(relativeName)); int len; while ((len = in.read(buffer)) > 0) { out.write(buffer, 0, len); } out.flush(); out.closeEntry(); // Close the current file input stream in.close(); // Get Zipped File zippedFiles[fileNum] = fileList[i]; fileNum = fileNum + 1; } } // Close the ZipOutPutStream out.close(); buff.close(); dest.close(); if (log.isBasic()) { logBasic(BaseMessages.getString(PKG, "JobZipFiles.Log.TotalZippedFiles", "" + zippedFiles.length)); } // Delete Temp File if (tempFile != null) { tempFile.delete(); } // -----Get the list of Zipped Files and Move or Delete Them if (afterZip == 1 || afterZip == 2) { // iterate through the array of Zipped files for (int i = 0; i < zippedFiles.length; i++) { if (zippedFiles[i] != null) { // Delete, Move File FileObject fileObjectd = zippedFiles[i]; if (!isSourceDirectory) { fileObjectd = KettleVFS.getFileObject(localSourceFilename); } // Here we can move, delete files if (afterZip == 1) { // Delete File boolean deleted = fileObjectd.delete(); if (!deleted) { resultat = false; logError(BaseMessages.getString(PKG, "JobZipFiles.Cant_Delete_File1.Label") + localSourceFilename + Const.FILE_SEPARATOR + zippedFiles[i] + BaseMessages .getString(PKG, "JobZipFiles.Cant_Delete_File2.Label")); } // File deleted if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobZipFiles.File_Deleted1.Label") + localSourceFilename + Const.FILE_SEPARATOR + zippedFiles[i] + BaseMessages.getString(PKG, 
"JobZipFiles.File_Deleted2.Label")); } } else if (afterZip == 2) { // Move File FileObject fileObjectm = null; try { fileObjectm = KettleVFS.getFileObject(realMovetodirectory + Const.FILE_SEPARATOR + fileObjectd.getName().getBaseName()); fileObjectd.moveTo(fileObjectm); } catch (IOException e) { logError( BaseMessages.getString(PKG, "JobZipFiles.Cant_Move_File1.Label") + zippedFiles[i] + BaseMessages.getString(PKG, "JobZipFiles.Cant_Move_File2.Label") + e.getMessage()); resultat = false; } finally { try { if (fileObjectm != null) { fileObjectm.close(); } } catch (Exception e) { if (fileObjectm != null) { logError("Error closing file '" + fileObjectm.toString() + "'", e); } } } // File moved if (log.isDebug()) { logDebug(BaseMessages.getString(PKG, "JobZipFiles.File_Moved1.Label") + zippedFiles[i] + BaseMessages.getString(PKG, "JobZipFiles.File_Moved2.Label")); } } } } } if (addFileToResult) { // Add file to result files name ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, fileObject, parentJob.getJobname(), toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); } resultat = true; } } } catch (Exception e) { logError(BaseMessages.getString(PKG, "JobZipFiles.Cant_CreateZipFile1.Label") + localrealZipfilename + BaseMessages.getString(PKG, "JobZipFiles.Cant_CreateZipFile2.Label"), e); resultat = false; } finally { if (fileObject != null) { try { fileObject.close(); fileObject = null; } catch (IOException ex) { logError("Error closing file '" + fileObject.toString() + "'", ex); } } try { if (out != null) { out.close(); } if (buff != null) { buff.close(); } if (dest != null) { dest.close(); } if (zin != null) { zin.close(); } if (entry != null) { entry = null; } } catch (IOException ex) { logError("Error closing zip file entry for file '" + originFile.toString() + "'", ex); } } } else { resultat = true; if (localrealZipfilename == null) { logError(BaseMessages.getString(PKG, "JobZipFiles.No_ZipFile_Defined.Label")); } 
if (!orginExist) { logError(BaseMessages.getString(PKG, "JobZipFiles.No_FolderCible_Defined.Label", localSourceFilename)); } } // return a verifier return resultat; }
From source file:org.pentaho.di.job.entries.zipfile.JobEntryZipFile.java
/** * Get the requested part of the filename * * @param filename/* ww w . jav a2 s . c om*/ * the filename (full) (/path/to/a/file.txt) * @param depth * the depth to get. 0 means: the complete filename, 1: the name only (file.txt), 2: one folder (a/file.txt) * 3: two folders (to/a/file.txt) and so on. * @return the requested part of the file name up to a certain depth * @throws KettleFileException */ private String determineZipfilenameForDepth(String filename, int depth) throws KettleException { try { if (Const.isEmpty(filename)) { return null; } if (depth == 0) { return filename; } FileObject fileObject = KettleVFS.getFileObject(filename); FileObject folder = fileObject.getParent(); String baseName = fileObject.getName().getBaseName(); if (depth == 1) { return baseName; } StringBuilder path = new StringBuilder(baseName); int d = 1; while (d < depth && folder != null) { path.insert(0, '/'); path.insert(0, folder.getName().getBaseName()); folder = folder.getParent(); d++; } return path.toString(); } catch (Exception e) { throw new KettleException("Unable to get zip filename '" + filename + "' to depth " + depth, e); } }
From source file:org.pentaho.di.job.Job.java
/** * Sets the internal kettle variables.// w w w . ja v a 2 s . com * * @param var * the new internal kettle variables. */ public void setInternalKettleVariables(VariableSpace var) { if (jobMeta != null && jobMeta.getFilename() != null) // we have a finename that's defined. { try { FileObject fileObject = KettleVFS.getFileObject(jobMeta.getFilename(), this); FileName fileName = fileObject.getName(); // The filename of the transformation var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, fileName.getBaseName()); // The directory of the transformation FileName fileDir = fileName.getParent(); var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, fileDir.getURI()); } catch (Exception e) { var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, ""); var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, ""); } } else { var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, ""); var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, ""); } // The name of the job var.setVariable(Const.INTERNAL_VARIABLE_JOB_NAME, Const.NVL(jobMeta.getName(), "")); // The name of the directory in the repository var.setVariable(Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, jobMeta.getRepositoryDirectory() != null ? jobMeta.getRepositoryDirectory().getPath() : ""); // Undefine the transformation specific variables var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_NAME, null); var.setVariable(Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY, null); }
From source file:org.pentaho.di.job.Job.java
/**
 * Sends the job to a slave (Carte) server for remote execution and starts it there.
 *
 * @param jobMeta                the job metadata to execute remotely
 * @param executionConfiguration the execution configuration (remote server, log level, variables, ...)
 * @param repository             the repository (used when exporting referenced resources)
 * @param metaStore              the metaStore
 * @return the Carte object id assigned to the running job on the slave server
 * @throws KettleException when no slave server is set, the job has no name, or any
 *                         step of the export/post/start round-trip fails
 */
public static String sendToSlaveServer(JobMeta jobMeta, JobExecutionConfiguration executionConfiguration,
        Repository repository, IMetaStore metaStore) throws KettleException {
    String carteObjectId;
    SlaveServer slaveServer = executionConfiguration.getRemoteServer();

    if (slaveServer == null) {
        throw new KettleException(BaseMessages.getString(PKG, "Job.Log.NoSlaveServerSpecified"));
    }
    if (Const.isEmpty(jobMeta.getName())) {
        throw new KettleException(BaseMessages.getString(PKG, "Job.Log.UniqueJobName"));
    }

    // Align logging levels between execution configuration and remote server
    slaveServer.getLogChannel().setLogLevel(executionConfiguration.getLogLevel());

    try {
        // Inject certain internal variables to make it more intuitive.
        for (String var : Const.INTERNAL_TRANS_VARIABLES) {
            executionConfiguration.getVariables().put(var, jobMeta.getVariable(var));
        }
        for (String var : Const.INTERNAL_JOB_VARIABLES) {
            executionConfiguration.getVariables().put(var, jobMeta.getVariable(var));
        }

        if (executionConfiguration.isPassingExport()) {
            // First export the job (and everything it references) to a temporary zip file...
            FileObject tempFile = KettleVFS.createTempFile("jobExport", ".zip",
                    System.getProperty("java.io.tmpdir"), jobMeta);

            TopLevelResource topLevelResource = ResourceUtil.serializeResourceExportInterface(
                    tempFile.getName().toString(), jobMeta, jobMeta, repository, metaStore,
                    executionConfiguration.getXML(), CONFIGURATION_IN_EXPORT_FILENAME);

            // Send the zip file over to the slave server...
            String result = slaveServer.sendExport(topLevelResource.getArchiveName(), AddExportServlet.TYPE_JOB,
                    topLevelResource.getBaseResourceName());
            WebResult webResult = WebResult.fromXMLString(result);
            if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
                throw new KettleException("There was an error passing the exported job to the remote server: "
                        + Const.CR + webResult.getMessage());
            }
            carteObjectId = webResult.getId();
        } else {
            // No export: post the job definition as XML directly.
            String xml = new JobConfiguration(jobMeta, executionConfiguration).getXML();

            String reply = slaveServer.sendXML(xml, AddJobServlet.CONTEXT_PATH + "/?xml=Y");
            WebResult webResult = WebResult.fromXMLString(reply);
            if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
                throw new KettleException("There was an error posting the job on the remote server: " + Const.CR
                        + webResult.getMessage());
            }
            carteObjectId = webResult.getId();
        }

        // Start the job on the slave server, referencing it by name and Carte id.
        String reply = slaveServer.execService(StartJobServlet.CONTEXT_PATH + "/?name="
                + URLEncoder.encode(jobMeta.getName(), "UTF-8") + "&xml=Y&id=" + carteObjectId);
        WebResult webResult = WebResult.fromXMLString(reply);
        if (!webResult.getResult().equalsIgnoreCase(WebResult.STRING_OK)) {
            throw new KettleException("There was an error starting the job on the remote server: " + Const.CR
                    + webResult.getMessage());
        }
        return carteObjectId;
    } catch (KettleException ke) {
        // Pass Kettle exceptions through unwrapped.
        throw ke;
    } catch (Exception e) {
        throw new KettleException(e);
    }
}
From source file:org.pentaho.di.job.JobMeta.java
/** * Sets the internal filename kettle variables. * * @param var//from w ww . java 2 s . c om * the new internal filename kettle variables */ @Override protected void setInternalFilenameKettleVariables(VariableSpace var) { if (filename != null) { // we have a filename that's defined. try { FileObject fileObject = KettleVFS.getFileObject(filename, var); FileName fileName = fileObject.getName(); // The filename of the job var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, fileName.getBaseName()); // The directory of the job FileName fileDir = fileName.getParent(); var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, fileDir.getURI()); } catch (Exception e) { var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, ""); var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, ""); } } else { var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, ""); var.setVariable(Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME, ""); } }
From source file:org.pentaho.di.job.JobMeta.java
/**
 * Exports this job and the resources used by its job entries into the given
 * definitions map, computing a consistent resource name for both repository-based
 * and file-based jobs.
 *
 * @param space           variable space used to resolve the filename
 * @param definitions     map of already-exported resource definitions, added to in place
 * @param namingInterface strategy used to compute exported resource names
 * @param repository      the repository (passed through to job entries)
 * @param metaStore       the metaStore (passed through to job entries)
 * @return the computed resource name for this job
 * @throws KettleException when the job or one of its resources cannot be read
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions,
        ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore)
        throws KettleException {
    String resourceName = null;
    try {
        // Handle naming for both repository and XML bases resources...
        String baseName;
        String originalPath;
        String fullname;
        String extension = "kjb";
        if (Const.isEmpty(getFilename())) {
            // Assume repository: derive the path from the repository directory.
            originalPath = directory.getPath();
            baseName = getName();
            fullname = directory.getPath()
                    + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? ""
                            : RepositoryDirectory.DIRECTORY_SEPARATOR)
                    + getName() + "." + extension;
        } else {
            // Assume file: derive the path from the (variable-substituted) filename.
            FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
            originalPath = fileObject.getParent().getName().getPath();
            baseName = fileObject.getName().getBaseName();
            fullname = fileObject.getName().getPath();
        }

        resourceName = namingInterface.nameResource(baseName, originalPath, extension,
                ResourceNamingInterface.FileNamingType.JOB);
        ResourceDefinition definition = definitions.get(resourceName);
        if (definition == null) {
            // If we do this once, it will be plenty :-)
            JobMeta jobMeta = (JobMeta) this.realClone(false);

            // All objects get re-located to the root folder
            jobMeta.setRepositoryDirectory(new RepositoryDirectory());

            // Add used resources, modifying the cloned jobMeta accordingly: loop
            // over the job entry copies; databases will be exported to XML anyway.
            for (JobEntryCopy jobEntry : jobMeta.jobcopies) {
                compatibleJobEntryExportResources(jobEntry.getEntry(), jobMeta, definitions, namingInterface,
                        repository);
                jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface, repository, metaStore);
            }

            // Set a number of parameters for all the data files referenced so far...
            Map<String, String> directoryMap = namingInterface.getDirectoryMap();
            if (directoryMap != null) {
                for (String directory : directoryMap.keySet()) {
                    String parameterName = directoryMap.get(directory);
                    jobMeta.addParameterDefinition(parameterName, directory,
                            "Data file path discovered during export");
                }
            }

            // At the end, add ourselves to the map...
            String jobMetaContent = jobMeta.getXML();

            definition = new ResourceDefinition(resourceName, jobMetaContent);

            // Also remember the original filename (if any), including variables etc.
            if (Const.isEmpty(this.getFilename())) { // Repository
                definition.setOrigin(fullname);
            } else {
                definition.setOrigin(this.getFilename());
            }

            // NOTE(review): the cache lookup above reads definitions.get(resourceName)
            // but the definition is stored here under 'fullname' — so the lookup can
            // never find an entry this method stored, and the export work repeats on
            // every call. Confirm which key callers rely on before changing either side.
            definitions.put(fullname, definition);
        }
    } catch (FileSystemException e) {
        throw new KettleException(
                BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
    } catch (KettleFileException e) {
        throw new KettleException(
                BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
    }
    return resourceName;
}