List of usage examples for org.apache.commons.vfs FileObject getParent
public FileObject getParent() throws FileSystemException;
From source file:org.pentaho.di.job.entries.pgpencryptfiles.JobEntryPGPEncryptFiles.java
private boolean CreateDestinationFolder(FileObject filefolder) { FileObject folder = null;//from w ww .j a v a 2 s . com try { if (destination_is_a_file) { folder = filefolder.getParent(); } else { folder = filefolder; } if (!folder.exists()) { if (create_destination_folder) { if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobPGPEncryptFiles.Log.FolderNotExist", folder.getName().toString())); } folder.createFolder(); if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JobPGPEncryptFiles.Log.FolderWasCreated", folder.getName().toString())); } } else { logError(BaseMessages.getString(PKG, "JobPGPEncryptFiles.Log.FolderNotExist", folder.getName().toString())); return false; } } return true; } catch (Exception e) { logError(BaseMessages.getString(PKG, "JobPGPEncryptFiles.Log.CanNotCreateParentFolder", folder.getName().toString()), e); } finally { if (folder != null) { try { folder.close(); } catch (Exception ex) { /* Ignore */ } } } return false; }
From source file:org.pentaho.di.job.entries.unzip.JobEntryUnZip.java
/**
 * Unzips one source archive into the target directory, optionally creating a root folder
 * named after the archive, filtering entries with include/exclude regexes, and afterwards
 * deleting (afterunzip == 1) or moving (afterunzip == 2) the original archive.
 *
 * @param sourceFileObject     the zip archive to process
 * @param realTargetdirectory  resolved target directory for extracted entries
 * @param realWildcard         include regex applied to each entry URI (null/empty = all)
 * @param realWildcardExclude  exclude regex applied to each entry URI (null/empty = none)
 * @param result               job result the extracted filenames are added to
 * @param parentJob            parent job (used for result filenames)
 * @param fileObject           the archive as originally resolved (deleted/moved afterwards)
 * @param movetodir            destination folder used when afterunzip == 2
 * @param realMovetodirectory  resolved move-to directory (only used in log messages)
 * @return true when the archive was processed, false when the success condition broke
 */
private boolean unzipFile(FileObject sourceFileObject, String realTargetdirectory, String realWildcard,
    String realWildcardExclude, Result result, Job parentJob, FileObject fileObject, FileObject movetodir,
    String realMovetodirectory) {
  boolean retval = false;
  String unzipToFolder = realTargetdirectory;
  try {
    if (log.isDetailed()) {
      logDetailed(
          BaseMessages.getString(PKG, "JobUnZip.Log.ProcessingFile", sourceFileObject.toString()));
    }
    // Do you create a root folder?
    // When rootzip is set, extraction goes into <target>/<archive name without extension>.
    if (rootzip) {
      String shortSourceFilename = sourceFileObject.getName().getBaseName();
      int lenstring = shortSourceFilename.length();
      int lastindexOfDot = shortSourceFilename.lastIndexOf('.');
      // No dot => keep the whole base name.
      if (lastindexOfDot == -1) {
        lastindexOfDot = lenstring;
      }
      String foldername = realTargetdirectory + "/" + shortSourceFilename.substring(0, lastindexOfDot);
      FileObject rootfolder = KettleVFS.getFileObject(foldername, this);
      if (!rootfolder.exists()) {
        try {
          rootfolder.createFolder();
          if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobUnZip.Log.RootFolderCreated", foldername));
          }
        } catch (Exception e) {
          throw new Exception(
              BaseMessages.getString(PKG, "JobUnZip.Error.CanNotCreateRootFolder", foldername), e);
        }
      }
      unzipToFolder = foldername;
    }
    // Try to read the entries from the VFS object...
    // The "zip:" scheme lets Commons VFS expose the archive contents as a virtual filesystem.
    String zipFilename = "zip:" + sourceFileObject.getName().getFriendlyURI();
    FileObject zipFile = KettleVFS.getFileObject(zipFilename, this);
    FileObject[] items = zipFile.findFiles(new AllFileSelector() {
      public boolean traverseDescendents(FileSelectInfo info) {
        return true;
      }

      public boolean includeFile(FileSelectInfo info) {
        // Never return the parent directory of a file list.
        if (info.getDepth() == 0) {
          return false;
        }
        FileObject fileObject = info.getFile();
        return fileObject != null;
      }
    });
    // Pre-compile the include/exclude patterns once for the whole archive.
    Pattern pattern = null;
    if (!Const.isEmpty(realWildcard)) {
      pattern = Pattern.compile(realWildcard);
    }
    Pattern patternexclude = null;
    if (!Const.isEmpty(realWildcardExclude)) {
      patternexclude = Pattern.compile(realWildcardExclude);
    }
    for (FileObject item : items) {
      // Abort the whole extraction as soon as the success condition is broken.
      if (successConditionBroken) {
        if (!successConditionBrokenExit) {
          logError(BaseMessages.getString(PKG, "JobUnZip.Error.SuccessConditionbroken", "" + NrErrors));
          successConditionBrokenExit = true;
        }
        return false;
      }
      // NOTE(review): serializes on the shared VFS filesystem manager, presumably because
      // closeFileSystem() below is not safe to run concurrently — confirm before removing.
      synchronized (KettleVFS.getInstance().getFileSystemManager()) {
        FileObject newFileObject = null;
        try {
          if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobUnZip.Log.ProcessingZipEntry",
                item.getName().getURI(), sourceFileObject.toString()));
          }
          // get real destination filename
          String newFileName = unzipToFolder + Const.FILE_SEPARATOR + getTargetFilename(item);
          newFileObject = KettleVFS.getFileObject(newFileName, this);
          if (item.getType().equals(FileType.FOLDER)) {
            // Directory
            if (log.isDetailed()) {
              logDetailed(BaseMessages.getString(PKG, "JobUnZip.CreatingDirectory.Label", newFileName));
            }
            // Create Directory if necessary ...
            if (!newFileObject.exists()) {
              newFileObject.createFolder();
            }
          } else {
            // File
            boolean getIt = true;
            boolean getItexclude = false;
            // First see if the file matches the regular expression!
            if (pattern != null) {
              Matcher matcher = pattern.matcher(item.getName().getURI());
              getIt = matcher.matches();
            }
            if (patternexclude != null) {
              Matcher matcherexclude = patternexclude.matcher(item.getName().getURI());
              getItexclude = matcherexclude.matches();
            }
            // takeThisFile applies the configured if-file-exists policy (skip/overwrite/...).
            boolean take = takeThisFile(item, newFileName);
            if (getIt && !getItexclude && take) {
              if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobUnZip.ExtractingEntry.Label",
                    item.getName().getURI(), newFileName));
              }
              if (iffileexist == IF_FILE_EXISTS_UNIQ) {
                // Create file with unique name: insert a timestamp before the extension.
                int lenstring = newFileName.length();
                int lastindexOfDot = newFileName.lastIndexOf('.');
                if (lastindexOfDot == -1) {
                  lastindexOfDot = lenstring;
                }
                newFileName = newFileName.substring(0, lastindexOfDot)
                    + StringUtil.getFormattedDateTimeNow(true)
                    + newFileName.substring(lastindexOfDot, lenstring);
                if (log.isDebug()) {
                  logDebug(BaseMessages.getString(PKG, "JobUnZip.Log.CreatingUniqFile", newFileName));
                }
                // NOTE(review): newFileObject still points at the original name here; the
                // uniquified name is only used by the output stream below — confirm intended.
              }
              // See if the folder to the target file exists...
              if (!newFileObject.getParent().exists()) {
                newFileObject.getParent().createFolder(); // creates the whole path.
              }
              InputStream is = null;
              OutputStream os = null;
              try {
                is = KettleVFS.getInputStream(item);
                os = KettleVFS.getOutputStream(newFileObject, false);
                if (is != null) {
                  // Plain 2KB buffered copy of the entry content.
                  byte[] buff = new byte[2048];
                  int len;
                  while ((len = is.read(buff)) > 0) {
                    os.write(buff, 0, len);
                  }
                  // Add filename to result filenames
                  addFilenameToResultFilenames(result, parentJob, newFileName);
                }
              } finally {
                if (is != null) {
                  is.close();
                }
                if (os != null) {
                  os.close();
                }
              }
            } // end if take
          }
        } catch (Exception e) {
          // An entry failure is counted and logged but does not stop the loop.
          updateErrors();
          logError(BaseMessages.getString(PKG, "JobUnZip.Error.CanNotProcessZipEntry",
              item.getName().getURI(), sourceFileObject.toString()), e);
        } finally {
          if (newFileObject != null) {
            try {
              newFileObject.close();
              if (setOriginalModificationDate) {
                // Change last modification date
                newFileObject.getContent()
                    .setLastModifiedTime(item.getContent().getLastModifiedTime());
              }
            } catch (Exception e) {
              /* Ignore */
            } // ignore this
          }
          // Close file object
          // close() does not release resources!
          KettleVFS.getInstance().getFileSystemManager().closeFileSystem(item.getFileSystem());
          // NOTE(review): nulling the loop's source array inside the loop has no effect on
          // the for-each iteration; it only drops the local reference.
          if (items != null) {
            items = null;
          }
        }
      } // Synchronized block on KettleVFS.getInstance().getFileSystemManager()
    } // End for
    // Here gc() is explicitly called if e.g. createfile is used in the same
    // job for the same file. The problem is that after creating the file the
    // file object is not properly garbaged collected and thus the file cannot
    // be deleted anymore. This is a known problem in the JVM.
    System.gc();
    // Unzip done...
    if (afterunzip == 1) {
      // delete zip file
      boolean deleted = fileObject.delete();
      if (!deleted) {
        updateErrors();
        logError(BaseMessages.getString(PKG, "JobUnZip.Cant_Delete_File.Label",
            sourceFileObject.toString()));
      }
      // File deleted
      if (log.isDebug()) {
        logDebug(BaseMessages.getString(PKG, "JobUnZip.File_Deleted.Label",
            sourceFileObject.toString()));
      }
    } else if (afterunzip == 2) {
      FileObject destFile = null;
      // Move File
      try {
        String destinationFilename = movetodir + Const.FILE_SEPARATOR
            + fileObject.getName().getBaseName();
        destFile = KettleVFS.getFileObject(destinationFilename, this);
        fileObject.moveTo(destFile);
        // File moved
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "JobUnZip.Log.FileMovedTo",
              sourceFileObject.toString(), realMovetodirectory));
        }
      } catch (Exception e) {
        updateErrors();
        logError(BaseMessages.getString(PKG, "JobUnZip.Cant_Move_File.Label",
            sourceFileObject.toString(), realMovetodirectory, e.getMessage()));
      } finally {
        if (destFile != null) {
          try {
            destFile.close();
          } catch (IOException ex) {
            /* Ignore */
          }
        }
      }
    }
    retval = true;
  } catch (Exception e) {
    updateErrors();
    log.logError(BaseMessages.getString(PKG, "JobUnZip.Error.Label"),
        BaseMessages.getString(PKG, "JobUnZip.ErrorUnzip.Label", sourceFileObject.toString(),
            e.getMessage()), e);
  }
  return retval;
}
From source file:org.pentaho.di.job.entries.xmlwellformed.JobEntryXMLWellFormed.java
/**
 * Checks a source file, or every matching file in a source folder, for XML well-formedness
 * via checkOneFile(). Sub-folders are only descended into when include_subfolders is set.
 *
 * @param sourcefilefoldername unresolved source file/folder name (variables substituted here)
 * @param wildcard             unresolved wildcard applied to files found in a folder
 * @param parentJob            parent job, polled for stop requests between files
 * @param result               job result passed through to checkOneFile()
 * @return true when a single source FILE was checked successfully; false on empty input,
 *         missing source, broken success condition, or when a folder was processed
 *         (folder processing never sets entrystatus — NOTE(review): confirm intended)
 */
private boolean processFileFolder(String sourcefilefoldername, String wildcard, Job parentJob, Result result) {
  boolean entrystatus = false;
  FileObject sourcefilefolder = null;
  FileObject CurrentFile = null;
  // Get real source file and wilcard
  String realSourceFilefoldername = environmentSubstitute(sourcefilefoldername);
  if (Const.isEmpty(realSourceFilefoldername)) {
    logError(BaseMessages.getString(PKG, "JobXMLWellFormed.log.FileFolderEmpty", sourcefilefoldername));
    // Update Errors
    updateErrors();
    return entrystatus;
  }
  String realWildcard = environmentSubstitute(wildcard);
  try {
    sourcefilefolder = KettleVFS.getFileObject(realSourceFilefoldername, this);
    if (sourcefilefolder.exists()) {
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobXMLWellFormed.Log.FileExists",
            sourcefilefolder.toString()));
      }
      if (sourcefilefolder.getType() == FileType.FILE) {
        // Single file: check it directly.
        entrystatus = checkOneFile(sourcefilefolder, result, parentJob);
      } else if (sourcefilefolder.getType() == FileType.FOLDER) {
        // Folder: collect all plain files (recursively) below it.
        FileObject[] fileObjects = sourcefilefolder.findFiles(new AllFileSelector() {
          public boolean traverseDescendents(FileSelectInfo info) {
            return true;
          }

          public boolean includeFile(FileSelectInfo info) {
            FileObject fileObject = info.getFile();
            try {
              if (fileObject == null) {
                return false;
              }
              if (fileObject.getType() != FileType.FILE) {
                return false;
              }
            } catch (Exception ex) {
              // Upon error don't process the file.
              return false;
            } finally {
              // NOTE(review): the selected file is closed here before it is read later;
              // VFS evidently allows re-opening — confirm before changing.
              if (fileObject != null) {
                try {
                  fileObject.close();
                } catch (IOException ex) {
                  /* Ignore */
                }
              }
            }
            return true;
          }
        });
        if (fileObjects != null) {
          for (int j = 0; j < fileObjects.length && !parentJob.isStopped(); j++) {
            // Stop immediately once the configured success condition is broken.
            if (successConditionBroken) {
              if (!successConditionBrokenExit) {
                logError(BaseMessages.getString(PKG, "JobXMLWellFormed.Error.SuccessConditionbroken",
                    "" + NrAllErrors));
                successConditionBrokenExit = true;
              }
              return false;
            }
            // Fetch files in list one after one ...
            CurrentFile = fileObjects[j];
            if (!CurrentFile.getParent().toString().equals(sourcefilefolder.toString())) {
              // Not in the Base Folder..Only if include sub folders
              if (include_subfolders) {
                if (GetFileWildcard(CurrentFile.toString(), realWildcard)) {
                  checkOneFile(CurrentFile, result, parentJob);
                }
              }
            } else {
              // In the base folder
              if (GetFileWildcard(CurrentFile.toString(), realWildcard)) {
                checkOneFile(CurrentFile, result, parentJob);
              }
            }
          }
        }
      } else {
        // Neither FILE nor FOLDER (e.g. imaginary type).
        logError(BaseMessages.getString(PKG, "JobXMLWellFormed.Error.UnknowFileFormat",
            sourcefilefolder.toString()));
        // Update Errors
        updateErrors();
      }
    } else {
      logError(BaseMessages.getString(PKG, "JobXMLWellFormed.Error.SourceFileNotExists",
          realSourceFilefoldername));
      // Update Errors
      updateErrors();
    }
  } catch (Exception e) {
    logError(BaseMessages.getString(PKG, "JobXMLWellFormed.Error.Exception.Processing",
        realSourceFilefoldername.toString(), e));
    // Update Errors
    updateErrors();
  } finally {
    if (sourcefilefolder != null) {
      try {
        sourcefilefolder.close();
      } catch (IOException ex) {
        /* Ignore */
      }
    }
    if (CurrentFile != null) {
      try {
        CurrentFile.close();
      } catch (IOException ex) {
        /* Ignore */
      }
    }
  }
  return entrystatus;
}
From source file:org.pentaho.di.job.entries.zipfile.JobEntryZipFile.java
/** * Get the requested part of the filename * * @param filename/*from w w w .j a va2 s.c om*/ * the filename (full) (/path/to/a/file.txt) * @param depth * the depth to get. 0 means: the complete filename, 1: the name only (file.txt), 2: one folder (a/file.txt) * 3: two folders (to/a/file.txt) and so on. * @return the requested part of the file name up to a certain depth * @throws KettleFileException */ private String determineZipfilenameForDepth(String filename, int depth) throws KettleException { try { if (Const.isEmpty(filename)) { return null; } if (depth == 0) { return filename; } FileObject fileObject = KettleVFS.getFileObject(filename); FileObject folder = fileObject.getParent(); String baseName = fileObject.getName().getBaseName(); if (depth == 1) { return baseName; } StringBuilder path = new StringBuilder(baseName); int d = 1; while (d < depth && folder != null) { path.insert(0, '/'); path.insert(0, folder.getName().getBaseName()); folder = folder.getParent(); d++; } return path.toString(); } catch (Exception e) { throw new KettleException("Unable to get zip filename '" + filename + "' to depth " + depth, e); } }
From source file:org.pentaho.di.job.JobMeta.java
/**
 * Exports this job (and, recursively, the resources its entries reference) into the
 * given resource-definition map, returning the name this job was exported under.
 *
 * @param space           variable space used to resolve the job's filename
 * @param definitions     accumulator of already-exported resources, keyed by name
 * @param namingInterface strategy that produces export names and the directory map
 * @param repository      repository the entries may need during export (may be null)
 * @param metaStore       metastore passed through to the entries' exportResources
 * @return the name assigned to this job's exported resource
 * @throws KettleException when the job file cannot be read
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore)
    throws KettleException {
  String resourceName = null;
  try {
    // Handle naming for both repository and XML bases resources...
    String baseName;
    String originalPath;
    String fullname;
    String extension = "kjb";
    if (Const.isEmpty(getFilename())) {
      // Assume repository...
      originalPath = directory.getPath();
      baseName = getName();
      fullname = directory.getPath()
          + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? ""
              : RepositoryDirectory.DIRECTORY_SEPARATOR)
          + getName() + "." + extension;
    } else {
      // Assume file
      FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
      originalPath = fileObject.getParent().getName().getPath();
      baseName = fileObject.getName().getBaseName();
      fullname = fileObject.getName().getPath();
    }
    resourceName = namingInterface.nameResource(baseName, originalPath, extension,
        ResourceNamingInterface.FileNamingType.JOB);
    // NOTE(review): the map is read with key resourceName here but written with key
    // fullname below — a repeated export may not find the earlier entry. Confirm
    // whether this asymmetry is intentional before relying on de-duplication.
    ResourceDefinition definition = definitions.get(resourceName);
    if (definition == null) {
      // If we do this once, it will be plenty :-)
      // Work on a deep clone so the original job metadata is left untouched.
      JobMeta jobMeta = (JobMeta) this.realClone(false);
      // All objects get re-located to the root folder
      jobMeta.setRepositoryDirectory(new RepositoryDirectory());
      // Add used resources, modify transMeta accordingly
      // Go through the list of steps, etc.
      // These critters change the steps in the cloned TransMeta
      // At the end we make a new XML version of it in "exported" format...
      // loop over steps, databases will be exported to XML anyway.
      for (JobEntryCopy jobEntry : jobMeta.jobcopies) {
        compatibleJobEntryExportResources(jobEntry.getEntry(), jobMeta, definitions, namingInterface,
            repository);
        jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface, repository, metaStore);
      }
      // Set a number of parameters for all the data files referenced so far...
      Map<String, String> directoryMap = namingInterface.getDirectoryMap();
      if (directoryMap != null) {
        for (String directory : directoryMap.keySet()) {
          String parameterName = directoryMap.get(directory);
          jobMeta.addParameterDefinition(parameterName, directory,
              "Data file path discovered during export");
        }
      }
      // At the end, add ourselves to the map...
      String jobMetaContent = jobMeta.getXML();
      definition = new ResourceDefinition(resourceName, jobMetaContent);
      // Also remember the original filename (if any), including variables etc.
      if (Const.isEmpty(this.getFilename())) {
        // Repository
        definition.setOrigin(fullname);
      } else {
        definition.setOrigin(this.getFilename());
      }
      definitions.put(fullname, definition);
    }
  } catch (FileSystemException e) {
    throw new KettleException(
        BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
  } catch (KettleFileException e) {
    throw new KettleException(
        BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
  }
  return resourceName;
}
From source file:org.pentaho.di.repository.filerep.KettleFileRepository.java
public ObjectId renameRepositoryDirectory(ObjectId id, RepositoryDirectoryInterface newParentDir, String newName) throws KettleException { if (newParentDir != null || newName != null) { try {/*from w w w . j a va2 s. co m*/ // In case of a root object, the ID is the same as the relative filename... RepositoryDirectoryInterface tree = loadRepositoryDirectoryTree(); RepositoryDirectoryInterface dir = tree.findDirectory(id); if (dir == null) { throw new KettleException("Could not find folder [" + id + "]"); } // If newName is null, keep the current name newName = (newName != null) ? newName : dir.getName(); FileObject folder = KettleVFS.getFileObject(dir.getPath()); String newFolderName = null; if (newParentDir != null) { FileObject newParentFolder = KettleVFS.getFileObject(newParentDir.getPath()); newFolderName = newParentFolder.toString() + "/" + newName; } else { newFolderName = folder.getParent().toString() + "/" + newName; } FileObject newFolder = KettleVFS.getFileObject(newFolderName); folder.moveTo(newFolder); return new StringObjectId(dir.getObjectId()); } catch (Exception e) { throw new KettleException("Unable to rename directory folder to [" + id + "]"); } } return (id); }
From source file:org.pentaho.di.repository.filerep.KettleFileRepository.java
/**
 * Resolves a repository object's metadata (name, directory, last-modified date) from the
 * file that backs it in the file-based repository.
 *
 * @param objectId   id of the object; its string form is the path relative to the base dir
 * @param objectType type tag copied into the returned RepositoryObject
 * @return the object's metadata, or null when the backing file does not exist
 * @throws KettleException when the file cannot be inspected
 */
public RepositoryObject getObjectInformation(ObjectId objectId, RepositoryObjectType objectType)
    throws KettleException {
  try {
    // The id is a path relative to the repository base directory; avoid a double slash.
    String filename = calcDirectoryName(null);
    if (objectId.getId().startsWith("/")) {
      filename += objectId.getId().substring(1);
    } else {
      filename += objectId.getId();
    }
    // NOTE(review): fileObject is never closed in this method — presumably relying on
    // VFS caching; confirm whether an explicit close is needed here.
    FileObject fileObject = KettleVFS.getFileObject(filename);
    if (!fileObject.exists()) {
      return null;
    }
    FileName fname = fileObject.getName();
    String name = fname.getBaseName();
    // Strip the extension (and its dot) from the display name, if present.
    if (!Const.isEmpty(fname.getExtension()) && name.length() > fname.getExtension().length()) {
      name = name.substring(0, name.length() - fname.getExtension().length() - 1);
    }
    String filePath = fileObject.getParent().getName().getPath();
    final FileObject baseDirObject = KettleVFS.getFileObject(repositoryMeta.getBaseDirectory());
    final int baseDirObjectPathLength = baseDirObject.getName().getPath().length();
    // Make the directory path relative to the repository base; fall back to root.
    final String dirPath = baseDirObjectPathLength <= filePath.length()
        ? filePath.substring(baseDirObjectPathLength)
        : "/";
    RepositoryDirectoryInterface directory = loadRepositoryDirectoryTree().findDirectory(dirPath);
    Date lastModified = new Date(fileObject.getContent().getLastModifiedTime());
    return new RepositoryObject(objectId, name, directory, "-", lastModified, objectType, "", false);
  } catch (Exception e) {
    throw new KettleException("Unable to get object information for object with id=" + objectId, e);
  }
}
From source file:org.pentaho.di.repository.RepositoryExporter.java
private String calcRepositoryDirectory(KettleFileRepository fileRep, FileObject fileObject) throws FileSystemException { String path = fileObject.getParent().getName().getPath(); String baseDirectory = fileRep.getRepositoryMeta().getBaseDirectory(); // Double check! //// w w w. java 2 s . c om if (path.startsWith(baseDirectory)) { return path.substring(baseDirectory.length()); } else { return path; } }
From source file:org.pentaho.di.resource.NameResourceTest.java
/** * This tests ResourceNamingInterface.nameResouce(), comparing the directory maps generated by the legacy and new * method.// w w w .j av a 2 s .c o m * * @param fileName * @param pathOnly * Resolve the path - leave out the file name * @throws Exception * * Legacy: namingResource(String, String, String, FileNamingType) New: namingResource(FileObject, TransMeta) */ private void testNamingResourceLegacyAndNew(String fileName, String extension, String fileMask) throws Exception { // Create a new transformation. TransMeta transMeta = new TransMeta(); FileObject fileObject = KettleVFS.getFileObject(fileName, transMeta); // This code is modeled after the legacy code in legacy step meta classes // that have an exportResources method that deal with file masks // e.g., ExcelInputMeta // // There is a big exception: where the legacy code does a "getName()" // this code does a "getURL()". This is because of the JIRA case // that resulted in the refactoring of ResourceNamingInterface. // // The UNC and VFS protocols where being dropped. // The code you see here would be the fix for that without adding // the new nameResource() to ResourceNamingInterface. // String path = null; String prefix = null; if (Const.isEmpty(fileMask)) { prefix = fileObject.getName().getBaseName(); path = fileObject.getParent().getURL().toString(); } else { prefix = ""; path = fileObject.getURL().toString(); } // Create a resource naming interface to use the legacy method call ResourceNamingInterface resourceNamingInterface_LEGACY = new SequenceResourceNaming(); // Create two resource naming interfaces, one for legacy call, the other for new method call ResourceNamingInterface resourceNamingInterface_NEW = new SequenceResourceNaming(); // The old and new interfaces to get the file name. 
String resolvedFileName_LEGACY = resourceNamingInterface_LEGACY.nameResource(prefix, path, extension, FileNamingType.DATA_FILE); String resolvedFileName_NEW = resourceNamingInterface_NEW.nameResource(fileObject, transMeta, Const.isEmpty(fileMask)); // get the variable name from both naming interfaces directory maps String pathFromMap_LEGACY = resourceNamingInterface_LEGACY.getDirectoryMap().get(path); String pathFromMap_NEW = resourceNamingInterface_NEW.getDirectoryMap().get(path); // The paths in both directories should be the same assertEquals(pathFromMap_LEGACY, pathFromMap_NEW); // The file names should be the same assertEquals(resolvedFileName_LEGACY, resolvedFileName_NEW); }
From source file:org.pentaho.di.resource.SimpleResourceNaming.java
/**
 * Names a data-file resource for export.
 *
 * @param fileObject      the file being named
 * @param space           variable space (unused by this simple implementation)
 * @param includeFileName when true, use the base name plus the parent directory URL;
 *                        when false, use only the file's own URL as the path
 * @return the handled data-file name
 * @throws FileSystemException when the URL or parent cannot be resolved
 */
public String nameResource(FileObject fileObject, VariableSpace space, boolean includeFileName)
    throws FileSystemException {
  if (!includeFileName) {
    // Path-only variant: the file's full URL is the key.
    return handleDataFile("", fileObject.getURL().toString(), "");
  }
  // File variant: base name keyed under the parent directory's URL.
  return handleDataFile(fileObject.getName().getBaseName(),
      fileObject.getParent().getURL().toString(), "");
}