List of usage examples for the `org.apache.commons.vfs` `FileObject.createFolder()` method.
public void createFolder() throws FileSystemException;
From source file:com.panet.imeta.core.plugins.PluginLoader.java
/** * "Deploys" the plugin jar file./*from w w w .ja v a 2 s . co m*/ * * @param parent * @return * @throws FileSystemException */ private FileObject explodeJar(FileObject parent) throws FileSystemException { // By Alex, 7/13/07 // Since the JVM does not support nested jars and // URLClassLoaders, we have to hack it // see // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4735639 // // We do so by exploding the jar, sort of like deploying it FileObject dest = VFS.getManager().resolveFile(Const.getKettleDirectory() + File.separator + WORK_DIR); dest.createFolder(); FileObject destFile = dest.resolveFile(parent.getName().getBaseName()); if (!destFile.exists()) destFile.createFolder(); else // delete children for (FileObject child : destFile.getChildren()) child.delete(new AllFileSelector()); // force VFS to treat it as a jar file explicitly with children, // etc. and copy destFile.copyFrom(!(parent instanceof JarFileObject) ? VFS.getManager().resolveFile(JAR + ":" + parent.getName().getURI()) : parent, new AllFileSelector()); return destFile; }
From source file:com.panet.imeta.trans.steps.luciddbbulkloader.LucidDBBulkLoader.java
public boolean execute(LucidDBBulkLoaderMeta meta, boolean wait) throws KettleException { Runtime rt = Runtime.getRuntime(); try {//from ww w .j av a 2 s . c o m String tableName = environmentSubstitute(meta.getTableName()); // 1) Set up the FIFO folder, create the directory and path to it... // String fifoVfsDirectory = environmentSubstitute(meta.getFifoDirectory()); FileObject directory = KettleVFS.getFileObject(fifoVfsDirectory); directory.createFolder(); String fifoDirectory = KettleVFS.getFilename(directory); // 2) Create the FIFO file using the "mkfifo" command... // Make sure to log all the possible output, also from STDERR // data.fifoFilename = KettleVFS.getFilename(directory) + Const.FILE_SEPARATOR + tableName + ".csv"; data.bcpFilename = KettleVFS.getFilename(directory) + Const.FILE_SEPARATOR + tableName + ".bcp"; File fifoFile = new File(data.fifoFilename); if (!fifoFile.exists()) { String mkFifoCmd = "mkfifo " + data.fifoFilename + ""; logBasic("Creating FIFO file using this command : " + mkFifoCmd); Process mkFifoProcess = rt.exec(mkFifoCmd); StreamLogger errorLogger = new StreamLogger(mkFifoProcess.getErrorStream(), "mkFifoError"); StreamLogger outputLogger = new StreamLogger(mkFifoProcess.getInputStream(), "mkFifoOuptut"); new Thread(errorLogger).start(); new Thread(outputLogger).start(); int result = mkFifoProcess.waitFor(); if (result != 0) { throw new Exception("Return code " + result + " received from statement : " + mkFifoCmd); } } // 3) Make a connection to LucidDB for sending SQL commands // (Also, we need a clear cache for getting up-to-date target metadata) DBCache.getInstance().clear(meta.getDatabaseMeta().getName()); data.db = new Database(meta.getDatabaseMeta()); data.db.shareVariablesWith(this); // Connect to the database if (getTransMeta().isUsingUniqueConnections()) { synchronized (getTrans()) { data.db.connect(getTrans().getThreadName(), getPartitionID()); } } else { data.db.connect(getPartitionID()); } logBasic("Connected to 
LucidDB"); // 4) Now we are ready to create the LucidDB FIFO server that will handle the actual bulk loading. // String fifoServerStatement = ""; fifoServerStatement += "create or replace server " + meta.getFifoServerName() + Const.CR; fifoServerStatement += "foreign data wrapper sys_file_wrapper" + Const.CR; fifoServerStatement += "options (" + Const.CR; fifoServerStatement += "directory '" + fifoDirectory + "'," + Const.CR; fifoServerStatement += "file_extension 'csv'," + Const.CR; fifoServerStatement += "with_header 'no'," + Const.CR; fifoServerStatement += "num_rows_scan '0'," + Const.CR; fifoServerStatement += "lenient 'no');" + Const.CR; logBasic("Creating LucidDB fifo_server with the following command: " + fifoServerStatement); data.db.execStatements(fifoServerStatement); // 5) Set the error limit in the LucidDB session // REVIEW jvs 13-Dec-2008: is this guaranteed to retain the same // connection? String errorMaxStatement = ""; errorMaxStatement += "alter session set \"errorMax\" = " + meta.getMaxErrors() + ";" + Const.CR; logBasic("Setting error limit in LucidDB session with the following command: " + errorMaxStatement); data.db.execStatements(errorMaxStatement); // 6) Now we also need to create a bulk loader file .bcp // createBulkLoadConfigFile(data.bcpFilename); // 7) execute the actual load command! // This will actually block until the load is done in the // separate execution thread; see notes in executeLoadCommand // on why it's important for this to occur BEFORE // opening our end of the FIFO. // executeLoadCommand(tableName); // 8) We have to write rows to the FIFO file later on. data.fifoStream = new BufferedOutputStream(new FileOutputStream(fifoFile)); } catch (Exception ex) { throw new KettleException(ex); } return true; }
From source file:com.panet.imeta.trans.steps.propertyoutput.PropertyOutput.java
/**
 * Creates the parent folder of {@code data.filename} if the step is
 * configured to do so and the folder does not yet exist.
 *
 * @throws KettleException if the parent folder cannot be created
 */
private void createParentFolder() throws KettleException {
    // Only act when the step option "create parent folder" is enabled.
    if (meta.isCreateParentFolder()) {
        FileObject parentfolder = null;
        try {
            // Resolve the parent folder of the output file.
            parentfolder = KettleVFS.getFileObject(data.filename).getParent();
            if (!parentfolder.exists()) {
                // NOTE(review): message key "ParentFolderExists" is logged when the
                // folder does NOT exist, and "CanNotCreateParentFolder" after a
                // SUCCESSFUL creation — the bundle keys look swapped/wrong; verify
                // against the Messages resource bundle.
                if (log.isDetailed())
                    log.logDetailed(toString(), Messages.getString("PropertyOutput.Log.ParentFolderExists",
                            parentfolder.getName().toString()));
                parentfolder.createFolder();
                if (log.isDetailed())
                    log.logDetailed(toString(), Messages.getString(
                            "PropertyOutput.Log.CanNotCreateParentFolder", parentfolder.getName().toString()));
            }
        } catch (Exception e) {
            // NOTE(review): if getFileObject() itself threw, parentfolder is still
            // null here and parentfolder.getName() will NPE, masking the real error.
            logError(Messages.getString("PropertyOutput.Log.CanNotCreateParentFolder",
                    parentfolder.getName().toString()));
            throw new KettleException(Messages.getString("PropertyOutput.Log.CanNotCreateParentFolder",
                    parentfolder.getName().toString()));
        } finally {
            // Best-effort close of the VFS handle; close failures are ignored.
            if (parentfolder != null) {
                try {
                    parentfolder.close();
                } catch (Exception ex) {
                };
            }
        }
    }
}
From source file:com.panet.imeta.trans.steps.processfiles.ProcessFiles.java
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException { meta = (ProcessFilesMeta) smi;//from w ww .j a v a 2 s . c om data = (ProcessFilesData) sdi; Object[] r = getRow(); // Get row from input rowset & set row busy! if (r == null) // no more input to be expected... { setOutputDone(); return false; } if (first) { first = false; // Check is source filename field is provided if (Const.isEmpty(meta.getDynamicSourceFileNameField())) { throw new KettleException(Messages.getString("ProcessFiles.Error.SourceFilenameFieldMissing")); } // Check is target filename field is provided if (meta.getOperationType() != ProcessFilesMeta.OPERATION_TYPE_DELETE && Const.isEmpty(meta.getDynamicTargetFileNameField())) { throw new KettleException(Messages.getString("ProcessFiles.Error.TargetFilenameFieldMissing")); } // cache the position of the source filename field if (data.indexOfSourceFilename < 0) { data.indexOfSourceFilename = getInputRowMeta().indexOfValue(meta.getDynamicSourceFileNameField()); if (data.indexOfSourceFilename < 0) { // The field is unreachable ! throw new KettleException(Messages.getString("ProcessFiles.Exception.CouldnotFindField", //$NON-NLS-1$ meta.getDynamicSourceFileNameField())); //$NON-NLS-2$ } } // cache the position of the source filename field if (meta.getOperationType() != ProcessFilesMeta.OPERATION_TYPE_DELETE && data.indexOfTargetFilename < 0) { data.indexOfTargetFilename = getInputRowMeta().indexOfValue(meta.getDynamicTargetFileNameField()); if (data.indexOfTargetFilename < 0) { // The field is unreachable ! 
throw new KettleException(Messages.getString("ProcessFiles.Exception.CouldnotFindField", //$NON-NLS-1$ meta.getDynamicTargetFileNameField())); //$NON-NLS-2$ } } if (meta.simulate) { if (log.isBasic()) log.logBasic(toString(), Messages.getString("ProcessFiles.Log.SimulationModeON")); } } // End If first try { // get source filename String sourceFilename = getInputRowMeta().getString(r, data.indexOfSourceFilename); if (Const.isEmpty(sourceFilename)) { log.logError(toString(), Messages.getString("ProcessFiles.Error.SourceFileEmpty")); throw new KettleException(Messages.getString("ProcessFiles.Error.SourceFileEmpty")); } data.sourceFile = KettleVFS.getFileObject(sourceFilename); boolean targetFileExists = false; if (!data.sourceFile.exists()) { log.logError(toString(), Messages.getString("ProcessFiles.Error.SourceFileNotExist", sourceFilename)); throw new KettleException( Messages.getString("ProcessFiles.Error.SourceFileNotExist", sourceFilename)); } if (data.sourceFile.getType() != FileType.FILE) { log.logError(toString(), Messages.getString("ProcessFiles.Error.SourceFileNotFile", sourceFilename)); throw new KettleException( Messages.getString("ProcessFiles.Error.SourceFileNotFile", sourceFilename)); } String targetFilename = null; if (meta.getOperationType() != ProcessFilesMeta.OPERATION_TYPE_DELETE) { // get value for target filename targetFilename = getInputRowMeta().getString(r, data.indexOfTargetFilename); if (Const.isEmpty(targetFilename)) { log.logError(toString(), Messages.getString("ProcessFiles.Error.TargetFileEmpty")); throw new KettleException(Messages.getString("ProcessFiles.Error.TargetFileEmpty")); } data.targetFile = KettleVFS.getFileObject(targetFilename); if (data.targetFile.exists()) { if (log.isDetailed()) log.logDetailed(toString(), Messages.getString("ProcessFiles.Log.TargetFileExists", targetFilename)); } else { // let's check parent folder FileObject parentFolder = data.targetFile.getParent(); if (!parentFolder.exists()) { if 
(!meta.isCreateParentFolder()) throw new KettleException(Messages.getString( "ProcessFiles.Error.TargetParentFolderNotExists", parentFolder.toString())); else parentFolder.createFolder(); } if (parentFolder != null) parentFolder.close(); } } switch (meta.getOperationType()) { case ProcessFilesMeta.OPERATION_TYPE_COPY: if (((meta.isOverwriteTargetFile() && targetFileExists) || !targetFileExists) && !meta.simulate) data.targetFile.copyFrom(data.sourceFile, new TextOneToOneFileSelector()); if (log.isDetailed()) log.logDetailed(toString(), Messages.getString("ProcessFiles.Log.SourceFileCopied", sourceFilename, targetFilename)); break; case ProcessFilesMeta.OPERATION_TYPE_MOVE: if (((meta.isOverwriteTargetFile() && targetFileExists) || !targetFileExists) && !meta.simulate) data.sourceFile.moveTo(KettleVFS.getFileObject(targetFilename)); if (log.isDetailed()) log.logDetailed(toString(), Messages.getString("ProcessFiles.Log.SourceFileMoved", sourceFilename, targetFilename)); break; case ProcessFilesMeta.OPERATION_TYPE_DELETE: if (!meta.simulate) { if (!data.sourceFile.delete()) throw new KettleException(Messages.getString("ProcessFiles.Error.CanNotDeleteFile", data.sourceFile.toString())); } if (log.isDetailed()) log.logDetailed(toString(), Messages.getString("ProcessFiles.Log.SourceFileDeleted", sourceFilename)); break; default: break; } // add filename to result filenames? if (meta.isaddTargetFileNametoResult() && meta.getOperationType() != ProcessFilesMeta.OPERATION_TYPE_DELETE && data.sourceFile.getType() == FileType.FILE) { // Add this to the result file names... 
ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, data.targetFile, getTransMeta().getName(), getStepname()); resultFile.setComment(Messages.getString("ProcessFiles.Log.FileAddedResult")); addResultFile(resultFile); if (log.isDetailed()) log.logDetailed(toString(), Messages.getString("ProcessFiles.Log.FilenameAddResult", data.sourceFile.toString())); } putRow(getInputRowMeta(), r); // copy row to possible alternate rowset(s). if (checkFeedback(getLinesRead())) { if (log.isBasic()) logBasic(Messages.getString("ProcessFiles.LineNumber") + getLinesRead()); //$NON-NLS-1$ } } catch (Exception e) { boolean sendToErrorRow = false; String errorMessage = null; if (getStepMeta().isDoingErrorHandling()) { sendToErrorRow = true; errorMessage = e.toString(); } else { logError(Messages.getString("ProcessFiles.ErrorInStepRunning") + e.getMessage()); //$NON-NLS-1$ setErrors(1); stopAll(); setOutputDone(); // signal end to receiver(s) return false; } if (sendToErrorRow) { // Simply add this row to the error row putError(getInputRowMeta(), r, 1, errorMessage, null, "ProcessFiles001"); } } return true; }
From source file:com.thinkberg.moxo.dav.MkColHandler.java
public void service(HttpServletRequest request, HttpServletResponse response) throws IOException { if (request.getReader().readLine() != null) { response.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE); return;//from w w w . j ava 2 s . co m } FileObject object = getResourceManager().getFileObject(request.getPathInfo()); try { LockManager.getInstance().checkCondition(object, getIf(request)); } catch (LockException e) { if (e.getLocks() != null) { response.sendError(SC_LOCKED); } else { response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED); } return; } if (object.exists()) { response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); return; } if (!object.getParent().exists() || !FileType.FOLDER.equals(object.getParent().getType())) { response.sendError(HttpServletResponse.SC_CONFLICT); return; } try { object.createFolder(); response.setStatus(HttpServletResponse.SC_CREATED); } catch (FileSystemException e) { response.sendError(HttpServletResponse.SC_FORBIDDEN); } }
From source file:com.panet.imeta.trans.steps.sqlfileoutput.SQLFileOutput.java
public boolean init(StepMetaInterface smi, StepDataInterface sdi) { meta = (SQLFileOutputMeta) smi;//from w w w.j a va 2s . co m data = (SQLFileOutputData) sdi; if (super.init(smi, sdi)) { try { if (meta.getDatabaseMeta() == null) { throw new KettleStepException("The connection is not defined (empty)"); } data.db = new Database(meta.getDatabaseMeta()); data.db.shareVariablesWith(this); logBasic("Connected to database [" + meta.getDatabaseMeta() + "]"); if (meta.isCreateParentFolder()) { // Check for parent folder FileObject parentfolder = null; try { // Get parent folder String filename = environmentSubstitute(meta.getFileName()); parentfolder = KettleVFS.getFileObject(filename).getParent(); if (!parentfolder.exists()) { log.logBasic("Folder parent", "Folder parent " + parentfolder.getName() + " does not exist !"); parentfolder.createFolder(); log.logBasic("Folder parent", "Folder parent was created."); } } catch (Exception e) { logError("Couldn't created parent folder " + parentfolder.getName()); setErrors(1L); stopAll(); } finally { if (parentfolder != null) { try { parentfolder.close(); } catch (Exception ex) { } ; } } } if (!meta.isDoNotOpenNewFileInit()) { if (!openNewFile()) { logError("Couldn't open file [" + buildFilename() + "]"); setErrors(1L); stopAll(); } } tableName = environmentSubstitute(meta.getTablename()); schemaName = environmentSubstitute(meta.getSchemaName()); if (Const.isEmpty(tableName)) { throw new KettleStepException("The tablename is not defined (empty)"); } schemaTable = data.db.getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName); } catch (Exception e) { logError("An error occurred intialising this step: " + e.getMessage()); stopAll(); setErrors(1); } return true; } return false; }
From source file:com.newatlanta.appengine.vfs.provider.GaeFileObject.java
/** * Renames the file. If a folder, recursively rename the children. */// w w w . ja va2 s .c o m @Override protected void doRename(FileObject newfile) throws IOException { if (this.getType().hasChildren()) { // rename the children for (FileObject child : this.getChildren()) { String newChildPath = child.getName().getPath().replace(this.getName().getPath(), newfile.getName().getPath()); child.moveTo(resolveFile(newChildPath)); } newfile.createFolder(); } else { if (this.getContent().isOpen()) { // causes re-attach throw new IOException(this.getName() + " content is open"); } GaeFileObject newGaeFile = (GaeFileObject) newfile; newGaeFile.metadata.setPropertiesFrom(this.metadata); // copy contents (blocks) to new file Map<Key, Entity> blocks = datastore.get(getBlockKeys(0)); List<Entity> newBlocks = new ArrayList<Entity>(blocks.size()); for (Entity block : blocks.values()) { Entity newBlock = newGaeFile.getBlock(block.getKey().getId() - 1); newBlock.setPropertiesFrom(block); newBlocks.add(newBlock); } newGaeFile.putContent(newBlocks); } }
From source file:com.thinkberg.webdav.MkColHandler.java
public void service(HttpServletRequest request, HttpServletResponse response) throws IOException { BufferedReader bufferedReader = request.getReader(); String line = bufferedReader.readLine(); if (line != null) { response.sendError(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE); return;// ww w . ja va 2 s . c o m } FileObject object = VFSBackend.resolveFile(request.getPathInfo()); try { if (!LockManager.getInstance().evaluateCondition(object, getIf(request)).result) { response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED); return; } } catch (LockException e) { response.sendError(SC_LOCKED); return; } catch (ParseException e) { response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED); return; } if (object.exists()) { response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED); return; } if (!object.getParent().exists() || !FileType.FOLDER.equals(object.getParent().getType())) { response.sendError(HttpServletResponse.SC_CONFLICT); return; } try { object.createFolder(); response.setStatus(HttpServletResponse.SC_CREATED); } catch (FileSystemException e) { response.sendError(HttpServletResponse.SC_FORBIDDEN); } }
From source file:com.newatlanta.appengine.vfs.provider.GaeFileObject.java
/** * Override the superclass implementation to make sure GaeVFS "shadows" * exist for local directories./*from w w w.ja v a2s . com*/ */ @Override public FileObject getParent() throws FileSystemException { FileObject parent = super.getParent(); if ((parent != null) && !parent.exists()) { // check for existing local directory FileSystemManager manager = getFileSystem().getFileSystemManager(); FileObject localDir = manager.resolveFile("file://" + GaeFileNameParser.getRootPath(manager.getBaseFile().getName()) + parent.getName().getPath()); if (localDir.exists() && localDir.getType().hasChildren()) { parent.createFolder(); // make sure GaeVFS "shadow" folder exists } } return parent; }
From source file:com.panet.imeta.job.entries.copymoveresultfilenames.JobEntryCopyMoveResultFilenames.java
/**
 * Ensures the destination folder exists, creating it when the job entry is
 * configured to do so.
 *
 * @param foldername the folder path to check/create
 * @param log        the log writer to report progress and errors to
 * @return true if the folder exists or was created, false otherwise
 */
private boolean CreateDestinationFolder(String foldername, LogWriter log) {
    FileObject destination = null;
    try {
        destination = KettleVFS.getFileObject(foldername);

        if (destination.exists()) {
            if (log.isDetailed()) {
                log.logDetailed(toString(),
                        Messages.getString("JobEntryCopyMoveResultFilenames.Log.FolderExists", foldername));
            }
            return true;
        }

        log.logError(toString(),
                Messages.getString("JobEntryCopyMoveResultFilenames.Log.FolderNotExists", foldername));
        if (!isCreateDestinationFolder()) {
            return false;
        }
        destination.createFolder();
        if (log.isBasic()) {
            log.logBasic(toString(),
                    Messages.getString("JobEntryCopyMoveResultFilenames.Log.FolderCreated", foldername));
        }
        return true;
    } catch (Exception e) {
        log.logError(toString(), Messages.getString("JobEntryCopyMoveResultFilenames.Log.CanNotCreatedFolder",
                foldername, e.toString()));
    } finally {
        // Best-effort close of the VFS handle; close failures are ignored.
        if (destination != null) {
            try {
                destination.close();
            } catch (Exception ignored) {
            }
        }
    }
    return false;
}