Example usage for org.apache.commons.vfs FileObject delete

Introduction

This page lists example usages of org.apache.commons.vfs FileObject.delete().

Prototype

public boolean delete() throws FileSystemException;

Document

Deletes this file. Does nothing if the file does not exist or if it is a folder that has children; descendants are not deleted (use delete(FileSelector) for that). Returns true if this object has been deleted.
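
A minimal, self-contained sketch of the call (the URI below is a placeholder; resolveFile accepts any scheme registered with VFS):

import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemManager;
import org.apache.commons.vfs.VFS;

public class FileObjectDeleteExample {
    public static void main(String[] args) throws Exception {
        FileSystemManager fsManager = VFS.getManager();
        // Placeholder URI; any scheme supported by VFS (file, ftp, hdfs, ...) works here.
        FileObject file = fsManager.resolveFile("file:///tmp/example.txt");
        try {
            // delete() returns false when nothing was deleted, e.g. the file is missing.
            if (file.delete()) {
                System.out.println("Deleted " + file.getName());
            }
        } finally {
            file.close(); // always release the handle
        }
    }
}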

Usage

From source file: org.pentaho.di.trans.steps.orabulkloader.OraBulkLoader.java

public void dispose(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (OraBulkLoaderMeta) smi;
    data = (OraBulkLoaderData) sdi;

    super.dispose(smi, sdi);

    // close output stream (may terminate running sqlldr)
    if (output != null) {
        // Close the output
        try {
            output.close();
        } catch (IOException e) {
            logError("Error while closing output", e);
        }

        output = null;
    }
    // running sqlldr process must be terminated
    if (sqlldrProcess != null) {
        try {
            int exitVal = sqlldrProcess.waitFor();
            sqlldrProcess = null;
            logBasic(BaseMessages.getString(PKG, "OraBulkLoader.Log.ExitValueSqlldr", "" + exitVal));
        } catch (InterruptedException e) {
            /* process should be destroyed */
            e.printStackTrace();
            if (sqlldrProcess != null) {
                sqlldrProcess.destroy();
            }
        }
    }

    if (!preview && meta.isEraseFiles()) {
        // Erase the created cfg/dat files if requested. We don't erase
        // the rest of the files because it would be "stupid" to erase them
        // right after creation. If you don't want them, don't fill them in.
        FileObject fileObject = null;

        String method = meta.getLoadMethod();
        // OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals(method) ||
        if (OraBulkLoaderMeta.METHOD_AUTO_END.equals(method)) {
            if (meta.getControlFile() != null) {
                try {
                    fileObject = KettleVFS.getFileObject(environmentSubstitute(meta.getControlFile()),
                            getTransMeta());
                    fileObject.delete();
                    fileObject.close();
                } catch (Exception ex) {
                    logError("Error deleting control file \'" + KettleVFS.getFilename(fileObject) + "\': "
                            + ex.getMessage(), ex);
                }
            }
        }

        if (OraBulkLoaderMeta.METHOD_AUTO_END.equals(method)) {
            // In concurrent mode the data is written to the control file.
            if (meta.getDataFile() != null) {
                try {
                    fileObject = KettleVFS.getFileObject(environmentSubstitute(meta.getDataFile()),
                            getTransMeta());
                    fileObject.delete();
                    fileObject.close();
                } catch (Exception ex) {
                    logError("Error deleting data file \'" + KettleVFS.getFilename(fileObject) + "\': "
                            + ex.getMessage(), ex);
                }
            }
        }

        if (OraBulkLoaderMeta.METHOD_MANUAL.equals(method)) {
            logBasic("Deletion of files is not compatible with \'manual load method\'");
        }
    }
}
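
Note that in the example above, fileObject.close() sits inside the try, so the handle is not released when delete() throws. A sketch of the same cleanup (reusing the KettleVFS helpers from the example) with the close moved into a finally block:

FileObject fileObject = null;
try {
    fileObject = KettleVFS.getFileObject(environmentSubstitute(meta.getControlFile()), getTransMeta());
    fileObject.delete();
} catch (Exception ex) {
    logError("Error deleting control file '" + KettleVFS.getFilename(fileObject) + "': " + ex.getMessage(), ex);
} finally {
    if (fileObject != null) {
        try {
            fileObject.close();
        } catch (Exception e) {
            // ignore close errors
        }
    }
}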

From source file: org.pentaho.di.trans.steps.script.ScriptAddedFunctions.java

public static void deleteFile(ScriptEngine actualContext, Bindings actualObject, Object[] ArgList,
        Object FunctionContext) {

    try {
        if (ArgList.length == 1 && !isNull(ArgList[0]) && !isUndefined(ArgList[0])) {
            // Object act = actualObject.get("_step_", actualObject);
            // ScriptValuesMod act = (ScriptValuesMod)Context.toType(scm_delete, ScriptValuesMod.class);

            FileObject fileObject = null;

            try {
                fileObject = KettleVFS.getFileObject((String) ArgList[0]);
                if (fileObject.exists()) {
                    if (fileObject.getType() == FileType.FILE) {
                        if (!fileObject.delete()) {
                            throw new RuntimeException("We can not delete file [" + (String) ArgList[0] + "]!");
                        }
                    }

                } else {
                    throw new RuntimeException("file [" + (String) ArgList[0] + "] can not be found!");
                }
            } catch (IOException e) {
                throw new RuntimeException("The function call deleteFile is not valid.");
            } finally {
                if (fileObject != null) {
                    try {
                        fileObject.close();
                    } catch (Exception e) {
                        // Ignore errors
                    }
                }
            }

        } else {
            throw new RuntimeException("The function call deleteFile is not valid.");
        }
    } catch (Exception e) {
        throw new RuntimeException(e.toString());
    }
}

From source file: org.pentaho.di.trans.steps.scriptvalues_mod.ScriptValuesAddedFunctions.java

public static void deleteFile(Context actualContext, Scriptable actualObject, Object[] ArgList,
        Function FunctionContext) {

    try {
        if (ArgList.length == 1 && !isNull(ArgList[0]) && !isUndefined(ArgList[0])) {
            // Object act = actualObject.get("_step_", actualObject);
            // ScriptValuesMod act = (ScriptValuesMod)Context.toType(scm_delete, ScriptValuesMod.class);

            FileObject fileObject = null;

            try {
                fileObject = KettleVFS.getFileObject(Context.toString(ArgList[0]));
                if (fileObject.exists()) {
                    if (fileObject.getType() == FileType.FILE) {
                        if (!fileObject.delete()) {
                            throw Context.reportRuntimeError(
                                    "We can not delete file [" + Context.toString(ArgList[0]) + "]!");
                        }
                    }

                } else {
                    throw Context.reportRuntimeError("file [" + Context.toString(ArgList[0]) + "] can not be found!");
                }
            } catch (IOException e) {
                throw Context.reportRuntimeError("The function call deleteFile is not valid.");
            } finally {
                if (fileObject != null) {
                    try {
                        fileObject.close();
                    } catch (Exception e) {
                        // Ignore errors
                    }
                }
            }

        } else {
            throw Context.reportRuntimeError("The function call deleteFile is not valid.");
        }
    } catch (Exception e) {
        throw Context.reportRuntimeError(e.toString());
    }
}

From source file: org.pentaho.di.trans.steps.sftpput.SFTPPut.java

protected void finishTheJob(FileObject file, String sourceData, FileObject destinationFolder)
        throws KettleException {
    try {
        switch (meta.getAfterFTPS()) {
        case JobEntrySFTPPUT.AFTER_FTPSPUT_DELETE:
            // Delete source file
            if (file.exists()) {
                file.delete();
                if (isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "SFTPPut.Log.DeletedFile", sourceData));
                }
            }
            break;
        case JobEntrySFTPPUT.AFTER_FTPSPUT_MOVE:
            // Move source file
            FileObject destination = null;
            try {
                destination = KettleVFS.getFileObject(destinationFolder.getName().getBaseName()
                        + Const.FILE_SEPARATOR + file.getName().getBaseName(), this);
                file.moveTo(destination);
                if (isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "SFTPPut.Log.FileMoved", file, destination));
                }
            } finally {
                if (destination != null) {
                    destination.close();
                }
            }
            break;
        default:
            if (meta.isAddFilenameResut()) {
                // Add this to the result file names...
                ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, file,
                        getTransMeta().getName(), getStepname());
                resultFile
                        .setComment(BaseMessages.getString(PKG, "SFTPPut.Log.FilenameAddedToResultFilenames"));
                addResultFile(resultFile);
                if (isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "SFTPPut.Log.FilenameAddedToResultFilenames",
                            sourceData));
                }
            }
            break;
        }
    } catch (Exception e) {
        throw new KettleException(e);
    }
}

From source file: org.pentaho.di.trans.steps.sort.SortRows.java

Object[] getBuffer() throws KettleValueException {
    Object[] retval;

    // Open all files at once and read one row from each file...
    if (data.files.size() > 0 && (data.dis.size() == 0 || data.fis.size() == 0)) {
        if (log.isBasic()) {
            logBasic(BaseMessages.getString(PKG, "SortRows.Basic.OpeningTempFiles", data.files.size()));
        }

        try {
            for (int f = 0; f < data.files.size() && !isStopped(); f++) {
                FileObject fileObject = data.files.get(f);
                String filename = KettleVFS.getFilename(fileObject);
                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "SortRows.Detailed.OpeningTempFile", filename));
                }
                InputStream fi = KettleVFS.getInputStream(fileObject);
                DataInputStream di;
                data.fis.add(fi);
                if (data.compressFiles) {
                    di = getDataInputStream(new GZIPInputStream(new BufferedInputStream(fi)));
                } else {
                    di = new DataInputStream(new BufferedInputStream(fi, 50000));
                }
                data.dis.add(di);

                // How long is the buffer?
                int buffersize = data.bufferSizes.get(f);

                if (log.isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "SortRows.Detailed.FromFileExpectingRows", filename,
                            buffersize));
                }

                if (buffersize > 0) {
                    Object[] row = data.outputRowMeta.readData(di);
                    data.rowbuffer.add(row); // new row from input stream
                    data.tempRows.add(new RowTempFile(row, f));
                }
            }

            // Sort the data row buffer
            Collections.sort(data.tempRows, data.comparator);
        } catch (Exception e) {
            logError(BaseMessages.getString(PKG, "SortRows.Error.ErrorReadingBackTempFiles"), e);
        }
    }

    if (data.files.size() == 0) {
        // read from in-memory processing

        if (data.getBufferIndex < data.buffer.size()) {
            retval = data.buffer.get(data.getBufferIndex);
            data.getBufferIndex++;
        } else {
            retval = null;
        }
    } else {
        // read from disk processing

        if (data.rowbuffer.size() == 0) {
            retval = null;
        } else {
            // We now have "filenr" rows waiting: which one is the smallest?
            //
            if (log.isRowLevel()) {
                for (int i = 0; i < data.rowbuffer.size() && !isStopped(); i++) {
                    Object[] b = data.rowbuffer.get(i);
                    logRowlevel(BaseMessages.getString(PKG, "SortRows.RowLevel.PrintRow", i,
                            data.outputRowMeta.getString(b)));
                }
            }

            RowTempFile rowTempFile = data.tempRows.remove(0);
            retval = rowTempFile.row;
            int smallest = rowTempFile.fileNumber;

            // now get another Row for position smallest

            FileObject file = data.files.get(smallest);
            DataInputStream di = data.dis.get(smallest);
            InputStream fi = data.fis.get(smallest);

            try {
                Object[] row2 = data.outputRowMeta.readData(di);
                RowTempFile extra = new RowTempFile(row2, smallest);

                int index = Collections.binarySearch(data.tempRows, extra, data.comparator);
                if (index < 0) {
                    // binarySearch returns -(insertionPoint) - 1 when the key is absent,
                    // so recover the insertion point to keep tempRows sorted
                    data.tempRows.add(-index - 1, extra);
                } else {
                    data.tempRows.add(index, extra);
                }
            } catch (KettleFileException fe) { // empty file or EOF mostly
                GZIPInputStream gzfi = (data.compressFiles) ? data.gzis.get(smallest) : null;
                try {
                    di.close();
                    fi.close();
                    if (gzfi != null) {
                        gzfi.close();
                    }
                    file.delete();
                } catch (IOException e) {
                    logError(BaseMessages.getString(PKG, "SortRows.Error.UnableToCloseFile", smallest,
                            file.toString()));
                    setErrors(1);
                    stopAll();
                    return null;
                }

                data.files.remove(smallest);
                data.dis.remove(smallest);
                data.fis.remove(smallest);

                if (gzfi != null) {
                    data.gzis.remove(smallest);
                }

                // Also update all file numbers in data.tempRows if they are larger
                // than smallest.
                //
                for (RowTempFile rtf : data.tempRows) {
                    if (rtf.fileNumber > smallest) {
                        rtf.fileNumber--;
                    }
                }
            } catch (SocketTimeoutException e) {
                throw new KettleValueException(e); // should never happen on local files
            }
        }
    }
    return retval;
}

From source file: org.pentaho.di.trans.steps.sort.SortRows.java

private void clearBuffers() {

    // Clean out the sort buffer
    data.buffer.clear();
    data.getBufferIndex = 0;
    data.rowbuffer.clear();

    // close any open DataInputStream objects
    if ((data.dis != null) && (data.dis.size() > 0)) {
        for (DataInputStream dis : data.dis) {
            BaseStep.closeQuietly(dis);
        }
    }
    // close any open InputStream objects
    if ((data.fis != null) && (data.fis.size() > 0)) {
        for (InputStream is : data.fis) {
            BaseStep.closeQuietly(is);
        }
    }
    // remove temp files
    for (int f = 0; f < data.files.size(); f++) {
        FileObject fileToDelete = data.files.get(f);
        try {
            if (fileToDelete != null && fileToDelete.exists()) {
                fileToDelete.delete();
            }
        } catch (FileSystemException e) {
            logError(e.getLocalizedMessage(), e);
        }
    }
}

From source file: org.pentaho.hdfs.vfs.test.HDFSVFSTest.java

@Test
public void readFile() throws Exception {
    assertNotNull("FileSystemManager is null", fsManager);

    FileObject hdfsFileOut = fsManager.resolveFile(buildHDFSURL("/junit/file.txt"));
    OutputStream out = hdfsFileOut.getContent().getOutputStream();
    out.write(HELLO_HADOOP_STR.getBytes("UTF-8"));
    out.close();

    FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/file.txt"));
    assertNotNull("File is null (could not resolve?)", file);
    String fileStr = IOUtils.toString(file.getContent().getInputStream(), "UTF-8");
    assertEquals(HELLO_HADOOP_STR, fileStr);

    file.delete();
}

From source file: org.pentaho.hdfs.vfs.test.HDFSVFSTest.java

@Test
public void writeFile() throws Exception {
    assertNotNull("FileSystemManager is null", fsManager);
    FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/out.txt"));
    assertEquals(FileType.IMAGINARY, file.getType());
    assertNotNull("File is null (could not resolve?)", file);

    OutputStream output = file.getContent().getOutputStream();
    IOUtils.write(HELLO_HADOOP_STR, output);
    IOUtils.closeQuietly(output);
    assertEquals(FileType.FILE, file.getType());

    String fileStr = IOUtils.toString(file.getContent().getInputStream(), "UTF-8");
    assertEquals(HELLO_HADOOP_STR, fileStr);

    file.delete();
    file = fsManager.resolveFile(buildHDFSURL("/junit/out.txt"));
    assertEquals(FileType.IMAGINARY, file.getType());
}

From source file: org.pentaho.hdfs.vfs.test.HDFSVFSTest.java

@Test
public void deleteFile() throws Exception {
    assertNotNull("FileSystemManager is null", fsManager);
    FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/out.txt"));
    assertNotNull("File is null (could not resolve?)", file);
    assertEquals(FileType.IMAGINARY, file.getType());

    OutputStream output = file.getContent().getOutputStream();
    IOUtils.write(HELLO_HADOOP_STR, output);
    IOUtils.closeQuietly(output);
    assertEquals(FileType.FILE, file.getType());

    String fileStr = IOUtils.toString(file.getContent().getInputStream(), "UTF-8");
    assertEquals(HELLO_HADOOP_STR, fileStr);
    file.delete();

    file = fsManager.resolveFile(buildHDFSURL("/junit/out.txt"));
    assertEquals(FileType.IMAGINARY, file.getType());
}

From source file: org.pentaho.hdfs.vfs.test.HDFSVFSTest.java

@Test
public void createFolder() throws Exception {
    assertNotNull("FileSystemManager is null", fsManager);
    FileObject folder = fsManager.resolveFile(buildHDFSURL("/junit/folder"));
    assertNotNull("File is null (could not resolve?)", folder);
    assertEquals(FileType.IMAGINARY, folder.getType());
    folder.createFolder();

    folder = fsManager.resolveFile(buildHDFSURL("/junit/folder"));
    assertNotNull("File is null (could not resolve?)", folder);
    assertEquals(FileType.FOLDER, folder.getType());

    folder = fsManager.resolveFile(buildHDFSURL("/junit/folder"));
    assertNotNull("File is null (could not resolve?)", folder);
    assertEquals(FileType.FOLDER, folder.getType());
    assertEquals(true, folder.delete());

    folder = fsManager.resolveFile(buildHDFSURL("/junit/folder"));
    assertNotNull("File is null (could not resolve?)", folder);
    assertEquals(FileType.IMAGINARY, folder.getType());
}