List of usage examples for org.apache.commons.vfs.FileObject.getContent()
public FileContent getContent() throws FileSystemException;
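getContent() returns the FileContent attached to a FileObject; the examples below use it to open input and output streams (getInputStream(), getOutputStream()) and to read metadata (getSize(), getLastModifiedTime()). As a minimal sketch of the basic read path, assuming Commons VFS 1.x is on the classpath (the path file:///tmp/example.txt is a placeholder, not taken from the examples):

import java.io.InputStream;
import org.apache.commons.vfs.FileContent;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemManager;
import org.apache.commons.vfs.VFS;

public class GetContentSketch {
  public static void main(String[] args) throws Exception {
    FileSystemManager fsManager = VFS.getManager();
    // Placeholder URL; any scheme a registered VFS provider understands works here.
    FileObject file = fsManager.resolveFile("file:///tmp/example.txt");
    FileContent content = file.getContent();
    System.out.println("size=" + content.getSize()
        + " lastModified=" + content.getLastModifiedTime());
    InputStream in = content.getInputStream();
    try {
      int c;
      while ((c = in.read()) >= 0) {
        System.out.print((char) c);
      }
    } finally {
      in.close();
      file.close(); // also closes the content
    }
  }
}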
From source file: org.pentaho.di.trans.steps.mail.Mail.java

private void setAttachedFilesList(Object[] r, LogChannelInterface log) throws Exception {
  String realSourceFileFoldername = null;
  String realSourceWildcard = null;
  FileObject sourcefile = null;
  FileObject file = null;
  ZipOutputStream zipOutputStream = null;
  File masterZipfile = null;

  if (meta.isZipFilenameDynamic()) {
    data.ZipFilename = data.previousRowMeta.getString(r, data.indexOfDynamicZipFilename);
  }

  try {
    if (meta.isDynamicFilename()) {
      // Dynamic attached filenames
      if (data.indexOfSourceFilename > -1) {
        realSourceFileFoldername = data.previousRowMeta.getString(r, data.indexOfSourceFilename);
      }
      if (data.indexOfSourceWildcard > -1) {
        realSourceWildcard = data.previousRowMeta.getString(r, data.indexOfSourceWildcard);
      }
    } else {
      // Static attached filenames
      realSourceFileFoldername = data.realSourceFileFoldername;
      realSourceWildcard = data.realSourceWildcard;
    }
    if (!Const.isEmpty(realSourceFileFoldername)) {
      sourcefile = KettleVFS.getFileObject(realSourceFileFoldername, getTransMeta());
      if (sourcefile.exists()) {
        long fileSize = 0;
        FileObject[] list = null;
        if (sourcefile.getType() == FileType.FILE) {
          list = new FileObject[] { sourcefile };
        } else {
          list = sourcefile.findFiles(new TextFileSelector(sourcefile.toString(), realSourceWildcard));
        }
        if (list.length > 0) {
          boolean zipFiles = meta.isZipFiles();
          if (zipFiles && data.zipFileLimit == 0) {
            masterZipfile = new File(
              System.getProperty("java.io.tmpdir") + Const.FILE_SEPARATOR + data.ZipFilename);
            zipOutputStream = new ZipOutputStream(new FileOutputStream(masterZipfile));
          }
          for (int i = 0; i < list.length; i++) {
            file = KettleVFS.getFileObject(KettleVFS.getFilename(list[i]), getTransMeta());
            if (zipFiles) {
              if (data.zipFileLimit == 0) {
                ZipEntry zipEntry = new ZipEntry(file.getName().getBaseName());
                zipOutputStream.putNextEntry(zipEntry);
                // Now put the content of this file into this archive...
                BufferedInputStream inputStream =
                  new BufferedInputStream(file.getContent().getInputStream());
                int c;
                while ((c = inputStream.read()) >= 0) {
                  zipOutputStream.write(c);
                }
                inputStream.close();
                zipOutputStream.closeEntry();
              } else {
                fileSize += file.getContent().getSize();
              }
            } else {
              addAttachedFilePart(file);
            }
          }
          if (zipFiles) {
            if (isDebug()) {
              logDebug(BaseMessages.getString(PKG, "Mail.Log.FileSize", "" + fileSize));
              logDebug(BaseMessages.getString(PKG, "Mail.Log.LimitSize", "" + data.zipFileLimit));
            }
            if (data.zipFileLimit > 0 && fileSize > data.zipFileLimit) {
              masterZipfile = new File(
                System.getProperty("java.io.tmpdir") + Const.FILE_SEPARATOR + data.ZipFilename);
              zipOutputStream = new ZipOutputStream(new FileOutputStream(masterZipfile));
              for (int i = 0; i < list.length; i++) {
                file = KettleVFS.getFileObject(KettleVFS.getFilename(list[i]), getTransMeta());
                ZipEntry zipEntry = new ZipEntry(file.getName().getBaseName());
                zipOutputStream.putNextEntry(zipEntry);
                // Now put the content of this file into this archive...
                BufferedInputStream inputStream =
                  new BufferedInputStream(file.getContent().getInputStream());
                int c;
                while ((c = inputStream.read()) >= 0) {
                  zipOutputStream.write(c);
                }
                inputStream.close();
                zipOutputStream.closeEntry();
              }
            }
            if ((data.zipFileLimit > 0 && fileSize > data.zipFileLimit) || data.zipFileLimit == 0) {
              file = KettleVFS.getFileObject(masterZipfile.getAbsolutePath(), getTransMeta());
              addAttachedFilePart(file);
            }
          }
        }
      } else {
        logError(BaseMessages.getString(PKG, "Mail.Error.SourceFileFolderNotExists",
          realSourceFileFoldername));
      }
    }
  } catch (Exception e) {
    logError(e.getMessage());
  } finally {
    if (sourcefile != null) {
      try {
        sourcefile.close();
      } catch (Exception e) {
        // Ignore errors
      }
    }
    if (file != null) {
      try {
        file.close();
      } catch (Exception e) {
        // Ignore errors
      }
    }
    if (zipOutputStream != null) {
      try {
        zipOutputStream.finish();
        zipOutputStream.close();
      } catch (IOException e) {
        logError("Unable to close attachment zip file archive : " + e.toString());
      }
    }
  }
}
From source file: org.pentaho.di.trans.steps.script.ScriptAddedFunctions.java

public static double getFileSize(ScriptEngine actualContext, Bindings actualObject, Object[] ArgList,
    Object FunctionContext) {
  try {
    if (ArgList.length == 1 && !isNull(ArgList[0]) && !isUndefined(ArgList[0])) {
      if (ArgList[0].equals(null)) {
        return 0;
      }
      FileObject file = null;
      try {
        // Source file
        file = KettleVFS.getFileObject((String) ArgList[0]);
        long filesize = 0;
        if (file.exists()) {
          if (file.getType().equals(FileType.FILE)) {
            filesize = file.getContent().getSize();
          } else {
            throw new RuntimeException("[" + (String) ArgList[0] + "] is not a file!");
          }
        } else {
          throw new RuntimeException("file [" + (String) ArgList[0] + "] cannot be found!");
        }
        return filesize;
      } catch (IOException e) {
        throw new RuntimeException("The function call getFileSize threw an error: " + e.toString());
      } finally {
        if (file != null) {
          try {
            file.close();
          } catch (Exception e) {
            // Ignore errors
          }
        }
      }
    } else {
      throw new RuntimeException("The function call getFileSize is not valid.");
    }
  } catch (Exception e) {
    throw new RuntimeException(e.toString());
  }
}
From source file: org.pentaho.di.trans.steps.script.ScriptAddedFunctions.java

public static String getLastModifiedTime(ScriptEngine actualContext, Bindings actualObject, Object[] ArgList,
    Object FunctionContext) {
  try {
    if (ArgList.length == 2 && !isNull(ArgList[0]) && !isUndefined(ArgList[0])) {
      if (ArgList[0].equals(null)) {
        return null;
      }
      FileObject file = null;
      try {
        // Source file
        file = KettleVFS.getFileObject((String) ArgList[0]);
        String dateformat = (String) ArgList[1];
        if (isNull(dateformat)) {
          dateformat = "yyyy-MM-dd";
        }
        String lastmodifiedtime = null;
        if (file.exists()) {
          java.util.Date lastmodifiedtimedate = new java.util.Date(file.getContent().getLastModifiedTime());
          java.text.DateFormat dateFormat = new SimpleDateFormat(dateformat);
          lastmodifiedtime = dateFormat.format(lastmodifiedtimedate);
        } else {
          throw new RuntimeException("file [" + (String) ArgList[0] + "] cannot be found!");
        }
        return lastmodifiedtime;
      } catch (IOException e) {
        throw new RuntimeException("The function call getLastModifiedTime threw an error: " + e.toString());
      } finally {
        if (file != null) {
          try {
            file.close();
          } catch (Exception e) {
            // Ignore errors
          }
        }
      }
    } else {
      throw new RuntimeException("The function call getLastModifiedTime is not valid.");
    }
  } catch (Exception e) {
    throw new RuntimeException(e.toString());
  }
}
From source file: org.pentaho.di.trans.steps.scriptvalues_mod.ScriptValuesAddedFunctions.java

public static double getFileSize(Context actualContext, Scriptable actualObject, Object[] ArgList,
    Function FunctionContext) {
  try {
    if (ArgList.length == 1 && !isNull(ArgList[0]) && !isUndefined(ArgList[0])) {
      if (ArgList[0].equals(null)) {
        return 0;
      }
      FileObject file = null;
      try {
        // Source file
        file = KettleVFS.getFileObject(Context.toString(ArgList[0]));
        long filesize = 0;
        if (file.exists()) {
          if (file.getType().equals(FileType.FILE)) {
            filesize = file.getContent().getSize();
          } else {
            throw Context.reportRuntimeError("[" + Context.toString(ArgList[0]) + "] is not a file!");
          }
        } else {
          throw Context.reportRuntimeError("file [" + Context.toString(ArgList[0]) + "] cannot be found!");
        }
        return filesize;
      } catch (IOException e) {
        throw Context.reportRuntimeError("The function call getFileSize threw an error: " + e.toString());
      } finally {
        if (file != null) {
          try {
            file.close();
          } catch (Exception e) {
            // Ignore close errors
          }
        }
      }
    } else {
      throw Context.reportRuntimeError("The function call getFileSize is not valid.");
    }
  } catch (Exception e) {
    throw Context.reportRuntimeError(e.toString());
  }
}
From source file: org.pentaho.di.trans.steps.scriptvalues_mod.ScriptValuesAddedFunctions.java

public static String getLastModifiedTime(Context actualContext, Scriptable actualObject, Object[] ArgList,
    Function FunctionContext) {
  try {
    if (ArgList.length == 2 && !isNull(ArgList[0]) && !isUndefined(ArgList[0])) {
      if (ArgList[0].equals(null)) {
        return null;
      }
      FileObject file = null;
      try {
        // Source file
        file = KettleVFS.getFileObject(Context.toString(ArgList[0]));
        String dateformat = Context.toString(ArgList[1]);
        if (isNull(dateformat)) {
          dateformat = "yyyy-MM-dd";
        }
        String lastmodifiedtime = null;
        if (file.exists()) {
          java.util.Date lastmodifiedtimedate = new java.util.Date(file.getContent().getLastModifiedTime());
          java.text.DateFormat dateFormat = new SimpleDateFormat(dateformat);
          lastmodifiedtime = dateFormat.format(lastmodifiedtimedate);
        } else {
          throw Context.reportRuntimeError("file [" + Context.toString(ArgList[0]) + "] cannot be found!");
        }
        return lastmodifiedtime;
      } catch (IOException e) {
        throw Context.reportRuntimeError(
          "The function call getLastModifiedTime threw an error: " + e.toString());
      } finally {
        if (file != null) {
          try {
            file.close();
          } catch (Exception e) {
            // Ignore errors
          }
        }
      }
    } else {
      throw Context.reportRuntimeError("The function call getLastModifiedTime is not valid.");
    }
  } catch (Exception e) {
    throw Context.reportRuntimeError(e.toString());
  }
}
From source file: org.pentaho.di.trans.steps.symmetriccrypto.symmetricalgorithm.SymmetricCrypto.java

public void setSecretKeyFromFile(String filename) throws CryptoKeyException {
  FileObject file = null;
  try {
    file = KettleVFS.getFileObject(filename);
    if (!file.exists()) {
      throw new CryptoException(
        BaseMessages.getString(PKG, "SymmetricCrypto.CanNotFindFile", file.getName()));
    }
    // Size the buffer from the content, then read the key bytes from the file
    byte[] keyBytes = new byte[(int) file.getContent().getSize()];
    InputStream in = file.getContent().getInputStream();
    try {
      int offset = 0;
      while (offset < keyBytes.length) {
        int n = in.read(keyBytes, offset, keyBytes.length - offset);
        if (n < 0) {
          break;
        }
        offset += n;
      }
    } finally {
      in.close();
    }
    setSecretKey(keyBytes);
  } catch (Exception e) {
    throw new CryptoKeyException(e);
  } finally {
    if (file != null) {
      try {
        file.close();
      } catch (Exception e) {
        /* Ignore */
      }
    }
  }
}
From source file: org.pentaho.hadoop.mapreduce.test.TestSubmitMapReduceJob.java

@Test
public void submitJob() throws Exception {
  String[] args = { "hdfs://" + hostname + ":" + hdfsPort + "/junit/wordcount/input",
    "hdfs://" + hostname + ":" + hdfsPort + "/junit/wordcount/output" };

  JobConf conf = new JobConf();
  conf.setJobName("wordcount");
  conf.setOutputKeyClass(Text.class);
  conf.setOutputValueClass(IntWritable.class);

  File jar = new File("./test-res/pentaho-mapreduce-sample.jar");
  URLClassLoader loader = new URLClassLoader(new URL[] { jar.toURI().toURL() });
  conf.setMapperClass(
    (Class<? extends Mapper>) loader.loadClass("org.pentaho.hadoop.mapreduce.sample.MRWordCount$Map"));
  conf.setCombinerClass(
    (Class<? extends Reducer>) loader.loadClass("org.pentaho.hadoop.mapreduce.sample.MRWordCount$Reduce"));
  conf.setReducerClass(
    (Class<? extends Reducer>) loader.loadClass("org.pentaho.hadoop.mapreduce.sample.MRWordCount$Reduce"));

  conf.setInputFormat(TextInputFormat.class);
  conf.setOutputFormat(TextOutputFormat.class);

  FileInputFormat.setInputPaths(conf, new Path(args[0]));
  FileOutputFormat.setOutputPath(conf, new Path(args[1]));

  conf.set("fs.default.name", "hdfs://" + hostname + ":" + hdfsPort);
  conf.set("mapred.job.tracker", hostname + ":" + trackerPort);

  conf.setJarByClass(loader.loadClass("org.pentaho.hadoop.mapreduce.sample.MRWordCount"));
  conf.setWorkingDirectory(new Path("/tmp/wordcount"));

  JobClient jobClient = new JobClient(conf);
  ClusterStatus status = jobClient.getClusterStatus();
  assertEquals(State.RUNNING, status.getJobTrackerState());

  RunningJob runningJob = jobClient.submitJob(conf);
  System.out.print("Running " + runningJob.getJobName());
  while (!runningJob.isComplete()) {
    System.out.print(".");
    Thread.sleep(500);
  }
  System.out.println();
  System.out.println("Finished " + runningJob.getJobName() + ".");

  FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/wordcount/output/part-00000"));
  String output = IOUtils.toString(file.getContent().getInputStream());
  assertEquals("Bye\t1\nGoodbye\t1\nHadoop\t2\nHello\t2\nWorld\t2\n", output);
}
From source file: org.pentaho.hadoop.mapreduce.test.TransMapReduceJobTestFIXME.java

@Test
public void submitJob() throws Exception {
  String[] args = { "hdfs://" + hostname + ":" + hdfsPort + "/junit/wordcount/input",
    "hdfs://" + hostname + ":" + hdfsPort + "/junit/wordcount/output" };

  JobConf conf = new JobConf();
  conf.setJobName("wordcount");

  KettleEnvironment.init();
  TransExecutionConfiguration transExecConfig = new TransExecutionConfiguration();
  TransMeta transMeta = new TransMeta("./test-res/wordcount-mapper.ktr");
  TransConfiguration transConfig = new TransConfiguration(transMeta, transExecConfig);
  conf.set("transformation-map-xml", transConfig.getXML());

  transMeta = new TransMeta("./test-res/wordcount-reducer.ktr");
  transConfig = new TransConfiguration(transMeta, transExecConfig);
  conf.set("transformation-reduce-xml", transConfig.getXML());

  conf.set("transformation-map-input-stepname", "Injector");
  conf.set("transformation-map-output-stepname", "Output");
  conf.set("transformation-reduce-input-stepname", "Injector");
  conf.set("transformation-reduce-output-stepname", "Output");

  conf.setOutputKeyClass(Text.class);
  conf.setOutputValueClass(IntWritable.class);

  File jar = new File("./dist/pentaho-big-data-plugin-TRUNK-SNAPSHOT.jar");
  URLClassLoader loader = new URLClassLoader(new URL[] { jar.toURI().toURL() });
  conf.setMapperClass(
    (Class<? extends Mapper>) loader.loadClass("org.pentaho.hadoop.mapreduce.GenericTransMap"));
  conf.setCombinerClass(
    (Class<? extends Reducer>) loader.loadClass("org.pentaho.hadoop.mapreduce.GenericTransReduce"));
  conf.setReducerClass(
    (Class<? extends Reducer>) loader.loadClass("org.pentaho.hadoop.mapreduce.GenericTransReduce"));

  conf.setInputFormat(TextInputFormat.class);
  conf.setOutputFormat(TextOutputFormat.class);

  FileInputFormat.setInputPaths(conf, new Path(args[0]));
  FileOutputFormat.setOutputPath(conf, new Path(args[1]));

  conf.set("fs.default.name", "hdfs://" + hostname + ":" + hdfsPort);
  conf.set("mapred.job.tracker", hostname + ":" + trackerPort);

  conf.setJar(jar.toURI().toURL().toExternalForm());
  conf.setWorkingDirectory(new Path("/tmp/wordcount"));

  JobClient jobClient = new JobClient(conf);
  ClusterStatus status = jobClient.getClusterStatus();
  assertEquals(State.RUNNING, status.getJobTrackerState());

  RunningJob runningJob = jobClient.submitJob(conf);
  System.out.print("Running " + runningJob.getJobName());
  while (!runningJob.isComplete()) {
    System.out.print(".");
    Thread.sleep(500);
  }
  System.out.println();
  System.out.println("Finished " + runningJob.getJobName() + ".");

  FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/wordcount/output/part-00000"));
  String output = IOUtils.toString(file.getContent().getInputStream());
  assertEquals(
    "Bye\t4\nGood\t2\nGoodbye\t1\nHadoop\t2\nHello\t5\nThis\t1\nWorld\t5\nand\t1\ncounting\t1\nextra\t1\nfor\t1\nis\t1\nsome\t1\ntext\t1\nwords\t1\n",
    output);
}
From source file: org.pentaho.hdfs.vfs.test.HDFSVFSTest.java

@Test
public void readFile() throws Exception {
  assertNotNull("FileSystemManager is null", fsManager);

  FileObject hdfsFileOut = fsManager.resolveFile(buildHDFSURL("/junit/file.txt"));
  OutputStream out = hdfsFileOut.getContent().getOutputStream();
  out.write(HELLO_HADOOP_STR.getBytes("UTF-8"));
  out.close();

  FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/file.txt"));
  assertNotNull("File is null (could not resolve?)", file);
  String fileStr = IOUtils.toString(file.getContent().getInputStream(), "UTF-8");
  assertEquals(HELLO_HADOOP_STR, fileStr);

  file.delete();
}
From source file: org.pentaho.hdfs.vfs.test.HDFSVFSTest.java

@Test
public void writeFile() throws Exception {
  assertNotNull("FileSystemManager is null", fsManager);

  FileObject file = fsManager.resolveFile(buildHDFSURL("/junit/out.txt"));
  assertEquals(FileType.IMAGINARY, file.getType());
  assertNotNull("File is null (could not resolve?)", file);

  OutputStream output = file.getContent().getOutputStream();
  IOUtils.write(HELLO_HADOOP_STR, output);
  IOUtils.closeQuietly(output);
  assertEquals(FileType.FILE, file.getType());

  String fileStr = IOUtils.toString(file.getContent().getInputStream(), "UTF-8");
  assertEquals(HELLO_HADOOP_STR, fileStr);

  file.delete();
  file = fsManager.resolveFile(buildHDFSURL("/junit/out.txt"));
  assertEquals(FileType.IMAGINARY, file.getType());
}