Example usage for org.apache.commons.vfs2 FileObject getContent

List of usage examples for org.apache.commons.vfs2 FileObject getContent

Introduction

On this page you can find example usages of the org.apache.commons.vfs2 FileObject method getContent().

Prototype

FileContent getContent() throws FileSystemException;

Document

Returns this file's content.
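
Before the project-specific examples below, here is a minimal, self-contained sketch (not taken from any of those projects) that writes and reads a file through getContent(). The ram:// path and the sample text are illustrative only; any supported VFS URI works.

import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class GetContentExample {
    public static void main(String[] args) throws Exception {
        FileSystemManager fsManager = VFS.getManager();
        // "ram://example.txt" is an illustrative in-memory path
        FileObject file = fsManager.resolveFile("ram://example.txt");

        // Write through the FileContent's output stream (false = overwrite, not append)
        try (OutputStream out = file.getContent().getOutputStream(false)) {
            out.write("hello VFS".getBytes(StandardCharsets.UTF_8));
        }

        // Read it back through the FileContent's input stream
        try (InputStream in = file.getContent().getInputStream()) {
            System.out.println(IOUtils.toString(in, StandardCharsets.UTF_8));
        }

        // FileContent also exposes metadata such as size and last-modified time
        System.out.println("size: " + file.getContent().getSize());
        file.close();
    }
}

Closing the streams (or the FileObject itself) releases the underlying resources.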

Usage

From source file:org.pentaho.amazon.emr.job.AmazonElasticMapReduceJobExecutor.java

@Override
public File createStagingFile() throws IOException, KettleException {
    // pull down .jar file from VFS
    FileObject jarFile = KettleVFS.getFileObject(buildFilename(jarUrl));
    File tmpFile = File.createTempFile("customEMR", "jar");
    tmpFile.deleteOnExit();
    FileOutputStream tmpFileOut = new FileOutputStream(tmpFile);
    IOUtils.copy(jarFile.getContent().getInputStream(), tmpFileOut);
    localFileUrl = tmpFile.toURI().toURL();
    setS3BucketKey(jarFile);
    return tmpFile;
}

From source file:org.pentaho.amazon.hive.job.AmazonHiveJobExecutor.java

@Override
public File createStagingFile() throws IOException, KettleException {
    // pull down .q file from VFS
    FileObject qFile = KettleVFS.getFileObject(buildFilename(qUrl));
    File tmpFile = File.createTempFile("customEMR", "q");
    tmpFile.deleteOnExit();
    FileOutputStream tmpFileOut = new FileOutputStream(tmpFile);
    IOUtils.copy(qFile.getContent().getInputStream(), tmpFileOut);
    //localFileUrl = tmpFile.toURI().toURL();
    setS3BucketKey(qFile);
    return tmpFile;
}

From source file:org.pentaho.di.core.hadoop.HadoopConfigurationBootstrap.java

public synchronized List<HadoopConfigurationInfo> getHadoopConfigurationInfos()
        throws KettleException, ConfigurationException, IOException {
    List<HadoopConfigurationInfo> result = new ArrayList<>();
    FileObject hadoopConfigurationsDir = resolveHadoopConfigurationsDirectory();
    // If the folder doesn't exist, return an empty list
    if (hadoopConfigurationsDir.exists()) {
        String activeId = getActiveConfigurationId();
        String willBeActiveId = getWillBeActiveConfigurationId();
        for (FileObject childFolder : hadoopConfigurationsDir.getChildren()) {
            if (childFolder.getType() == FileType.FOLDER) {
                String id = childFolder.getName().getBaseName();
                FileObject configPropertiesFile = childFolder.getChild(CONFIG_PROPERTIES);
                if (configPropertiesFile.exists()) {
                    Properties properties = new Properties();
                    properties.load(configPropertiesFile.getContent().getInputStream());
                    result.add(new HadoopConfigurationInfo(id, properties.getProperty("name", id),
                            id.equals(activeId), willBeActiveId.equals(id)));
                }
            }
        }
    }
    return result;
}

From source file:org.pentaho.di.core.logging.Log4jFileAppenderTest.java

@Before
public void before() throws IOException {
    outputStream = mock(OutputStream.class);
    FileContent fileContent = mock(FileContent.class);
    when(fileContent.getOutputStream(anyBoolean())).thenReturn(outputStream);
    FileObject file = mock(FileObject.class);
    when(file.getContent()).thenReturn(fileContent);
    log4jFileAppender = new Log4jFileAppender(file);
}

From source file:org.pentaho.di.job.entries.googledrive.JobEntryGoogleDriveExport.java

protected static void exportFile(Drive driveService, File driveFile, FileObject targetFile,
        GoogleDriveExportFormat exportMapping) throws KettleException {
    Exception savedException = null;
    if (exportMapping != null) {
        FileObject tempFile = KettleVFS.createTempFile(JobEntryGoogleDriveExport.class.getSimpleName(), ".tmp",
                System.getProperty("java.io.tmpdir"));
        try {
            OutputStream fos = tempFile.getContent().getOutputStream();
            BufferedOutputStream bos = new BufferedOutputStream(fos);
            try {
                driveService.files().export(driveFile.getId(), exportMapping.getMimeType())
                        .executeMediaAndDownloadTo(bos);
            } catch (IOException e) {
                // Throw this later, we want to close the output stream first
                savedException = new KettleException(
                        BaseMessages.getString(PKG, "GoogleDriveExport.Error.ExportingFile"), e);
            }
            try {
                bos.close();
            } catch (IOException ignore) {
                // Ignore
            }
            try {
                fos.close();
            } catch (IOException ignore) {
                // Ignore
            }
        } catch (IOException e) {
            savedException = new KettleException(
                    BaseMessages.getString(PKG, "GoogleDriveExport.Error.ExportingFile"), e);
        }
        if (tempFile != null) {
            try {
                targetFile.copyFrom(tempFile, Selectors.SELECT_SELF);
            } catch (FileSystemException e) {
                savedException = new KettleException(
                        BaseMessages.getString(PKG, "GoogleDriveExport.Error.MovingFileFromTemp"), e);
            }
        }
        if (savedException != null) {
            try {
                if (targetFile.exists()) {
                    targetFile.delete();
                }
            } catch (FileSystemException ignore) {
                // Ignore, couldn't delete a bad output file
            }
            throw new KettleException(savedException);
        }
    }
}

From source file:org.pentaho.di.job.entries.sftp.SFTPClientIT.java

@Test
public void getFile() throws Exception {
    final byte[] data = "getFile()".getBytes();

    channel.connect();
    channel.put(new ByteArrayInputStream(data), "downloaded.txt");

    client.get(KettleVFS.getFileObject("ram://downloaded.txt"), "downloaded.txt");

    FileObject downloaded = KettleVFS.getFileObject("ram://downloaded.txt");
    assertTrue(downloaded.exists());
    assertTrue(IOUtils.contentEquals(downloaded.getContent().getInputStream(), new ByteArrayInputStream(data)));
}

From source file:org.pentaho.di.job.entries.trans.JobEntryTransIntIT.java

private String createPDI14676Transformation() throws IOException, KettleException {
    // Setup Transformation
    String rowGenStepName = "Generate Rows";
    RowGeneratorMeta rowGenMeta = new RowGeneratorMeta();
    rowGenMeta.setRowLimit(String.valueOf(Integer.MAX_VALUE));
    rowGenMeta.setNeverEnding(true);
    rowGenMeta.setIntervalInMs("0");
    rowGenMeta.allocate(0);

    TransMeta tMeta = TransTestFactory.generateTestTransformation(new Variables(), rowGenMeta, rowGenStepName);

    // Remove the Injector step, as it's not needed for this transformation
    TransHopMeta hopToRemove = tMeta.findTransHop(tMeta.findStep(TransTestFactory.INJECTOR_STEPNAME),
            tMeta.findStep(rowGenStepName));
    tMeta.removeTransHop(tMeta.indexOfTransHop(hopToRemove));
    tMeta.removeStep(tMeta.indexOfStep(tMeta.findStep(TransTestFactory.INJECTOR_STEPNAME)));

    // Write transformation to temp file, for use within a job
    String transFilename = TestUtilities.createEmptyTempFile(this.getClass().getSimpleName() + "_PDI14676_",
            ".ktr");
    FileObject transFile = TestUtils.getFileObject(transFilename);
    OutputStream outStream = transFile.getContent().getOutputStream();
    PrintWriter pw = new PrintWriter(outStream);
    pw.write(tMeta.getXML());
    pw.close();
    outStream.close();
    return transFilename;
}

From source file:org.pentaho.di.job.entries.zipfile.JobEntryZipFileIT.java

@Test
public void processFile_ReturnsTrue_OnSuccess() throws Exception {
    final String zipPath = "ram://pdi-15013.zip";
    final String content = "temp file";
    final File tempFile = createTempFile(content);
    tempFile.deleteOnExit();
    try {
        Result result = new Result();
        JobEntryZipFile entry = new JobEntryZipFile();
        assertTrue(entry.processRowFile(new Job(), result, zipPath, null, null, tempFile.getAbsolutePath(),
                null, false));
    } finally {
        tempFile.delete();
    }

    FileObject zip = KettleVFS.getFileObject(zipPath);
    assertTrue("Zip archive should be created", zip.exists());

    ByteArrayOutputStream os = new ByteArrayOutputStream();
    IOUtils.copy(zip.getContent().getInputStream(), os);

    ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(os.toByteArray()));
    ZipEntry entry = zis.getNextEntry();
    assertEquals("Input file should be put into the archive", tempFile.getName(), entry.getName());

    os.reset();
    IOUtils.copy(zis, os);
    assertEquals("File's content should be equal to original", content, new String(os.toByteArray()));
}

From source file:org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSDirectory.java

public static VFSDirectory create(String parent, FileObject fileObject, String connection) {
    VFSDirectory vfsDirectory = new VFSDirectory();
    vfsDirectory.setName(fileObject.getName().getBaseName());
    vfsDirectory.setPath(fileObject.getName().getFriendlyURI());
    vfsDirectory.setParent(parent);
    vfsDirectory.setConnection(connection);
    vfsDirectory.setRoot(VFSFileProvider.NAME);
    vfsDirectory.setCanEdit(true);
    vfsDirectory.setHasChildren(true);
    vfsDirectory.setCanAddChildren(true);
    try {
        vfsDirectory.setDate(new Date(fileObject.getContent().getLastModifiedTime()));
    } catch (FileSystemException e) {
        vfsDirectory.setDate(new Date());
    }
    return vfsDirectory;
}

From source file:org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSFile.java

public static VFSFile create(String parent, FileObject fileObject, String connection) {
    VFSFile vfsFile = new VFSFile();
    vfsFile.setName(fileObject.getName().getBaseName());
    vfsFile.setPath(fileObject.getName().getFriendlyURI());
    vfsFile.setParent(parent);
    vfsFile.setConnection(connection);
    vfsFile.setRoot(VFSFileProvider.NAME);
    vfsFile.setCanEdit(true);
    try {
        vfsFile.setDate(new Date(fileObject.getContent().getLastModifiedTime()));
    } catch (FileSystemException ignored) {
        vfsFile.setDate(new Date());
    }
    return vfsFile;
}