Example usage for org.apache.commons.vfs FileObject getName

List of usage examples for org.apache.commons.vfs FileObject getName

Introduction

On this page you can find an example usage for org.apache.commons.vfs FileObject getName.

Prototype

public FileName getName();

Source Link

Document

Returns the name of this file.

Usage

From source file:org.pentaho.di.ui.trans.steps.textfileinput.TextFileInputDialog.java

/**
 * Reads the first {@code nrlines} lines of the first configured input file so the
 * dialog can show a preview. When {@code nrlines} is 0 the file is read to the end.
 *
 * @param nrlines     maximum number of lines to return (0 = no limit)
 * @param skipHeaders when true, document-header and column-header lines are consumed
 *                    first and excluded from the returned list
 * @return the lines read, in file order; empty when no input files are configured
 * @throws KettleException when the file cannot be opened or read
 */
private List<String> getFirst(int nrlines, boolean skipHeaders) throws KettleException {
    TextFileInputMeta meta = new TextFileInputMeta();
    getInfo(meta); // capture the current dialog settings into a fresh meta
    FileInputList textFileList = meta.getTextFileList(transMeta);

    InputStream fi;
    CompressionInputStream f = null;
    StringBuilder lineStringBuilder = new StringBuilder(256);
    int fileFormatType = meta.getFileFormatTypeNr();

    List<String> retval = new ArrayList<String>();

    // Only the FIRST file of the list is previewed.
    if (textFileList.nrOfFiles() > 0) {
        FileObject file = textFileList.getFile(0);
        try {
            fi = KettleVFS.getInputStream(file);

            // Wrap the raw stream in the decompressor matching the configured compression.
            CompressionProvider provider = CompressionProviderFactory.getInstance()
                    .createCompressionProviderInstance(meta.getFileCompression());
            f = provider.createInputStream(fi);

            // Use the configured encoding when set, otherwise the platform default.
            InputStreamReader reader;
            if (meta.getEncoding() != null && meta.getEncoding().length() > 0) {
                reader = new InputStreamReader(f, meta.getEncoding());
            } else {
                reader = new InputStreamReader(f);
            }
            EncodingType encodingType = EncodingType.guessEncodingType(reader.getEncoding());

            int linenr = 0;
            // NOTE(review): maxnr adds the header-line count even when skipHeaders already
            // consumed those lines below — confirm the resulting over-read is intended.
            int maxnr = nrlines + (meta.hasHeader() ? meta.getNrHeaderLines() : 0);

            if (skipHeaders) {
                // Skip the document header lines first (paged layout only), so the
                // preview starts at the data.
                if (meta.isLayoutPaged() && meta.getNrLinesDocHeader() > 0) {
                    int skipped = 0;
                    String line = TextFileInput.getLine(log, reader, encodingType, fileFormatType,
                            lineStringBuilder);
                    while (line != null && skipped < meta.getNrLinesDocHeader() - 1) {
                        skipped++;
                        line = TextFileInput.getLine(log, reader, encodingType, fileFormatType,
                                lineStringBuilder);
                    }
                }

                // Then skip the column header lines, if the file has any.
                if (meta.hasHeader() && meta.getNrHeaderLines() > 0) {
                    int skipped = 0;
                    String line = TextFileInput.getLine(log, reader, encodingType, fileFormatType,
                            lineStringBuilder);
                    while (line != null && skipped < meta.getNrHeaderLines() - 1) {
                        skipped++;
                        line = TextFileInput.getLine(log, reader, encodingType, fileFormatType,
                                lineStringBuilder);
                    }
                }
            }

            // Collect lines until EOF or the limit is reached (nrlines == 0 disables it).
            String line = TextFileInput.getLine(log, reader, encodingType, fileFormatType, lineStringBuilder);
            while (line != null && (linenr < maxnr || nrlines == 0)) {
                retval.add(line);
                linenr++;
                line = TextFileInput.getLine(log, reader, encodingType, fileFormatType, lineStringBuilder);
            }
        } catch (Exception e) {
            throw new KettleException(
                    BaseMessages.getString(PKG, "TextFileInputDialog.Exception.ErrorGettingFirstLines",
                            "" + nrlines, file.getName().getURI()),
                    e);
        } finally {
            // Closing the compression stream also releases the underlying VFS stream.
            // NOTE(review): the InputStreamReader itself is never closed — harmless here
            // because it only wraps f, but worth confirming.
            try {
                if (f != null) {
                    f.close();
                }
            } catch (Exception e) {
                // Ignore errors
            }
        }
    }

    return retval;
}

From source file:org.pentaho.di.www.AddExportServlet.java

/**
 * Accepts an exported job/transformation ZIP uploaded in the request body, stores it
 * in a temporary file, optionally opens and registers the contained job or
 * transformation on this Carte server, and replies with an XML {@code WebResult}
 * carrying the file URL and the new Carte object id.
 *
 * Request parameters: {@code type} (job or trans) and {@code load} (the resource
 * inside the ZIP to open; when absent the archive is only stored).
 *
 * @param request  the upload request; its body is the ZIP content
 * @param response receives the XML result (always HTTP 200, errors are in the body)
 */
public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    if (isJettyMode() && !request.getRequestURI().startsWith(CONTEXT_PATH)) {
        return; // not a request for this servlet
    }

    if (log.isDebug()) {
        logDebug("Addition of export requested");
    }

    PrintWriter out = response.getWriter();
    InputStream in = request.getInputStream(); // read from the client
    if (log.isDetailed()) {
        logDetailed("Encoding: " + request.getCharacterEncoding());
    }

    boolean isJob = TYPE_JOB.equalsIgnoreCase(request.getParameter(PARAMETER_TYPE));
    String load = request.getParameter(PARAMETER_LOAD); // the resource to load

    response.setContentType("text/xml");
    out.print(XMLHandler.getXMLHeader());

    response.setStatus(HttpServletResponse.SC_OK);

    OutputStream outputStream = null;

    try {
        // Stream the request body byte-by-byte into a temporary ZIP file.
        // NOTE(review): the temp file is never deleted — confirm cleanup happens elsewhere.
        FileObject tempFile = KettleVFS.createTempFile("export", ".zip", System.getProperty("java.io.tmpdir"));
        outputStream = KettleVFS.getOutputStream(tempFile, false);

        // Pass the input directly to a temporary file
        //
        // int size = 0;
        int c;
        while ((c = in.read()) != -1) {
            outputStream.write(c);
            // size++;
        }

        outputStream.flush();
        outputStream.close();
        outputStream = null; // don't close it twice

        String archiveUrl = tempFile.getName().toString();
        String fileUrl = null;

        String carteObjectId = null;
        SimpleLoggingObject servletLoggingObject = new SimpleLoggingObject(CONTEXT_PATH,
                LoggingObjectType.CARTE, null);

        // Now open the top level resource...
        //
        if (!Const.isEmpty(load)) {

            // Address the requested resource inside the ZIP via the VFS zip: scheme.
            fileUrl = "zip:" + archiveUrl + "!" + load;

            if (isJob) {
                // Open the job from inside the ZIP archive
                //
                KettleVFS.getFileObject(fileUrl);

                JobMeta jobMeta = new JobMeta(fileUrl, null); // never with a repository
                // Also read the execution configuration information
                //
                String configUrl = "zip:" + archiveUrl + "!" + Job.CONFIGURATION_IN_EXPORT_FILENAME;
                Document configDoc = XMLHandler.loadXMLFile(configUrl);
                JobExecutionConfiguration jobExecutionConfiguration = new JobExecutionConfiguration(
                        XMLHandler.getSubNode(configDoc, JobExecutionConfiguration.XML_TAG));

                carteObjectId = UUID.randomUUID().toString();
                servletLoggingObject.setContainerObjectId(carteObjectId);
                servletLoggingObject.setLogLevel(jobExecutionConfiguration.getLogLevel());

                Job job = new Job(null, jobMeta, servletLoggingObject);

                // Do we need to expand the job when it's running?
                // Note: the plugin (Job and Trans) job entries need to call the delegation listeners in the parent job.
                //
                if (jobExecutionConfiguration.isExpandingRemoteJob()) {
                    job.addDelegationListener(new CarteDelegationHandler(getTransformationMap(), getJobMap()));
                }

                // store it all in the map...
                //
                synchronized (getJobMap()) {
                    getJobMap().addJob(job.getJobname(), carteObjectId, job,
                            new JobConfiguration(jobMeta, jobExecutionConfiguration));
                }

                // Apply the execution configuration...
                //
                log.setLogLevel(jobExecutionConfiguration.getLogLevel());
                job.setArguments(jobExecutionConfiguration.getArgumentStrings());
                jobMeta.injectVariables(jobExecutionConfiguration.getVariables());

                // Also copy the parameters over...
                //
                Map<String, String> params = jobExecutionConfiguration.getParams();
                for (String param : params.keySet()) {
                    String value = params.get(param);
                    jobMeta.setParameterValue(param, value);
                }

            } else {
                // Open the transformation from inside the ZIP archive
                //
                TransMeta transMeta = new TransMeta(fileUrl);
                // Also read the execution configuration information
                //
                String configUrl = "zip:" + archiveUrl + "!" + Trans.CONFIGURATION_IN_EXPORT_FILENAME;
                Document configDoc = XMLHandler.loadXMLFile(configUrl);
                TransExecutionConfiguration executionConfiguration = new TransExecutionConfiguration(
                        XMLHandler.getSubNode(configDoc, TransExecutionConfiguration.XML_TAG));

                carteObjectId = UUID.randomUUID().toString();
                servletLoggingObject.setContainerObjectId(carteObjectId);
                servletLoggingObject.setLogLevel(executionConfiguration.getLogLevel());

                Trans trans = new Trans(transMeta, servletLoggingObject);

                // store it all in the map...
                // NOTE(review): unlike the job branch this addTransformation is not
                // synchronized — confirm the map is internally thread-safe.
                getTransformationMap().addTransformation(trans.getName(), carteObjectId, trans,
                        new TransConfiguration(transMeta, executionConfiguration));
            }
        } else {
            // Nothing to open: just report where the uploaded archive was stored.
            fileUrl = archiveUrl;
        }

        out.println(new WebResult(WebResult.STRING_OK, fileUrl, carteObjectId));
    } catch (Exception ex) {
        // Errors are reported in the XML body, not as an HTTP error status.
        out.println(new WebResult(WebResult.STRING_ERROR, Const.getStackTracker(ex)));
    } finally {
        if (outputStream != null) {
            outputStream.close();
        }
    }
}

From source file:org.pentaho.hdfs.vfs.HDFSFileObject.java

/** Renames this file by delegating to the HDFS client, translating both VFS names into Hadoop paths. */
protected void doRename(FileObject newfile) throws Exception {
    final Path source = new Path(getName().getPath());
    final Path target = new Path(newfile.getName().getPath());
    hdfs.rename(source, target);
}

From source file:org.pentaho.reporting.libraries.pensol.VfsTest.java

/** Verifies that resolving solution paths yields the expected file type and base name. */
public void testInitialLoading() throws FileSystemException {
    // A path that does not exist resolves to an IMAGINARY file object.
    final FileObject missing = VFS.getManager().resolveFile("test-solution://localhost/non-existent");
    assertFalse(missing.exists());
    assertEquals(FileType.IMAGINARY, missing.getType());
    assertEquals("non-existent", missing.getName().getBaseName());

    // An existing solution directory resolves as a FOLDER.
    final FileObject folder = VFS.getManager().resolveFile("test-solution://localhost/bi-developers");
    assertTrue(folder.exists());
    assertEquals(FileType.FOLDER, folder.getType());
    assertEquals("bi-developers", folder.getName().getBaseName());

    // An existing action document resolves as a FILE.
    final FileObject action = VFS.getManager()
            .resolveFile("test-solution://localhost/bi-developers/analysis/query1.xaction");
    assertTrue(action.exists());
    assertEquals(FileType.FILE, action.getType());
    assertEquals("query1.xaction", action.getName().getBaseName());
}

From source file:org.pentaho.s3.S3Test.java

/**
 * Prints this file's base name indented four spaces per nesting level, then
 * recurses into its children when it is a folder.
 */
private void printFileObject(FileObject fileObject, int depth) throws Exception {
    int level = depth;
    while (level-- > 0) {
        System.out.print("    ");
    }
    System.out.println(fileObject.getName().getBaseName());

    if (fileObject.getType() == FileType.FOLDER) {
        for (FileObject child : fileObject.getChildren()) {
            printFileObject(child, depth + 1);
        }
    }
}

From source file:org.pentaho.s3.vfs.S3FileObject.java

/**
 * Renames this S3 object to the new file's base name and invalidates the cached
 * child listing of the bucket. Folders cannot be renamed through this provider.
 */
protected void doRename(FileObject newfile) throws Exception {
    if (getType().equals(FileType.FOLDER)) {
        throw new FileSystemException("vfs.provider/rename-not-supported.error");
    }
    final String newKey = newfile.getName().getBaseName();
    final S3Object target = getS3Object(false);
    target.setKey(newKey);
    fileSystem.getS3Service().renameObject(getS3BucketName(), getName().getBaseName(), target);
    // Drop the cached children so the old key no longer appears in listings.
    s3ChildrenMap.remove(getS3BucketName());
}

From source file:org.richfaces.cdk.rd.JarResourceScanner.java

/** Returns true as soon as any configured pattern matches the file's URI. */
protected boolean isAcceptable(FileObject fileObject) {
    final String uri = fileObject.getName().getURI();
    for (String pattern : patterns) {
        if (SelectorUtils.matchPath(pattern, uri)) {
            return true;
        }
    }
    return false;
}

From source file:org.richfaces.cdk.rd.mojo.ResourceDependencyMojo.java

/**
 * Scans the project's dependency jars for resource-dependency configuration files,
 * merges the scripts and styles referenced by the web sources into single output
 * files, and registers the output directory as a project resource.
 *
 * Fix: the thread's context ClassLoader is now restored in a {@code finally}
 * block — previously any exception thrown after the swap left the custom
 * ClassLoader installed on the calling thread.
 *
 * @throws MojoExecutionException wrapping any failure during scanning or merging
 */
public void execute() throws MojoExecutionException, MojoFailureException {

    try {
        Set<Artifact> artifacts = resolveDependenciesArtifacts();

        Digester defaultDigester = createDigester();

        Map<String, Components> components = new HashMap<String, Components>();

        if (xmlConfigPatterns == null) {
            xmlConfigPatterns = PluginUtils.DEFAULT_CONFIG_PATTERNS;
        }

        // Collect component definitions from every dependency jar.
        for (Artifact artifact : artifacts) {
            FileObject jar = resolveArtifact(artifact);
            getLog().info("Process jar: " + jar.getName().getFriendlyURI());
            FileObject[] configs = PluginUtils.resolveConfigsFromJar(jar, xmlConfigPatterns);

            if (configs.length == 0) {
                getLog().info("no dependecy files found");
            } else {
                getLog().info("next dependency files found");
                for (FileObject config : configs) {
                    getLog().info(config.getName().getBaseName());
                }
            }

            components.putAll(PluginUtils.processConfigs(configs, defaultDigester));
        }

        if (!webSourceDirectory.exists()) {
            webSourceDirectory.mkdirs();
        }

        // Find which components the web sources actually use.
        ComponentsHandler handler = findComponents(webSourceDirectory, components, xhtmlIncludes,
                xhtmlExcludes);

        ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
        ClassLoader contextClassLoader = createClassLoader();
        Thread.currentThread().setContextClassLoader(contextClassLoader);
        try {
            if (contextClassLoader != null) {

                Set<String> scripts = handler.getScripts();

                // Normalize the target file name and make sure its directory exists.
                scriptFilePath = scriptFilePath.endsWith(".js") ? scriptFilePath : scriptFilePath + ".js";
                File scriptFile = new File(outputResourceDirectory, scriptFilePath);
                if (!scriptFile.exists()) {
                    File parent = scriptFile.getParentFile();
                    if (parent != null && !parent.exists()) {
                        parent.mkdirs();
                    }
                }

                ScriptAssembler scriptAssembler = new ScriptAssembler(getLog());

                if (!scripts.isEmpty()) {
                    getLog().info("Start merge scripts to the: " + scriptFile.getPath());
                    mergeResources(scriptFile, scriptAssembler, beforeScriptIncludes, afterScriptIncludes, scripts);
                }

                Set<String> styles = handler.getStyles();

                styleFilePath = styleFilePath.endsWith(".xcss") ? styleFilePath : styleFilePath + ".xcss";

                File styleFile = new File(outputResourceDirectory, styleFilePath);
                File parent = styleFile.getParentFile();
                if (parent != null && !parent.exists()) {
                    parent.mkdirs();
                }

                StyleAssembler styleAssembler = new StyleAssembler(getLog());
                styleAssembler.setVelocityComponent(velocity);

                if (!styles.isEmpty()) {
                    getLog().info("Start merge styles to the: " + styleFile.getPath());
                    mergeResources(styleFile, styleAssembler, beforeStyleIncludes, afterStyleIncludes, styles);
                }

                // Expose the merged resources to the build.
                Resource resource = new Resource();
                resource.setDirectory(outputResourceDirectory.getPath());
                project.addResource(resource);
            }
        } finally {
            // Always undo the ClassLoader swap, even when merging fails.
            Thread.currentThread().setContextClassLoader(oldClassLoader);
        }

    } catch (Exception e) {
        getLog().error("Error generate resource", e);
        throw new MojoExecutionException(e.getMessage(), e);
    }

}

From source file:org.sonatype.gshell.commands.bsf.ScriptCommand.java

/** Infers the scripting language from the file's extension via BSF. */
private String detectLanguage(final FileObject file) throws Exception {
    assert file != null;

    final String baseName = file.getName().getBaseName();
    return BSFManager.getLangFromFilename(baseName);
}

From source file:org.sonatype.gshell.commands.bsf.ScriptCommand.java

/**
 * Resolves the script file relative to the shell's current directory, validates
 * it, and evaluates its content with a BSF engine for the (detected or
 * configured) language. The engine is terminated and the file closed afterwards.
 */
private Object exec(final CommandContext context) throws Exception {
    assert context != null;
    final IO io = context.getIo();

    final FileObject cwd = fileSystemAccess.getCurrentDirectory(context.getVariables());
    final FileObject file = fileSystemAccess.resolveFile(cwd, path);

    // Guard clauses: the target must exist, carry content, and be readable.
    if (!file.exists()) {
        io.error("File not found: {}", file.getName()); // TODO: i18n
        return Result.FAILURE;
    }
    if (!file.getType().hasContent()) {
        io.error("File has not content: {}", file.getName()); // TODO: i18n
        return Result.FAILURE;
    }
    if (!file.isReadable()) {
        io.error("File is not readable: {}", file.getName()); // TODO: i18n
        return Result.FAILURE;
    }

    if (language == null) {
        language = detectLanguage(file);
    }

    final BSFEngine engine = createEngine(context);

    final String script = new String(FileUtil.getContent(file));

    log.info("Evaluating file ({}): {}", language, path); // TODO: i18n

    try {
        return engine.eval(file.getName().getBaseName(), 1, 1, script);
    } finally {
        engine.terminate();
        file.close();
    }
}