Example usage for org.apache.commons.vfs FileObject getName

Introduction

This page shows example usage of org.apache.commons.vfs.FileObject.getName().

Prototype

public FileName getName();

Document

Returns the name of this file.
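
Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the sources listed on this page) of what the FileName returned by getName() exposes. The URI "file:///tmp/report.csv" is only an illustrative placeholder; resolveFile() does not require the file to exist.

import org.apache.commons.vfs.FileName;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemManager;
import org.apache.commons.vfs.VFS;

public class GetNameDemo {
    public static void main(String[] args) throws Exception {
        FileSystemManager manager = VFS.getManager();
        // Placeholder URI; any URI the manager can resolve works here.
        FileObject file = manager.resolveFile("file:///tmp/report.csv");

        FileName name = file.getName();
        System.out.println("URI:       " + name.getURI());       // file:///tmp/report.csv
        System.out.println("Path:      " + name.getPath());      // /tmp/report.csv
        System.out.println("Base name: " + name.getBaseName());  // report.csv
        System.out.println("Extension: " + name.getExtension()); // csv
    }
}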

Usage

From source file:org.kalypso.dwd.servlet.dwdfilecopy.DWDCopyTask.java

@Override
public void run() {
    FileObject newFile = null;

    try {
        /* Check for the file or the base file (this could be a directory). */
        m_fo = m_fsManager.resolveFile(m_URI);

        if (m_fo.getType() != FileType.FOLDER) {
            System.out.println("The URI " + m_URI + " is no folder.");
            return;
        }

        /* Get all elements in this directory. */
        m_list = m_fo.getChildren();

        if (m_list.length == 0) {
            DWDFileCopyServlet.LOG.warning("There are no files in the Source:" + m_fo.getName().toString());
            return;
        }

        /* Find the newest file. */
        newFile = getNewestFile();

        if (newFile == null)
            return;

        DWDFileCopyServlet.LOG.info("Newest file: " + newFile.getName().getBaseName().toString());
    } catch (FileSystemException e) {
        DWDFileCopyServlet.LOG.warning("Error resolving the URI: " + e.getLocalizedMessage());
        return;
    }

    // Loop over this code twice: if an exception occurs on the first pass, try again.
    for (int i = 0; i < 2; i++) {
        FileOutputStream os = null;
        InputStream is = null;

        try {
            final Date newestDate = getDateFromRaster(newFile, m_dateFormat);
            final Date destFileDate = getDateFromRasterContent(m_destFile);

            DWDFileCopyServlet.LOG.info("Date of newest file: " + newestDate);
            DWDFileCopyServlet.LOG.info("Date of destination file: " + destFileDate);

            // if dest file either does not exist or is not up to date, overwrite with current DWD forecast
            if (destFileDate == null || newestDate.after(destFileDate)) {
                /* Copy the newest file. */
                DWDFileCopyServlet.LOG.info("Copying ...");

                final File dwdDest;

                if (m_destUpdate)
                    dwdDest = new File(m_destFile.getParentFile(), newFile.getName().getBaseName());
                else
                    dwdDest = m_destFile;

                DWDFileCopyServlet.LOG.info("Copying DWD-File \"" + newFile.getName().getBaseName() + "\" to: "
                        + dwdDest.getAbsolutePath());

                os = new FileOutputStream(dwdDest);
                is = newFile.getContent().getInputStream();

                /* The copy operation. */
                IOUtils.copy(is, os);

                os.close();
                is.close();

                // update file contents
                if (m_destUpdate) {
                    DWDFileCopyServlet.LOG.info("Updating " + m_destFile.getName() + " from " + dwdDest);
                    DWDRasterHelper.updateDWDFileContents(dwdDest, m_destFile, m_dateFormat);

                    m_destFile.setLastModified(newFile.getContent().getLastModifiedTime());

                    final boolean deleted = dwdDest.delete();

                    if (!deleted)
                        DWDFileCopyServlet.LOG
                                .warning("Could not delete temp DWD-File \"" + dwdDest.getName() + "\"");
                }
            }

            // delete source file if flag is set
            if (m_srcDel) {
                try {
                    /* Delete the old files. */
                    DWDFileCopyServlet.LOG.info("Deleting " + newFile.getName().getBaseName());

                    final boolean deleted = newFile.delete();
                    if (!deleted)
                        DWDFileCopyServlet.LOG.warning(
                                "Could not delete DWD-File \"" + newFile.getName().getBaseName() + "\"");
                } catch (final IOException e) {
                    DWDFileCopyServlet.LOG
                            .warning("Could not delete DWD-File \"" + newFile.getName().getBaseName() + "\"");

                    if (m_debug)
                        e.printStackTrace();
                }
            }

            // no exception, so end loop here
            return;
        } catch (final IOException e) {
            DWDFileCopyServlet.LOG.warning("Could not copy DWD-File \"" + newFile.getName().getBaseName()
                    + "\" to folder: " + m_destFile.getAbsolutePath() + " due to: " + e.getLocalizedMessage());

            if (m_debug)
                e.printStackTrace();
        } catch (final DWDException e) {
            DWDFileCopyServlet.LOG.warning("DWD-File could not be updated: " + e.getLocalizedMessage());
        } finally {
            IOUtils.closeQuietly(is);
            IOUtils.closeQuietly(os);
        }

        try {
            // pause briefly before the next attempt
            Thread.sleep(500);
        } catch (final InterruptedException ignored) {
            // empty
        }
    }
}

From source file:org.kalypso.dwd.servlet.dwdfilecopy.DWDCopyTask.java

/**
 * Returns the date of the DWD forecast file. The date is encoded in the file name. An example filename in the DWD
 * raster format is "lm_2004_11_10_00"; its pattern would be 'lm_'yyyy'_'MM'_'dd'_'hh.
 */
public static Date getDateFromRaster(final FileObject file, final SimpleDateFormat df) {
    try {
        return df.parse(file.getName().getBaseName());
    } catch (final ParseException e) {
        DWDFileCopyServlet.LOG.fine("DWD-Forecast filename \"" + file.getName().getBaseName()
                + "\" does not have a valid format, should be: " + df.toPattern());
        return null;
    }
}
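
For orientation, a short usage sketch (not part of the project source): the URI below is a hypothetical placeholder, and the date pattern is the one documented in the javadoc above.

// Hypothetical call site for getDateFromRaster().
final SimpleDateFormat df = new SimpleDateFormat("'lm_'yyyy'_'MM'_'dd'_'hh");
final FileObject forecast = VFS.getManager().resolveFile("file:///data/dwd/lm_2004_11_10_00"); // placeholder URI
final Date date = DWDCopyTask.getDateFromRaster(forecast, df); // parses the base name "lm_2004_11_10_00"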

From source file:org.kalypso.simulation.grid.GridJobSubmitter.java

public void submitJob(final FileObject workingDir, final String executable, ISimulationMonitor monitor,
        String... arguments) throws SimulationException {
    if (monitor == null) {
        monitor = new NullSimulationMonitor();
    }

    // prepare streams
    FileObject stdoutFile = null;
    FileObject stderrFile = null;
    int returnCode = IStatus.ERROR;
    try {
        // stream stdout and stderr to files
        stdoutFile = workingDir.resolveFile("stdout");
        stderrFile = workingDir.resolveFile("stderr");

        // create process handle
        final String processFactoryId = "org.kalypso.simulation.gridprocess";
        // TODO: refactor so tempdir is created inside process
        final String tempDirName = workingDir.getName().getBaseName();
        final IProcess process = KalypsoCommonsExtensions.createProcess(processFactoryId, tempDirName,
                executable, arguments);
        // process.setProgressMonitor( new SimulationMonitorAdaptor( monitor ) );
        process.environment().put("OMP_NUM_THREADS", "4");

        final FileContent stdOutContent = stdoutFile.getContent();
        final OutputStream stdOut = stdOutContent.getOutputStream();
        final FileContent stdErrContent = stderrFile.getContent();
        final OutputStream stdErr = stdErrContent.getOutputStream();

        // stage-in files
        final FileSystemManager manager = workingDir.getFileSystem().getFileSystemManager();
        for (final URI einput : m_externalInputs.keySet()) {
            final FileObject inputFile = manager.resolveFile(getUriAsString(einput));
            final String destName = m_externalInputs.get(einput);
            if (destName != null) {
                final FileObject destFile = workingDir.resolveFile(destName);
                VFSUtil.copy(inputFile, destFile, null, true);
            } else {
                VFSUtil.copy(inputFile, workingDir, null, true);
            }
        }

        // start process
        returnCode = process.startProcess(stdOut, stdErr, null, null);
    } catch (final CoreException e) {
        // when process cannot be created
        throw new SimulationException("Could not create process.", e);
    } catch (final ProcessTimeoutException e) {
        e.printStackTrace();
    } catch (final FileNotFoundException e) {
        // can only happen when files cannot be created in tmpdir
        throw new SimulationException("Could not create temporary files for stdout and stderr.", e);
    } catch (final IOException e) {
        throw new SimulationException("Process I/O error.", e);
    } finally {
        // close files
        if (stdoutFile != null) {
            try {
                stdoutFile.getContent().close();
            } catch (final FileSystemException e) {
                // gobble
            }
        }
        if (stderrFile != null) {
            try {
                stderrFile.getContent().close();
            } catch (final FileSystemException e) {
                // gobble
            }
        }
    }

    // process failure handling
    if (returnCode != IStatus.OK) {
        String errString = "Process failed.";
        try {
            final FileContent content = stderrFile.getContent();
            final InputStream input2 = content.getInputStream();
            errString = errString + "\n" + IOUtils.toString(input2);
            content.close();
        } catch (final IOException e) {
            // ignore
        }
        monitor.setFinishInfo(returnCode, errString);
        throw new SimulationException(errString);
    } else {
        monitor.setFinishInfo(IStatus.OK, "Process finished successfully.");
    }
}

From source file:org.mule.transports.vfs.VFSReceiver.java

private void processFile(FileObject fileObject) throws UMOException {
    UMOMessageAdapter msgAdapter = connector.getMessageAdapter(fileObject);
    msgAdapter.setProperty(VFSConnector.PROPERTY_ORIGINAL_FILENAME, fileObject.getName().getPath());
    UMOMessage message = new MuleMessage(msgAdapter);
    routeMessage(message, endpoint.isSynchronous());
}

From source file:org.mule.transports.vfs.VFSReceiver.java

protected boolean hasChanged(FileObject fileObject) {
    boolean changed = false;
    String key = fileObject.getName().getPath();
    long checksum = 0;
    if (checksumMap.containsKey(key)) {
        checksum = ((Long) checksumMap.get(key)).longValue();
    }
    long newChecksum = 0;
    CheckedInputStream checkedInputStream = null;
    try {
        InputStream inputStream = fileObject.getContent().getInputStream();
        checkedInputStream = new CheckedInputStream(inputStream, new Adler32());
        int bufferSize = 1;
        if (inputStream.available() > 0) {
            bufferSize = inputStream.available();
        }
        byte[] buffer = new byte[bufferSize];
        while (checkedInputStream.read(buffer) > -1) {
            // read the stream fully; the Adler32 checksum accumulates as a side effect
        }
        newChecksum = checkedInputStream.getChecksum().getValue();
        if (newChecksum != checksum) {
            if (logger.isDebugEnabled()) {
                logger.debug("calculated a new checksum of " + newChecksum);
            }
            checksumMap.put(key, new Long(newChecksum));
            changed = true;
        }
    } catch (IOException e) {
        connector.handleException(e);
    } finally {
        // close the stream so the underlying file handle is not leaked
        IOUtils.closeQuietly(checkedInputStream);
    }
    return changed;
}

From source file:org.nanocontainer.deployer.NanoContainerDeployer.java

/**
 * Deploys an application.
 *
 * @param applicationFolder the root folder of the application.
 * @param parentClassLoader the classloader that loads the application classes.
 * @param parentContainerRef reference to the parent container (can be used to look up components from a parent container).
 * @return an ObjectReference holding a PicoContainer with the deployed components
 * @throws org.apache.commons.vfs.FileSystemException if the file structure was bad.
 * @throws org.nanocontainer.integrationkit.PicoCompositionException if the deployment failed for some reason.
 */
public ObjectReference deploy(FileObject applicationFolder, ClassLoader parentClassLoader,
        ObjectReference parentContainerRef) throws FileSystemException, ClassNotFoundException {
    ClassLoader applicationClassLoader = new VFSClassLoader(applicationFolder, fileSystemManager,
            parentClassLoader);

    FileObject deploymentScript = getDeploymentScript(applicationFolder);

    ObjectReference result = new SimpleReference();

    String extension = "." + deploymentScript.getName().getExtension();
    Reader scriptReader = new InputStreamReader(deploymentScript.getContent().getInputStream());
    String builderClassName = ScriptedContainerBuilderFactory.getBuilderClassName(extension);

    ScriptedContainerBuilderFactory scriptedContainerBuilderFactory = new ScriptedContainerBuilderFactory(
            scriptReader, builderClassName, applicationClassLoader);
    ContainerBuilder builder = scriptedContainerBuilderFactory.getContainerBuilder();
    builder.buildContainer(result, parentContainerRef, null, true);

    return result;
}
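
A hypothetical call site, only to show the shape of the API; the constructor argument, the application path, and passing null as the parent container reference are assumptions, not taken from the sources on this page.

// Sketch: deploy an exploded application folder that contains META-INF/nanocontainer.<ext>.
FileSystemManager manager = VFS.getManager();
FileObject appFolder = manager.resolveFile("file:///opt/apps/myapp"); // placeholder path
NanoContainerDeployer deployer = new NanoContainerDeployer(manager);  // assumed constructor
ObjectReference containerRef = deployer.deploy(appFolder, Thread.currentThread().getContextClassLoader(), null);
Object pico = containerRef.get(); // per the javadoc, the reference holds the deployed PicoContainer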

From source file:org.nanocontainer.deployer.NanoContainerDeployer.java

private FileObject getDeploymentScript(FileObject applicationFolder) throws FileSystemException {
    final FileObject metaInf = applicationFolder.getChild("META-INF");
    if (metaInf == null) {
        throw new FileSystemException("Missing META-INF folder in " + applicationFolder.getName().getPath());
    }
    final FileObject[] nanocontainerScripts = metaInf.findFiles(new FileSelector() {
        public boolean includeFile(FileSelectInfo fileSelectInfo) throws Exception {
            return fileSelectInfo.getFile().getName().getBaseName().startsWith("nanocontainer");
        }

        public boolean traverseDescendents(FileSelectInfo fileSelectInfo) throws Exception {
            return true;
        }
    });
    if (nanocontainerScripts == null || nanocontainerScripts.length < 1) {
        throw new FileSystemException("No deployment script (nanocontainer.[groovy|bsh|js|py|xml]) in "
                + applicationFolder.getName().getPath() + "/META-INF");
    }
    return nanocontainerScripts[0];
}

From source file:org.objectweb.proactive.extensions.dataspaces.vfs.VFSSpacesMountManagerImpl.java

/**
 * Internal method for resolving a file; mounts the file system if it is not mounted yet.
 * @param uri virtual uri of the file
 * @param ownerActiveObjectId id of the active object requesting this file
 * @param spaceRootFOUri root file system to use
 * @return the resolved file as a DataSpacesFileObject
 * @throws FileSystemException
 */
private DataSpacesFileObject doResolveFile(final DataSpacesURI uri, final String ownerActiveObjectId,
        String spaceRootFOUri) throws FileSystemException {

    DataSpacesURI spacePart = uri.getSpacePartOnly();

    if (spaceRootFOUri != null) {
        ensureFileSystemIsMounted(spacePart, spaceRootFOUri);
    } else {
        try {
            readLock.lock();
            LinkedHashSet<String> los = accessibleFileObjectUris.get(spacePart);
            spaceRootFOUri = los.iterator().next();
        } finally {
            readLock.unlock();
        }
        ensureFileSystemIsMounted(spacePart, spaceRootFOUri);
    }

    final String relativeToSpace = uri.getRelativeToSpace();
    try {
        readLock.lock();

        if (!mountedSpaces.containsKey(spacePart)) {
            throw new FileSystemException("Could not access file that should exist (be mounted)");
        }

        final ConcurrentHashMap<String, FileObject> spaceRoots = mountedSpaces.get(spacePart);
        FileObject spaceRoot = spaceRoots.get(spaceRootFOUri);

        final FileObject file;
        // the dataspace "file name" (it is actually a file path) is computed using the virtual space root
        final FileName dataSpaceVFSFileName = spaceRoot.getName();
        try {
            if (relativeToSpace == null)
                file = spaceRoot;
            else
                file = spaceRoot.resolveFile(relativeToSpace);
            final DataSpacesLimitingFileObject limitingFile = new DataSpacesLimitingFileObject(file, spacePart,
                    spaceRoot.getName(), ownerActiveObjectId);
            return new VFSFileObjectAdapter(limitingFile, spacePart, dataSpaceVFSFileName,
                    new ArrayList<String>(accessibleFileObjectUris.get(spacePart)), spaceRootFOUri, this,
                    ownerActiveObjectId);
        } catch (org.apache.commons.vfs.FileSystemException x) {
            logger.error("[VFSMountManager] Could not access file within a space: " + uri);

            throw new FileSystemException(x);
        } catch (FileSystemException e) {
            ProActiveLogger.logImpossibleException(logger, e);
            throw new ProActiveRuntimeException(e);
        }

    } finally {
        readLock.unlock();
    }
}

From source file:org.openbi.kettle.plugins.avrooutput.AvroOutput.java

private void createParentFolder(String filename) throws Exception {
    // Check for parent folder
    FileObject parentfolder = null;
    try {
        // Get parent folder
        parentfolder = getFileObject(filename).getParent();
        if (parentfolder.exists()) {
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "AvroOutput.Log.ParentFolderExist",
                        parentfolder.getName()));
            }
        } else {
            if (isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "AvroOutput.Log.ParentFolderNotExist",
                        parentfolder.getName()));
            }
            if (meta.getCreateParentFolder()) {
                parentfolder.createFolder();
                if (isDetailed()) {
                    logDetailed(BaseMessages.getString(PKG, "AvroOutput.Log.ParentFolderCreated",
                            parentfolder.getName()));
                }
            } else {
                throw new KettleException(BaseMessages.getString(PKG,
                        "AvroOutput.Log.ParentFolderNotExistCreateIt", parentfolder.getName(), filename));
            }
        }

    } finally {
        if (parentfolder != null) {
            try {
                parentfolder.close();
            } catch (Exception ex) {
                // Ignore
            }
        }
    }
}

From source file:org.openbi.kettle.plugins.parquetoutput.ParquetOutput.java

public void openNewFile(String baseFilename) throws KettleException {
    if (baseFilename == null) {
        throw new KettleFileException(BaseMessages.getString(PKG, "ParquetOutput.Exception.FileNameNotSet"));
    }

    String filename = buildFilename(environmentSubstitute(baseFilename));

    try {
        // Check for parent folder creation only if the user asks for it
        //
        if (meta.isCreateParentFolder()) {
            createParentFolder(filename);
        }

        if (log.isDetailed()) {
            logDetailed("Opening output file in default encoding");
        }

        String compressionCodec = environmentSubstitute(meta.getCompressionCodec());

        if (Const.isEmpty(compressionCodec) || compressionCodec.equalsIgnoreCase("none")) {
            compressionCodec = "uncompressed";
        }

        CompressionCodecName compressionCodecName = CompressionCodecName.fromConf(compressionCodec);

        // Convert to bytes
        int blockSize = Const.toInt(environmentSubstitute(meta.getBlockSize()), -1) * 1024 * 1024;

        if (blockSize <= 0) {
            throw new KettleException("Error setting block size.  Must be greater than 0.");
        }

        int pageSize = Const.toInt(environmentSubstitute(meta.getPageSize()), -1) * 1024;

        if (pageSize <= 0) {
            throw new KettleException("Error setting page size.  Must be greater than 0.");
        }

        /* HadoopConfiguration hadoopConfiguration =
           HadoopConfigurationBootstrap.getHadoopConfigurationProvider().getActiveConfiguration();
         HadoopShim shim = hadoopConfiguration.getHadoopShim();
         Configuration conf = shim.createConfiguration();
                
         FileSystem fs = shim.getFileSystem( conf );
         Path path = fs.asPath( file.getName().getURI() );
         */

        FileObject file = KettleVFS.getFileObject(filename);

        //Path path = shim.getFileSystem( conf ).asPath( file.getName().getURI() );
        Path path = new Path(file.getName().getURI());

        if (meta.isCleanOutput() && file.exists()) {
            file.delete();
        }

        data.parquetWriters
                .add(new AvroParquetWriter(path, data.avroSchema, compressionCodecName, blockSize, pageSize));
        data.openFiles.add(baseFilename);

        if (log.isDetailed()) {
            logDetailed("Opened new file with name [" + filename + "]");
        }
    } catch (Exception e) {
        throw new KettleException("Error opening new file : " + e.toString(), e);
    }

    data.splitnr++;

    if (meta.isAddToResult()) {
        // Add this to the result file names...
        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL,
                getFileObject(filename, getTransMeta()), getTransMeta().getName(), getStepname());
        resultFile.setComment(BaseMessages.getString(PKG, "AvroOutput.AddResultFile"));
        addResultFile(resultFile);
    }
}