Example usage for org.apache.commons.vfs FileObject exists

Introduction

On this page you can find example usages of org.apache.commons.vfs FileObject.exists().

Prototype

public boolean exists() throws FileSystemException;

Document

Determines if this file exists.

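Before the project-specific examples below, here is a minimal, self-contained sketch of the typical call pattern for exists() using the plain Commons VFS API. The URI file:///tmp/example.txt is only a placeholder, and the class name is illustrative rather than taken from any of the projects listed.

import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSystemException;
import org.apache.commons.vfs.FileSystemManager;
import org.apache.commons.vfs.VFS;

public class FileExistsSketch {

    public static void main(String[] args) throws FileSystemException {
        // Obtain the default file system manager and resolve a file by URI
        FileSystemManager fsManager = VFS.getManager();
        FileObject file = fsManager.resolveFile("file:///tmp/example.txt");
        try {
            // exists() returns true only if the file or folder is actually present;
            // it throws FileSystemException on an error determining whether the file exists
            if (file.exists()) {
                System.out.println("Found: " + file.getName().getURI());
            } else {
                System.out.println("Not found: " + file.getName().getURI());
            }
        } finally {
            file.close(); // release any resources held by the file object
        }
    }
}

Most of the Pentaho examples below follow the same pattern, but resolve the FileObject through KettleVFS.getFileObject(...) after variable substitution.
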
Usage

From source file:org.pentaho.di.job.entry.validator.FileExistsValidator.java

public boolean validate(CheckResultSourceInterface source, String propertyName,
        List<CheckResultInterface> remarks, ValidatorContext context) {

    String filename = ValidatorUtils.getValueAsString(source, propertyName);
    VariableSpace variableSpace = getVariableSpace(source, propertyName, remarks, context);
    boolean failIfDoesNotExist = getFailIfDoesNotExist(source, propertyName, remarks, context);

    if (null == variableSpace) {
        return false;
    }

    String realFileName = variableSpace.environmentSubstitute(filename);
    FileObject fileObject = null;
    try {
        fileObject = KettleVFS.getFileObject(realFileName, variableSpace);
        if (fileObject == null || (fileObject != null && !fileObject.exists() && failIfDoesNotExist)) {
            JobEntryValidatorUtils.addFailureRemark(source, propertyName, VALIDATOR_NAME, remarks,
                    JobEntryValidatorUtils.getLevelOnFail(context, VALIDATOR_NAME));
            return false;
        }
        try {
            fileObject.close(); // Just being paranoid
        } catch (IOException ignored) {
            // Ignore close errors
        }
    } catch (Exception e) {
        JobEntryValidatorUtils.addExceptionRemark(source, propertyName, VALIDATOR_NAME, remarks, e);
        return false;
    }
    return true;
}

From source file:org.pentaho.di.repository.filerep.KettleFileRepository.java

public boolean exists(final String name, final RepositoryDirectoryInterface repositoryDirectory,
        final RepositoryObjectType objectType) throws KettleException {
    try {
        FileObject fileObject = KettleVFS
                .getFileObject(calcFilename(repositoryDirectory, name, objectType.getExtension()));
        return fileObject.exists();
    } catch (Exception e) {
        throw new KettleException(e);
    }
}

From source file:org.pentaho.di.repository.filerep.KettleFileRepository.java

public RepositoryObject getObjectInformation(ObjectId objectId, RepositoryObjectType objectType)
        throws KettleException {
    try {
        String filename = calcDirectoryName(null);
        if (objectId.getId().startsWith("/")) {
            filename += objectId.getId().substring(1);
        } else {
            filename += objectId.getId();
        }
        FileObject fileObject = KettleVFS.getFileObject(filename);
        if (!fileObject.exists()) {
            return null;
        }
        FileName fname = fileObject.getName();
        String name = fname.getBaseName();
        if (!Const.isEmpty(fname.getExtension()) && name.length() > fname.getExtension().length()) {
            name = name.substring(0, name.length() - fname.getExtension().length() - 1);
        }

        String filePath = fileObject.getParent().getName().getPath();
        final FileObject baseDirObject = KettleVFS.getFileObject(repositoryMeta.getBaseDirectory());
        final int baseDirObjectPathLength = baseDirObject.getName().getPath().length();
        final String dirPath = baseDirObjectPathLength <= filePath.length()
                ? filePath.substring(baseDirObjectPathLength)
                : "/";
        RepositoryDirectoryInterface directory = loadRepositoryDirectoryTree().findDirectory(dirPath);
        Date lastModified = new Date(fileObject.getContent().getLastModifiedTime());

        return new RepositoryObject(objectId, name, directory, "-", lastModified, objectType, "", false);

    } catch (Exception e) {
        throw new KettleException("Unable to get object information for object with id=" + objectId, e);
    }
}

From source file:org.pentaho.di.scoring.WekaScoringData.java

public static boolean modelFileExists(String modelFile, VariableSpace space) throws Exception {

    modelFile = space.environmentSubstitute(modelFile);
    FileObject modelF = KettleVFS.getFileObject(modelFile);

    return modelF.exists();
}

From source file:org.pentaho.di.scoring.WekaScoringData.java

/**
 * Loads a serialized model. Models can either be binary serialized Java
 * objects, objects deep-serialized to xml, or PMML.
 *
 * @param modelFile a <code>File</code> value
 * @return the model
 * @throws Exception if there is a problem loading the model.
 */
public static WekaScoringModel loadSerializedModel(String modelFile, LogChannelInterface log,
        VariableSpace space) throws Exception {

    Object model = null;
    Instances header = null;
    int[] ignoredAttsForClustering = null;

    modelFile = space.environmentSubstitute(modelFile);
    FileObject modelF = KettleVFS.getFileObject(modelFile);
    if (!modelF.exists()) {
        throw new Exception(BaseMessages.getString(WekaScoringMeta.PKG,
                "WekaScoring.Error.NonExistentModelFile", space.environmentSubstitute(modelFile))); //$NON-NLS-1$
    }

    InputStream is = KettleVFS.getInputStream(modelF);
    BufferedInputStream buff = new BufferedInputStream(is);

    if (modelFile.toLowerCase().endsWith(".xml")) { //$NON-NLS-1$
        // assume it is PMML
        model = PMMLFactory.getPMMLModel(buff, null);

        // we will use the mining schema as the instance structure
        header = ((PMMLModel) model).getMiningSchema().getMiningSchemaAsInstances();

        buff.close();
    } else if (modelFile.toLowerCase().endsWith(".xstreammodel")) { //$NON-NLS-1$
        log.logBasic(BaseMessages.getString(WekaScoringMeta.PKG, "WekaScoringData.Log.LoadXMLModel")); //$NON-NLS-1$

        if (XStream.isPresent()) {
            Vector v = (Vector) XStream.read(buff);

            model = v.elementAt(0);
            if (v.size() == 2) {
                // try and grab the header
                header = (Instances) v.elementAt(1);
            }
            buff.close();
        } else {
            buff.close();
            throw new Exception(
                    BaseMessages.getString(WekaScoringMeta.PKG, "WekaScoringData.Error.CantLoadXMLModel")); //$NON-NLS-1$
        }
    } else {
        InputStream stream = buff;
        if (modelFile.toLowerCase().endsWith(".gz")) { //$NON-NLS-1$
            stream = new GZIPInputStream(buff);
        }
        ObjectInputStream oi = new ObjectInputStream(stream);

        model = oi.readObject();

        // try and grab the header
        header = (Instances) oi.readObject();

        if (model instanceof weka.clusterers.Clusterer) {
            // try and grab any attributes to be ignored during clustering
            try {
                ignoredAttsForClustering = (int[]) oi.readObject();
            } catch (Exception ex) {
                // Don't moan if there aren't any :-)
            }
        }
        oi.close();
    }

    WekaScoringModel wsm = WekaScoringModel.createScorer(model);
    wsm.setHeader(header);
    if (wsm instanceof WekaScoringClusterer && ignoredAttsForClustering != null) {
        ((WekaScoringClusterer) wsm).setAttributesToIgnore(ignoredAttsForClustering);
    }

    wsm.setLog(log);
    return wsm;
}

From source file:org.pentaho.di.shared.SharedObjects.java

public SharedObjects(String sharedObjectsFile) throws KettleXMLException {
    try {
        this.filename = createFilename(sharedObjectsFile);
        this.objectsMap = new Hashtable<SharedEntry, SharedObjectInterface>();

        // Extra information
        FileObject file = KettleVFS.getFileObject(filename);

        // If we have a shared file, load the content, otherwise, just keep this one empty
        if (file.exists()) {
            Document document = XMLHandler.loadXMLFile(file);
            Node sharedObjectsNode = XMLHandler.getSubNode(document, XML_TAG);
            if (sharedObjectsNode != null) {
                List<SlaveServer> privateSlaveServers = new ArrayList<SlaveServer>();
                List<DatabaseMeta> privateDatabases = new ArrayList<DatabaseMeta>();

                NodeList childNodes = sharedObjectsNode.getChildNodes();
                // First load databases & slaves
                //
                for (int i = 0; i < childNodes.getLength(); i++) {
                    Node node = childNodes.item(i);
                    String nodeName = node.getNodeName();

                    SharedObjectInterface isShared = null;

                    if (nodeName.equals(DatabaseMeta.XML_TAG)) {
                        DatabaseMeta sharedDatabaseMeta = new DatabaseMeta(node);
                        isShared = sharedDatabaseMeta;
                        privateDatabases.add(sharedDatabaseMeta);
                    } else if (nodeName.equals(SlaveServer.XML_TAG)) {
                        SlaveServer sharedSlaveServer = new SlaveServer(node);
                        isShared = sharedSlaveServer;
                        privateSlaveServers.add(sharedSlaveServer);
                    }

                    if (isShared != null) {
                        isShared.setShared(true);
                        storeObject(isShared);
                    }
                }

                // Then load the other objects that might reference databases & slaves
                //
                for (int i = 0; i < childNodes.getLength(); i++) {
                    Node node = childNodes.item(i);
                    String nodeName = node.getNodeName();

                    SharedObjectInterface isShared = null;

                    if (nodeName.equals(StepMeta.XML_TAG)) {
                        StepMeta stepMeta = new StepMeta(node, privateDatabases, (IMetaStore) null);
                        stepMeta.setDraw(false); // don't draw it, keep it in the tree.
                        isShared = stepMeta;
                    } else if (nodeName.equals(PartitionSchema.XML_TAG)) {
                        isShared = new PartitionSchema(node);
                    } else if (nodeName.equals(ClusterSchema.XML_TAG)) {
                        isShared = new ClusterSchema(node, privateSlaveServers);
                    }

                    if (isShared != null) {
                        isShared.setShared(true);
                        storeObject(isShared);
                    }
                }
            }
        }
    } catch (Exception e) {
        throw new KettleXMLException(
                BaseMessages.getString(PKG, "SharedOjects.Readingfile.UnexpectedError", sharedObjectsFile), e);
    }
}

From source file:org.pentaho.di.shared.SharedObjects.java

public void saveToFile() throws IOException, KettleException {
    FileObject fileObject = KettleVFS.getFileObject(filename);

    if (fileObject.exists()) {
        // Create a backup before overwriting...
        FileObject backupFile = KettleVFS.getFileObject(filename + ".backup");
        fileObject.moveTo(backupFile);
    }

    OutputStream outputStream = KettleVFS.getOutputStream(fileObject, false);

    PrintStream out = new PrintStream(outputStream);

    out.print(XMLHandler.getXMLHeader(Const.XML_ENCODING));
    out.println("<" + XML_TAG + ">");

    Collection<SharedObjectInterface> collection = objectsMap.values();
    for (SharedObjectInterface sharedObject : collection) {
        out.println(sharedObject.getXML());
    }

    out.println("</" + XML_TAG + ">");

    out.flush();
    out.close();
    outputStream.close();
}

From source file:org.pentaho.di.trans.steps.blockingstep.BlockingStep.java

public void dispose(StepMetaInterface smi, StepDataInterface sdi) {
    if ((data.dis != null) && (data.dis.size() > 0)) {
        for (DataInputStream is : data.dis) {
            BaseStep.closeQuietly(is);
        }
    }
    // remove temp files
    for (int f = 0; f < data.files.size(); f++) {
        FileObject fileToDelete = data.files.get(f);
        try {
            if (fileToDelete != null && fileToDelete.exists()) {
                fileToDelete.delete();
            }
        } catch (FileSystemException e) {
            logError(e.getLocalizedMessage(), e);
        }
    }
    super.dispose(smi, sdi);
}

From source file:org.pentaho.di.trans.steps.csvinput.CsvInputMeta.java

/**
 * @param space
 *          the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions,
        ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore)
        throws KettleException {
    try {
        // The object that we're modifying here is a copy of the original!
        // So let's change the filename from relative to absolute by grabbing the file object...
        // In case the name of the file comes from previous steps, forget about this!
        //
        if (Const.isEmpty(filenameField)) {
            // From : ${Internal.Transformation.Filename.Directory}/../foo/bar.csv
            // To : /home/matt/test/files/foo/bar.csv
            //
            FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(filename), space);

            // If the file doesn't exist, forget about this effort too!
            //
            if (fileObject.exists()) {
                // Convert to an absolute path...
                //
                filename = resourceNamingInterface.nameResource(fileObject, space, true);

                return filename;
            }
        }
        return null;
    } catch (Exception e) {
        throw new KettleException(e);
    }
}

From source file:org.pentaho.di.trans.steps.cubeinput.CubeInputMeta.java

/**
 * @param space
 *          the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions,
        ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore)
        throws KettleException {
    try {
        // The object that we're modifying here is a copy of the original!
        // So let's change the filename from relative to absolute by grabbing the file object...
        //
        // From : ${Internal.Transformation.Filename.Directory}/../foo/bar.data
        // To : /home/matt/test/files/foo/bar.data
        //
        FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(filename), space);

        // If the file doesn't exist, forget about this effort too!
        //
        if (fileObject.exists()) {
            // Convert to an absolute path...
            //
            filename = resourceNamingInterface.nameResource(fileObject, space, true);

            return filename;
        }
        return null;
    } catch (Exception e) {
        throw new KettleException(e);
    }
}