Example usage for org.apache.commons.configuration PropertiesConfiguration setProperty

List of usage examples for org.apache.commons.configuration PropertiesConfiguration setProperty

Introduction

On this page you can find example usages of org.apache.commons.configuration PropertiesConfiguration setProperty.

Prototype

public void setProperty(String key, Object value) 

Source Link

Document

Sets a new value for the specified property.

Usage

From source file:fr.inria.atlanmod.neoemf.data.AbstractPersistenceBackendFactory.java

/**
 * Creates and saves the NeoEMF configuration.
 *
 * @param directory the directory where the configuration must be stored.
 *
 * @throws InvalidDataStoreException if the configuration file cannot be loaded or created
 */
protected void processGlobalConfiguration(File directory) throws InvalidDataStoreException {
    Path path = Paths.get(directory.getAbsolutePath()).resolve(CONFIG_FILE);

    final PropertiesConfiguration configuration;
    try {
        configuration = new PropertiesConfiguration(path.toFile());
    } catch (ConfigurationException e) {
        throw new InvalidDataStoreException(e);
    }

    // Record this factory's backend name only on first creation; never overwrite it.
    if (!configuration.containsKey(BACKEND_PROPERTY)) {
        configuration.setProperty(BACKEND_PROPERTY, getName());
    }

    try {
        configuration.save();
        NeoLogger.debug("Configuration stored at " + path);
    } catch (ConfigurationException e) {
        // Failing to persist the configuration is considered a minor error,
        // so it is logged without raising an exception.
        NeoLogger.warn(e);
    }
}

From source file:com.carmatech.maven.MergeOperation.java

/**
 * Overrides the named property with {@code newValue} when it is non-null,
 * logging where the value came from; a null value leaves the property untouched.
 */
private void overrideIfNotNull(final PropertiesConfiguration properties, final String name,
        final String variableName, String source, final Object newValue) {
    if (newValue == null) {
        return;
    }
    logger.info("Property [" + name + "] parameterized with [" + variableName + "] was set to [" + newValue
            + "] using " + source + " properties.");
    properties.setProperty(name, newValue);
}

From source file:com.cloud.utils.crypt.EncryptionSecretKeyChanger.java

/**
 * Re-encrypts the sensitive entries of db.properties using the new management-server key.
 *
 * @param dbPropsFile the db.properties file to rewrite
 * @param dbProps     the currently loaded (decrypted) properties
 * @param newMSKey    the new management-server key used for encryption
 * @param newDBKey    the new database key to store (may be null/empty to skip)
 * @return true on success, false if loading, encrypting, or saving failed
 */
private boolean migrateProperties(File dbPropsFile, Properties dbProps, String newMSKey, String newDBKey) {
    System.out.println("Migrating db.properties..");
    StandardPBEStringEncryptor msEncryptor = new StandardPBEStringEncryptor();
    initEncryptor(msEncryptor, newMSKey);

    try {
        PropertiesConfiguration newDBProps = new PropertiesConfiguration(dbPropsFile);
        if (newDBKey != null && !newDBKey.isEmpty()) {
            newDBProps.setProperty("db.cloud.encrypt.secret", "ENC(" + msEncryptor.encrypt(newDBKey) + ")");
        }
        // The two password keys follow the same encrypt-and-wrap pattern.
        reencryptIfPresent(newDBProps, msEncryptor, dbProps, "db.cloud.password");
        reencryptIfPresent(newDBProps, msEncryptor, dbProps, "db.usage.password");
        newDBProps.save(dbPropsFile.getAbsolutePath());
    } catch (Exception e) {
        // Best-effort migration: report the failure and let the caller decide.
        e.printStackTrace();
        return false;
    }
    System.out.println("Migrating db.properties Done.");
    return true;
}

/**
 * Copies {@code key} from {@code source} into {@code target} as an {@code ENC(...)}
 * value when the source value is present and non-empty; otherwise does nothing.
 */
private static void reencryptIfPresent(PropertiesConfiguration target, StandardPBEStringEncryptor encryptor,
        Properties source, String key) {
    String value = source.getProperty(key);
    if (value != null && !value.isEmpty()) {
        target.setProperty(key, "ENC(" + encryptor.encrypt(value) + ")");
    }
}

From source file:cross.datastructures.pipeline.ResultAwareCommandPipeline.java

@Override
public void before() {
    // Lifecycle hook invoked before the pipeline runs: restores results from a
    // previous workflow invocation when the input is unchanged, otherwise wipes
    // stale output; then starts the remote execution server when remote
    // execution is configured.
    log.info("Looking for results from previous workflow invocation...");
    if (getWorkflow().getWorkflowXmlFile().isFile()) {
        log.info("Found previous workflow.xml file!");
        if (isInputUpToDate(getInput(), getWorkflow())) {
            log.info("Input data has not changed, restoring workflow!");
            getWorkflow().load(getWorkflow().getWorkflowXmlFile());
        } else {
            // Input changed: previous results are invalid, so delete them before rerunning.
            try {
                log.info("Input data has changed, deleting previous workflow output!");
                FileUtils.deleteDirectory(getWorkflow().getOutputDirectory());
                getWorkflow().clearResults();
            } catch (IOException ex) {
                throw new RuntimeException(
                        "Deletion of directory " + getWorkflow().getOutputDirectory() + " failed!", ex);
            }
            // NOTE(review): mkdirs() return value is ignored — a failure to recreate
            // the output directory would only surface later; consider checking it.
            getWorkflow().getOutputDirectory().mkdirs();
        }
    } else {
        log.info("Did not find results from a previous workflow instance!");
    }
    //update input file hashes
    updateWorkflowInputHashes(getInput(), getWorkflow());
    if (getExecutionServer() == null && !getWorkflow().isExecuteLocal()) {
        log.info("Launching execution infrastructure!");
        setExecutionServer(ComputeServerFactory.getComputeServer());
        // The compute-host jar is expected under the maltcms.home system property.
        File computeHostJarLocation = new File(System.getProperty("maltcms.home"), "maltcms.jar");
        if (!computeHostJarLocation.exists() || !computeHostJarLocation.isFile()) {
            throw new ExitVmException("Could not locate maltcms.jar in " + System.getProperty("maltcms.home"));
        }
        final PropertiesConfiguration cfg = new PropertiesConfiguration();
        //set execution type
        cfg.setProperty(ConfigurationKeys.KEY_EXECUTION_MODE, ExecutionType.DRMAA);
        //set location of compute host jar
        cfg.setProperty(ConfigurationKeys.KEY_PATH_TO_COMPUTEHOST_JAR, computeHostJarLocation);
        //exit to console when master server shuts down
        cfg.setProperty(ConfigurationKeys.KEY_MASTER_SERVER_EXIT_ON_SHUTDOWN, true);
        //limit the number of used compute hosts
        cfg.setProperty(ConfigurationKeys.KEY_MAX_NUMBER_OF_CHOSTS,
                getWorkflow().getConfiguration().getInt("maltcms.pipelinethreads", 1));
        //native specs for the drmaa api
        cfg.setProperty(ConfigurationKeys.KEY_NATIVE_SPEC,
                getWorkflow().getConfiguration().getString("mpaxs.nativeSpec", ""));
        getExecutionServer().startMasterServer(cfg);
    }
}

From source file:fr.inria.atlanmod.neoemf.map.datastore.MapPersistenceBackendFactory.java

/**
 * Creates a MapDB-backed persistence backend under the given resource folder,
 * storing/refreshing the NeoEMF configuration file alongside the database.
 *
 * @param file    the resource folder that will contain the database
 * @param options backend creation options (currently unused here)
 * @return a new {@code MapPersistenceBackend} wrapping a MapDB engine
 * @throws InvalidDataStoreException if the configuration file cannot be loaded or created
 */
@Override
public PersistenceBackend createPersistentBackend(File file, Map<?, ?> options)
        throws InvalidDataStoreException {
    File dbFile = FileUtils.getFile(
            NeoMapURI.createNeoMapURI(URI.createFileURI(file.getAbsolutePath()).appendSegment("neoemf.mapdb"))
                    .toFileString());
    if (!dbFile.getParentFile().exists()) {
        dbFile.getParentFile().mkdirs();
    }
    // Load (or lazily create) the NeoEMF configuration next to the database.
    PropertiesConfiguration neoConfig;
    Path neoConfigPath = Paths.get(file.getAbsolutePath()).resolve(NEO_CONFIG_FILE);
    try {
        neoConfig = new PropertiesConfiguration(neoConfigPath.toFile());
    } catch (ConfigurationException e) {
        throw new InvalidDataStoreException(e);
    }
    // Record the backend type only on first creation; never overwrite it.
    if (!neoConfig.containsKey(BACKEND_PROPERTY)) {
        neoConfig.setProperty(BACKEND_PROPERTY, MAPDB_BACKEND);
    }
    // neoConfig is necessarily non-null here (the catch above rethrows), so the
    // original `if (neoConfig != null)` guard was dead code and has been removed.
    try {
        neoConfig.save();
    } catch (ConfigurationException e) {
        // Saving the configuration is best-effort; log and continue.
        NeoLogger.log(NeoLogger.SEVERITY_ERROR, e);
    }
    Engine mapEngine = DBMaker.newFileDB(dbFile).cacheLRUEnable().mmapFileEnableIfSupported().asyncWriteEnable()
            .makeEngine();
    return new MapPersistenceBackend(mapEngine);
}

From source file:com.mirth.connect.server.migration.Migrate3_1_0.java

/**
 * Edits log4j.properties in place: renames the "shutdown" logger to "undeploy"
 * and ensures the donkey RecoveryTask logger has an explicit level, applying
 * the same levels to the live loggers.
 */
private void migrateLog4jProperties() {
    PropertiesConfiguration properties = new PropertiesConfiguration();
    properties.setDelimiterParsingDisabled(true);
    properties.setFile(new File(ClassPathResource.getResourceURI("log4j.properties")));

    try {
        properties.load();

        // Migrate the old "shutdown" logger entry to its new "undeploy" name.
        String shutdownLevel = (String) properties.getProperty("log4j.logger.shutdown");
        if (shutdownLevel != null) {
            properties.setProperty("log4j.logger.undeploy", shutdownLevel);
            properties.clearProperty("log4j.logger.shutdown");
            Logger.getLogger("undeploy").setLevel(Level.toLevel(shutdownLevel));
        }

        // Default the RecoveryTask logger to INFO when it is missing or blank.
        String recoveryLevel = (String) properties
                .getProperty("log4j.logger.com.mirth.connect.donkey.server.channel.RecoveryTask");
        if (StringUtils.isBlank(recoveryLevel)) {
            recoveryLevel = "INFO";
            properties.setProperty("log4j.logger.com.mirth.connect.donkey.server.channel.RecoveryTask",
                    recoveryLevel);
            Logger.getLogger("com.mirth.connect.donkey.server.channel.RecoveryTask")
                    .setLevel(Level.toLevel(recoveryLevel));
        }

        properties.save();
    } catch (ConfigurationException e) {
        logger.error("Failed to migrate log4j properties.");
    }
}

From source file:cross.datastructures.pipeline.ResultAwareCommandPipeline.java

/**
 *
 * @param inputFiles/*from   w  w  w . j ava 2s  . c  o  m*/
 * @param workflow
 */
protected void updateWorkflowInputHashes(TupleND<IFileFragment> inputFiles, IWorkflow workflow) {
    PropertiesConfiguration pc = getHashes(workflow);
    Collection<File> files = getInputFiles(inputFiles);
    String fileHash = getRecursiveFileHash(files);
    pc.setProperty(workflow.getName() + ".inputFiles.fileHash", fileHash);
}

From source file:eu.ascetic.zabbixdatalogger.datasource.hostvmfilter.NameEndsFilter.java

/**
 * This creates a name filter that checks to see if the end of a host name
 * matches particular criteria or not. If it does then it will indicate accordingly
 * that the "Zabbix JSON API host" is a host or VM.
 */
public NameEndsFilter() {
    try {
        PropertiesConfiguration config;
        if (new File(CONFIG_FILE).exists()) {
            config = new PropertiesConfiguration(CONFIG_FILE);
        } else {
            config = new PropertiesConfiguration();
            config.setFile(new File(CONFIG_FILE));
        }
        config.setAutoSave(true); //This will save the configuration file back to disk. In case the defaults need setting.
        // Copy-paste fix: this filter matches on how names END, so it must use the
        // "data.logger.filter.ends" key. The original read "data.logger.filter.begins",
        // colliding with NameBeginsFilter's setting in the same configuration file.
        ends = config.getString("data.logger.filter.ends", ends);
        config.setProperty("data.logger.filter.ends", ends);
        isHost = config.getBoolean("data.logger.filter.isHost", isHost);
        config.setProperty("data.logger.filter.isHost", isHost);
    } catch (ConfigurationException ex) {
        // Log under this class (the original used NameBeginsFilter) and keep the cause.
        Logger.getLogger(NameEndsFilter.class.getName()).log(Level.INFO,
                "Error loading the configuration of the name ends filter", ex);
    }
}

From source file:eu.ascetic.zabbixdatalogger.datasource.hostvmfilter.NameBeginsFilter.java

/**
 * This creates a name filter that checks to see if the start of a host name
 * matches particular criteria or not. If it does then it will indicate accordingly
 * that the "Zabbix JSON API host" is a host or VM.
 */
public NameBeginsFilter() {
    try {
        PropertiesConfiguration config;
        if (new File(CONFIG_FILE).exists()) {
            config = new PropertiesConfiguration(CONFIG_FILE);
        } else {
            config = new PropertiesConfiguration();
            config.setFile(new File(CONFIG_FILE));
        }
        config.setAutoSave(true); //This will save the configuration file back to disk. In case the defaults need setting.
        begins = config.getString("data.logger.filter.begins", begins);
        config.setProperty("data.logger.filter.begins", begins);
        isHost = config.getBoolean("data.logger.filter.isHost", isHost);
        config.setProperty("data.logger.filter.isHost", isHost);
    } catch (ConfigurationException ex) {
        // Include the exception in the log record; the original dropped the cause.
        Logger.getLogger(NameBeginsFilter.class.getName()).log(Level.INFO,
                "Error loading the configuration of the name begins filter", ex);
    }
}

From source file:eu.ascetic.zabbixdatalogger.datasource.hostvmfilter.NamedList.java

/**
 * This creates a name filter that checks to see if the start of a host name
 * matches particular criteria or not. If it does then it will indicate
 * accordingly that the "Zabbix JSON API host" is a host or VM.
 */
public NamedList() {
    try {
        PropertiesConfiguration config;
        if (new File(CONFIG_FILE).exists()) {
            config = new PropertiesConfiguration(CONFIG_FILE);
        } else {
            config = new PropertiesConfiguration();
            config.setFile(new File(CONFIG_FILE));
        }
        config.setAutoSave(true); //This will save the configuration file back to disk. In case the defaults need setting.
        namedSet = config.getString("data.logger.filter.names", namedSet);
        config.setProperty("data.logger.filter.names", namedSet);
        hostNames.addAll(Arrays.asList(namedSet.split(",")));
    } catch (ConfigurationException ex) {
        // Log under this class (the original used NameBeginsFilter) and keep the cause.
        Logger.getLogger(NamedList.class.getName()).log(Level.INFO,
                "Error loading the configuration of the named list filter", ex);
    }
}