Usage examples for org.apache.commons.configuration.HierarchicalConfiguration#getString
Signature: public String getString(String key)
From source file:com.vangent.hieos.empi.config.EMPIConfig.java
/** * * @param hc/*ww w . ja va 2s . c o m*/ * @throws EMPIException */ private void loadMatchAlgorithm(HierarchicalConfiguration hc) throws EMPIException { String matchAlgorithmClassName = hc.getString(MATCH_ALGORITHM); // Get an instance of the match algorithm. this.matchAlgorithm = (MatchAlgorithm) ConfigHelper.loadClassInstance(matchAlgorithmClassName); }
From source file:com.vangent.hieos.empi.config.EMPIConfig.java
/** * * @param hc/*w ww. j a v a 2s . c o m*/ * @throws EMPIException */ private void loadCandidateFinder(HierarchicalConfiguration hc) throws EMPIException { String candidateFinderClassName = hc.getString(CANDIDATE_FINDER); // Get an instance of the match algorithm. this.candidateFinder = (CandidateFinder) ConfigHelper.loadClassInstance(candidateFinderClassName); }
From source file:edu.kit.dama.mdm.core.MetaDataManagement.java
/**
 * Loads the metadata-management configuration from the XML file referenced by
 * {@code DataManagerSettings.getConfigurationURL()}.
 *
 * For every persistence implementation found under {@code CONFIG_ROOT}, this reads its
 * persistence units, determines its default unit, instantiates its
 * {@code IPersistenceFactory}, and fills {@code persistenceUnitMap},
 * {@code persistenceClassMap} and {@code persistenceUnitDefaultMap}. The first
 * implementation encountered becomes the default if none is explicitly marked.
 *
 * @throws RuntimeException if the XML configuration cannot be loaded
 * @throws edu.kit.dama.mdm.core.exception.ConfigurationException if a persistence
 *         factory cannot be instantiated
 */
private void loadConfiguration() {
    String firstImplementation = null;
    String firstPersistenceUnit = null;
    HierarchicalConfiguration hc = null;
    List<String> persistenceUnits = null;
    URL configURL = null;
    try {
        configURL = DataManagerSettings.getConfigurationURL();
        LOGGER.debug("Loading configuration from {}", configURL);
        hc = new HierarchicalConfiguration(new XMLConfiguration(configURL));
        LOGGER.debug("Configuration successfully loaded");
    } catch (ConfigurationException ex) {
        // A broken configuration is unrecoverable here; rethrow unchecked with cause preserved.
        LOGGER.error("Failed to load configuration.", ex);
        throw new RuntimeException(ex);
    }
    SubnodeConfiguration configurationAt = hc.configurationAt(CONFIG_ROOT);
    // NOTE(review): raw List — elements are HierarchicalConfiguration, cast below.
    List fields = configurationAt.configurationsAt(CONFIG_PERSISTENCE_IMPL);
    LOGGER.debug("Found {} configured persistence implementations", fields.size());
    // Reset the lookup maps before (re)loading.
    persistenceUnitMap = new HashMap<>();
    persistenceClassMap = new HashMap<>();
    persistenceUnitDefaultMap = new HashMap<>();
    String implementationName;
    IPersistenceFactory iPersistenceFactory = null;
    for (Iterator it = fields.iterator(); it.hasNext();) {
        HierarchicalConfiguration sub = (HierarchicalConfiguration) it.next();
        LOGGER.debug("Reading sub-configuration");
        // First collect all persistence units of this implementation and pick its default.
        persistenceUnits = new ArrayList<>();
        try {
            List<HierarchicalConfiguration> persistenceUnitsList = sub
                    .configurationsAt(CONFIG_PERSISTENCE_UNIT);
            if (persistenceUnitsList == null) {
                persistenceUnitsList = new LinkedList<>();
            }
            LOGGER.debug("Configuration contains {} persistence units.", persistenceUnitsList.size());
            firstPersistenceUnit = null;
            for (HierarchicalConfiguration item : persistenceUnitsList) {
                // "." yields the element's own text; "[@default]" its default attribute.
                String value = item.getString(".");
                String defaultAttribute = item.getString("[@default]");
                LOGGER.debug("PersistenceUnit found: " + value);
                LOGGER.debug("@default = {}", defaultAttribute);
                if (Boolean.parseBoolean(defaultAttribute)) {
                    // Only the first unit flagged default=true wins; later ones are ignored.
                    if (firstPersistenceUnit == null) {
                        LOGGER.debug("{} is used as default persistence unit.", value);
                        firstPersistenceUnit = value;
                    } else {
                        LOGGER.warn("{} is an additional persistence unit defined as default. We'll ignore this.",
                                value);
                    }
                }
                LOGGER.debug("Adding persistence unit to list of units.");
                persistenceUnits.add(value);
            }
        } catch (Exception any) {
            // Best-effort: a failure here leaves this implementation with an empty unit list.
            LOGGER.error("Failed to read persistence units.", any);
        }
        LOGGER.debug("firstPersistenceUnit: " + firstPersistenceUnit);
        // No explicit default unit -> fall back to the first one listed.
        if ((persistenceUnits.size() > 0) && (firstPersistenceUnit == null)) {
            LOGGER.debug("No default persistence unit defined. Using first entry ({})", persistenceUnits.get(0));
            firstPersistenceUnit = persistenceUnits.get(0);
        }
        LOGGER.debug("Getting implementation name.");
        implementationName = sub.getString(CONFIG_PERSISTENCE_NAME);
        LOGGER.debug("Implementation name '{}' found.", implementationName);
        // Remember the first implementation as a fallback default (applied after the loop).
        if (firstImplementation == null) {
            LOGGER.debug("Using implementation '{}' as first implementation.", implementationName);
            firstImplementation = implementationName;
        }
        LOGGER.debug("Testing implementation '{}'", implementationName);
        if (sub.containsKey(CONFIG_DEFAULT_PERSISTENCE)) {
            LOGGER.debug("'{}' is configured as default implementation.", implementationName);
            // As with units: only the first implementation flagged default wins.
            if (defaultImplementation != null) {
                LOGGER.warn("{} is an additional implementation defined as default. We'll ignore this.",
                        implementationName);
            } else {
                defaultImplementation = implementationName;
            }
        }
        // Instantiate the persistence factory class and verify it implements IPersistenceFactory.
        Class<?> loadClass;
        boolean success = false;
        String persistenceClass = sub.getString(CONFIG_PERSISTENCE_CLASS);
        try {
            LOGGER.debug("Loading class '{}': ", persistenceClass);
            loadClass = getClass().getClassLoader().loadClass(persistenceClass);
            LOGGER.debug("Checking IPersistenceFactory.class.assignableFrom({})", persistenceClass);
            success = IPersistenceFactory.class.isAssignableFrom(loadClass);
            iPersistenceFactory = null;
            if (success) {
                LOGGER.debug("Creating instance of class {}", persistenceClass);
                iPersistenceFactory = (IPersistenceFactory) loadClass.newInstance();
                LOGGER.debug("Persistence factory successfully instantiated.");
            } else {
                LOGGER.error("IPersistenceFactory seems not to be assignable from class {}", persistenceClass);
            }
        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException ex) {
            LOGGER.error("Failed to create instance of persistence implementation " + persistenceClass, ex);
            success = false;
        }
        if (success) {
            // Register everything gathered for this implementation.
            persistenceUnitMap.put(implementationName, persistenceUnits);
            persistenceClassMap.put(implementationName, iPersistenceFactory);
            persistenceUnitDefaultMap.put(implementationName, firstPersistenceUnit);
        } else {
            throw new edu.kit.dama.mdm.core.exception.ConfigurationException(
                    "Failed to initialize persistence factory from URL '" + configURL
                            + "'. See logfile for details.");
        }
    }
    // No implementation was explicitly marked default -> use the first one found.
    if (defaultImplementation == null) {
        LOGGER.debug("Default implementation not set, yet. Using first one ({}) as default.",
                firstImplementation);
        defaultImplementation = firstImplementation;
    }
}
From source file:com.gs.obevo.db.api.factory.DbEnvironmentXmlEnricher.java
/**
 * Reads the deploy-system definition from the given source file and materializes one
 * {@link DbEnvironment} per {@code environments.dbEnvironment} element.
 *
 * System-level attributes (platform type, source dirs, accepted extensions, permissions,
 * data delimiter, null token) are read from the root; environment-level attributes
 * override system-level ones where both exist (see the getBoolean/getInt calls that pass
 * a sysCfg lookup as default).
 *
 * @param sourcePath file or directory containing the system configuration
 * @return the parsed system with all environments; duplicate env names are rejected
 * @throws IllegalArgumentException on conflicting schema include/exclude lists, unknown
 *         schema overrides, or a multi-character dataDelimiter
 */
@Override
public DeploySystem<DbEnvironment> readSystem(FileObject sourcePath) {
    HierarchicalConfiguration sysCfg = getConfig(sourcePath);
    DbPlatform systemDbPlatform = dbPlatformConfiguration.valueOf(sysCfg.getString("[@type]"));
    MutableList<String> sourceDirs = ListAdapter.adapt(sysCfg.getList("[@sourceDirs]"));
    ImmutableSet<String> acceptedExtensions = ListAdapter.adapt(sysCfg.getList("[@acceptedExtensions]")).toSet()
            .toImmutable();
    MutableList<DbEnvironment> envList = Lists.mutable.empty();
    for (HierarchicalConfiguration envCfg : iterConfig(sysCfg, "environments.dbEnvironment")) {
        DbEnvironment dbEnv = new DbEnvironment();
        // If the config was given as a file, its parent directory is the source root.
        FileObject rootDir = sourcePath.getType() == FileType.FILE ? sourcePath.getParent() : sourcePath;
        // Use coreSourcePath and additionalSourceDirs here (instead of setSourceDirs) to facilitate any external integrations
        dbEnv.setCoreSourcePath(rootDir);
        dbEnv.setAdditionalSourceDirs(sourceDirs);
        dbEnv.setAcceptedExtensions(acceptedExtensions);
        // Connection-level settings, straight from environment attributes.
        dbEnv.setCleanBuildAllowed(envCfg.getBoolean("[@cleanBuildAllowed]", false));
        dbEnv.setDbHost(envCfg.getString("[@dbHost]"));
        dbEnv.setDbPort(envCfg.getInt("[@dbPort]", 0));
        dbEnv.setDbServer(envCfg.getString("[@dbServer]"));
        dbEnv.setDbSchemaPrefix(envCfg.getString("[@dbSchemaPrefix]"));
        dbEnv.setDbSchemaSuffix(envCfg.getString("[@dbSchemaSuffix]"));
        dbEnv.setDbDataSourceName(envCfg.getString("[@dbDataSourceName]"));
        dbEnv.setJdbcUrl(envCfg.getString("[@jdbcUrl]"));
        // Collect token key/value pairs for substitution.
        MutableMap<String, String> tokens = Maps.mutable.empty();
        for (HierarchicalConfiguration tok : iterConfig(envCfg, "tokens.token")) {
            tokens.put(tok.getString("[@key]"), tok.getString("[@value]"));
        }
        dbEnv.setTokens(tokens.toImmutable());
        // Allow the groups + users to be tokenized upfront for compatibility w/ the EnvironmentInfraSetup classes
        Tokenizer tokenizer = new Tokenizer(dbEnv.getTokens(), dbEnv.getTokenPrefix(), dbEnv.getTokenSuffix());
        dbEnv.setGroups(iterConfig(sysCfg, "groups.group").collect(convertCfgToGroup(tokenizer)));
        dbEnv.setUsers(iterConfig(sysCfg, "users.user").collect(convertCfgToUser(tokenizer)));
        if (envCfg.getString("[@driverClass]") != null) {
            dbEnv.setDriverClassName(envCfg.getString("[@driverClass]"));
        }
        dbEnv.setName(envCfg.getString("[@name]"));
        dbEnv.setDefaultUserId(envCfg.getString("[@defaultUserId]"));
        dbEnv.setDefaultPassword(envCfg.getString("[@defaultPassword]"));
        dbEnv.setDefaultTablespace(envCfg.getString("[@defaultTablespace]"));
        // TODO add include/exclude schemas functionality
        MutableList<Schema> schemaObjs = Lists.mutable.withAll(iterConfig(sysCfg, "schemas.schema"))
                .collect(convertCfgToSchema(systemDbPlatform));
        // includeSchemas and excludeSchemas are mutually exclusive filters.
        MutableSet<String> schemasToInclude = iterString(envCfg, "includeSchemas").toSet();
        MutableSet<String> schemasToExclude = iterString(envCfg, "excludeSchemas").toSet();
        if (!schemasToInclude.isEmpty() && !schemasToExclude.isEmpty()) {
            throw new IllegalArgumentException("Environment " + dbEnv.getName() + " has includeSchemas ["
                    + schemasToInclude + "] and excludeSchemas [" + schemasToExclude
                    + "] defined; please only specify one of them");
        } else if (!schemasToInclude.isEmpty()) {
            schemaObjs = schemaObjs.select(Predicates.attributeIn(Schema.TO_NAME, schemasToInclude));
        } else if (!schemasToExclude.isEmpty()) {
            schemaObjs = schemaObjs.reject(Predicates.attributeIn(Schema.TO_NAME, schemasToExclude));
        }
        // Per-environment schema name overrides; each override must refer to a known schema.
        MutableMap<String, String> schemaNameOverrides = Maps.mutable.empty();
        MutableSet<String> schemaNames = schemaObjs.collect(Schema.TO_NAME).toSet();
        for (HierarchicalConfiguration schemaOverride : iterConfig(envCfg, "schemaOverrides.schemaOverride")) {
            String schema = schemaOverride.getString("[@schema]");
            if (schemaObjs.collect(Schema.TO_NAME).contains(schema)) {
                schemaNameOverrides.put(schema, schemaOverride.getString("[@overrideValue]"));
            } else {
                throw new IllegalArgumentException(
                        "Schema override definition value " + schema + " is not defined in the schema list "
                                + schemaNames + " for environment " + dbEnv.getName());
            }
        }
        dbEnv.setSchemaNameOverrides(schemaNameOverrides.toImmutable());
        // ensure that we only store the unique schema names here
        dbEnv.setSchemas(UnifiedSetWithHashingStrategy
                .newSet(HashingStrategies.fromFunction(Schema.TO_NAME), schemaObjs).toImmutable());
        dbEnv.setPersistToFile(envCfg.getBoolean("[@persistToFile]", false));
        dbEnv.setDisableAuditTracking(envCfg.getBoolean("[@disableAuditTracking]", false));
        // Feature toggles: env attribute wins, then system attribute, then the platform default.
        dbEnv.setRollbackDetectionEnabled(envCfg.getBoolean("[@rollbackDetectionEnabled]",
                sysCfg.getBoolean("[@rollbackDetectionEnabled]", true)));
        dbEnv.setAutoReorgEnabled(
                envCfg.getBoolean("[@autoReorgEnabled]", sysCfg.getBoolean("[@autoReorgEnabled]", true)));
        dbEnv.setInvalidObjectCheckEnabled(envCfg.getBoolean("[@invalidObjectCheckEnabled]",
                sysCfg.getBoolean("[@invalidObjectCheckEnabled]", true)));
        dbEnv.setReorgCheckEnabled(
                envCfg.getBoolean("[@reorgCheckEnabled]", sysCfg.getBoolean("[@reorgCheckEnabled]", true)));
        dbEnv.setChecksumDetectionEnabled(envCfg.getBoolean("[@checksumDetectionEnabled]",
                sysCfg.getBoolean("[@checksumDetectionEnabled]", false)));
        dbEnv.setMetadataLineReaderVersion(
                envCfg.getInt("[@metadataLineReaderVersion]", sysCfg.getInt("[@metadataLineReaderVersion]",
                        dbPlatformConfiguration.getFeatureToggleVersion("metadataLineReaderVersion"))));
        dbEnv.setCsvVersion(envCfg.getInt("[@csvVersion]",
                sysCfg.getInt("[@csvVersion]", dbPlatformConfiguration.getFeatureToggleVersion("csvVersion"))));
        int legacyDirectoryStructureEnabledValue = envCfg.getInt("[@legacyDirectoryStructureEnabled]",
                sysCfg.getInt("[@legacyDirectoryStructureEnabled]",
                        dbPlatformConfiguration.getFeatureToggleVersion("legacyDirectoryStructureEnabled")));
        dbEnv.setLegacyDirectoryStructureEnabled(legacyDirectoryStructureEnabledValue == 1); // 1 == legacy, 2 == new
        // Platform-specific extra attributes, again env-over-system.
        MutableMap<String, String> extraEnvAttrs = Maps.mutable.empty();
        for (String extraEnvAttr : dbPlatformConfiguration.getExtraEnvAttrs()) {
            String attrStr = "[@" + extraEnvAttr + "]";
            extraEnvAttrs.put(extraEnvAttr, envCfg.getString(attrStr, sysCfg.getString(attrStr)));
        }
        dbEnv.setExtraEnvAttrs(extraEnvAttrs.toImmutable());
        // Environment permissions take precedence over system permissions when present.
        ImmutableList<HierarchicalConfiguration> envPermissions = iterConfig(envCfg, "permissions.permission");
        ImmutableList<HierarchicalConfiguration> sysPermissions = iterConfig(sysCfg, "permissions.permission");
        if (!envPermissions.isEmpty()) {
            dbEnv.setPermissions(envPermissions.collect(convertCfgToPermission(tokenizer)));
        } else if (!sysPermissions.isEmpty()) {
            dbEnv.setPermissions(sysPermissions.collect(convertCfgToPermission(tokenizer)));
        }
        // An environment may run on an in-memory platform distinct from the system platform.
        DbPlatform platform;
        if (envCfg.getString("[@inMemoryDbType]") != null) {
            platform = dbPlatformConfiguration.valueOf(envCfg.getString("[@inMemoryDbType]"));
        } else {
            platform = systemDbPlatform;
        }
        dbEnv.setSystemDbPlatform(systemDbPlatform);
        dbEnv.setPlatform(platform);
        // dataDelimiter must be exactly one character when specified.
        String delim = sysCfg.getString("[@dataDelimiter]");
        if (delim != null) {
            if (delim.length() == 1) {
                dbEnv.setDataDelimiter(delim.charAt(0));
            } else {
                throw new IllegalArgumentException(
                        "dataDelimiter must be 1 character long. instead, got [" + delim + "]");
            }
        }
        String nullToken = sysCfg.getString("[@nullToken]");
        if (nullToken != null) {
            dbEnv.setNullToken(nullToken);
        }
        dbEnv.setAuditTableSql(getProperty(sysCfg, envCfg, "auditTableSql"));
        envList.add(dbEnv);
    }
    CollectionUtil.verifyNoDuplicates(envList, DbEnvironment.TO_NAME,
            "Invalid configuration from " + sourcePath + "; not expecting duplicate env names");
    return new DeploySystem<DbEnvironment>(envList);
}
From source file:com.vaushell.superpipes.dispatch.Dispatcher.java
/**
 * Initializes the dispatcher from an XML configuration: loads the mailer, then the
 * common property sets, then the nodes (with their commons wiring), then the routes.
 *
 * Creates the data directory if it does not yet exist.
 *
 * @param config Configuration (must not be null)
 * @param datas Path to store datas (must not be null; created if absent, must be a directory)
 * @param vCodeFactory Validation code factory
 * @throws java.lang.Exception on configuration or filesystem errors
 */
public void init(final XMLConfiguration config, final Path datas, final A_ValidatorCode.I_Factory vCodeFactory)
        throws Exception {
    if (config == null || datas == null) {
        throw new IllegalArgumentException();
    }
    if (LOGGER.isInfoEnabled()) {
        LOGGER.info("[" + getClass().getSimpleName() + "] load() : datas=" + datas);
    }
    // Ensure the data directory exists and really is a directory.
    if (Files.notExists(datas)) {
        Files.createDirectory(datas);
    } else {
        if (!Files.isDirectory(datas)) {
            throw new IllegalArgumentException("Path '" + datas.toString() + "' should be a directory");
        }
    }
    this.datas = datas;
    this.vCodeFactory = vCodeFactory;
    // Load mailer
    eMailer.load(config.configurationAt("mailer"));
    // Load commons: reusable property sets, registered by their "id" attribute.
    commonsProperties.clear();
    final List<HierarchicalConfiguration> cCommons = config.configurationsAt("commons.common");
    if (cCommons != null) {
        for (final HierarchicalConfiguration cCommon : cCommons) {
            final ConfigProperties cProperties = new ConfigProperties();
            cProperties.readProperties(cCommon);
            addCommon(cCommon.getString("[@id]"), cProperties);
        }
    }
    // Load nodes: each node may reference a comma-separated list of common ids.
    nodes.clear();
    final List<HierarchicalConfiguration> cNodes = config.configurationsAt("nodes.node");
    if (cNodes != null) {
        for (final HierarchicalConfiguration cNode : cNodes) {
            final List<ConfigProperties> commons = new ArrayList<>();
            final String commonsID = cNode.getString("[@commons]");
            if (commonsID != null) {
                for (final String commonID : commonsID.split(",")) {
                    // Unknown common ids are silently skipped.
                    final ConfigProperties common = getCommon(commonID);
                    if (common != null) {
                        commons.add(common);
                    }
                }
            }
            final A_Node node = addNode(cNode.getString("[@id]"), cNode.getString("[@type]"), commons);
            node.load(cNode);
        }
    }
    // Load routes: directed source -> destination links between nodes.
    routes.clear();
    final List<HierarchicalConfiguration> cRoutes = config.configurationsAt("routes.route");
    if (cRoutes != null) {
        for (final HierarchicalConfiguration cRoute : cRoutes) {
            final String sourceID = cRoute.getString("[@source]");
            final String destinationID = cRoute.getString("[@destination]");
            addRoute(sourceID, destinationID);
        }
    }
}
From source file:net.datenwerke.sandbox.util.SandboxParser.java
/**
 * Builds a StackEntry from one stack-check configuration node.
 *
 * Reads the mandatory "pos" and "type" attributes and the optional "prefix"
 * attribute (false when absent).
 *
 * @param stack configuration node describing one stack check
 * @return the parsed StackEntry
 */
protected StackEntry getStackEntry(HierarchicalConfiguration stack) {
    int pos = stack.getInt("[@pos]");
    String type = stack.getString("[@type]");
    // getBoolean(key, boolean default) returns a primitive and can never be null,
    // so the original boxed Boolean plus Boolean.TRUE.equals(prefix) was redundant.
    boolean prefix = stack.getBoolean("[@prefix]", false);
    return new StackEntry(pos, type, prefix);
}
From source file:net.datenwerke.sandbox.util.SandboxParser.java
public void parse(Configuration config) { if (!(config instanceof HierarchicalConfiguration)) throw new IllegalArgumentException("Expected HierarchicalConfiguration format"); HierarchicalConfiguration conf = (HierarchicalConfiguration) config; /* sandboxes */ for (HierarchicalConfiguration rs : conf.configurationsAt("security.sandbox")) { String name = rs.getString("[@name]"); if (null == name) throw new IllegalArgumentException("no name for sandbox given"); SandboxContext set = loadSandbox(name, conf, rs, new HashSet<String>()); restrictionSets.put(name, set);//from w w w . ja v a2 s . com } }
From source file:com.webcohesion.enunciate.modules.java_json_client.JavaJSONClientModule.java
public Set<String> getServerSideTypesToUse() { List<HierarchicalConfiguration> typeElements = this.config.configurationsAt("server-side-type"); TreeSet<String> types = new TreeSet<String>(); for (HierarchicalConfiguration typeElement : typeElements) { types.add(typeElement.getString("[@pattern]")); }//from www .j a v a 2 s. c o m return types; }
From source file:net.datenwerke.sandbox.util.SandboxParser.java
protected void configurePackages(SandboxContext set, HierarchicalConfiguration rs) { for (Object e : rs.getList("packages.whitelist.entry")) set.addPackagePermission(AccessType.PERMIT, (String) e); for (Object e : rs.getList("packages.blacklist.entry")) set.addPackagePermission(AccessType.DENY, (String) e); for (HierarchicalConfiguration compl : rs.configurationsAt("packages.whitelist.complex")) { String cName = compl.getString("[@name]"); Collection<StackEntry> entries = new HashSet<StackEntry>(); for (HierarchicalConfiguration stack : compl.configurationsAt("check")) entries.add(getStackEntry(stack)); PackagePermission wpkg = new PackagePermission(cName, entries); set.addPackagePermission(wpkg);// ww w . ja v a 2s.co m } }
From source file:net.datenwerke.sandbox.util.SandboxParser.java
protected void configureClasses(SandboxContext set, HierarchicalConfiguration rs) throws MalformedURLException { for (Object e : rs.getList("classes.whitelist.entry")) set.addClassPermission(AccessType.PERMIT, (String) e); for (Object e : rs.getList("classes.whitelist.jar")) { set.addJarToWhitelist(new URL((String) e)); }//w w w . j av a 2 s . c om for (Object e : rs.getList("classes.blacklist.entry")) set.addClassPermission(AccessType.DENY, (String) e); for (HierarchicalConfiguration compl : rs.configurationsAt("classes.whitelist.complex")) { String cName = compl.getString("[@name]"); Collection<StackEntry> entries = new HashSet<StackEntry>(); for (HierarchicalConfiguration stack : compl.configurationsAt("check")) entries.add(getStackEntry(stack)); ClassPermission wclazz = new ClassPermission(cName, entries); set.addClassPermission(wclazz); } }