Example usage for org.apache.commons.configuration HierarchicalINIConfiguration HierarchicalINIConfiguration

List of usage examples for org.apache.commons.configuration HierarchicalINIConfiguration HierarchicalINIConfiguration

Introduction

On this page you can find example usage of the org.apache.commons.configuration HierarchicalINIConfiguration constructor.

Prototype

public HierarchicalINIConfiguration(URL url) throws ConfigurationException 

Source Link

Document

Creates and loads the INI configuration from the given URL.

Usage

From source file:io.github.collaboratory.LauncherCWL.java

public void run(Class cwlClassTarget) {
    // Read in the launcher configuration INI file.
    try {
        config = new HierarchicalINIConfiguration(configFilePath);
    } catch (ConfigurationException e) {
        throw new RuntimeException("could not read launcher config ini", e);
    }

    // Parse the CWL tool definition (no validation) and bind it to the requested class.
    final String descriptorContent = this.parseCWL(imageDescriptorPath).getLeft();
    final Object cwlObject = gson.fromJson(descriptorContent, cwlClassTarget);
    if (cwlObject == null) {
        LOG.info("CWL Workflow was null");
        return;
    }

    // The job parameterization: plain JSON defining the inputs/outputs in terms of
    // real URLs that are provisioned by the launcher.
    final Map<String, Object> jobJson = loadJob(runtimeDescriptorPath);
    if (jobJson == null) {
        LOG.info("Cannot load job object.");
        return;
    }

    // Create the working directory layout.
    globalWorkingDir = setupDirectories();

    System.out.println("Provisioning your input files to your local machine");
    final Map<String, FileProvisioning.FileInfo> inputMountMap;
    final Map<String, List<FileProvisioning.FileInfo>> outputMap;
    if (cwlObject instanceof Workflow) {
        final Workflow workflow = (Workflow) cwlObject;
        // Pull input files, then prep outputs: creates the output dir and records
        // what the local output path will be.
        inputMountMap = pullFiles(workflow, jobJson);
        outputMap = prepUploadsWorkflow(workflow, jobJson);
    } else if (cwlObject instanceof CommandLineTool) {
        final CommandLineTool commandLineTool = (CommandLineTool) cwlObject;
        inputMountMap = pullFiles(commandLineTool, jobJson);
        outputMap = prepUploadsTool(commandLineTool, jobJson);
    } else {
        throw new UnsupportedOperationException("CWL target type not supported yet");
    }

    // Write an updated JSON inputs document that points at the provisioned locations.
    final String newJsonPath = createUpdatedInputsAndOutputsJson(inputMountMap, outputMap, jobJson);

    // Run the tool/workflow via cwltool and collect its output object.
    System.out.println(
            "Calling out to cwltool to run your " + (cwlObject instanceof Workflow ? "workflow" : "tool"));
    final Map<String, Object> outputObj = runCWLCommand(imageDescriptorPath, newJsonPath,
            globalWorkingDir + "/outputs/", globalWorkingDir + "/working/");
    System.out.println();

    // Push output files to their final destinations.
    System.out.println("Provisioning your output files to their final destinations");
    pushOutputFiles(outputMap, outputObj);
}

From source file:au.org.ands.vocabs.toolkit.provider.transform.PropertyRewriterTransformProvider.java

/** Loads the rewrite map into metadataRewriteConf.
 * @return True if loading was successful. */
private boolean loadRewriteMap() {
    final File rewriteMapFile = new File(METADATA_REWRITE_MAP_PATH);
    try {
        metadataRewriteConf = new HierarchicalINIConfiguration(rewriteMapFile);
        return true;
    } catch (ConfigurationException e) {
        // Loading is best-effort; the caller decides what to do on failure.
        logger.error("Toolkit.metadataRewriteMapPath is empty, or file" + " can not be loaded", e);
        return false;
    }
}

From source file:ee.ria.xroad.signer.tokenmanager.module.ModuleConf.java

private static void reload(String fileName) throws Exception {
    log.trace("Loading module configuration from '{}'", fileName);

    // Reset the module registry; the software-token module is always registered.
    MODULES.clear();
    MODULES.put(SoftwareModuleType.TYPE, new SoftwareModuleType());

    final HierarchicalINIConfiguration conf = new HierarchicalINIConfiguration(fileName);

    for (final String uid : conf.getSections()) {
        // Each INI section is keyed by the module UID; skip unnamed sections.
        if (StringUtils.isBlank(uid)) {
            log.error("No UID specified for module, skipping...");
            continue;
        }
        try {
            parseSection(uid, conf.getSection(uid));
        } catch (ConfigurationRuntimeException e) {
            // One malformed section must not prevent the remaining modules from loading.
            log.error("Parse section failed with", e);
        }
    }
}

From source file:io.datalayer.conf.HierarchicalIniConfigurationTest.java

/**
 * Tests loading a configuration from a File.
 */
@Test
public void testLoadFile() throws ConfigurationException, IOException {
    writeTestFile(INI_DATA);
    final HierarchicalINIConfiguration loaded = new HierarchicalINIConfiguration(TEST_FILE);
    checkContent(loaded);
}

From source file:io.datalayer.conf.HierarchicalIniConfigurationTest.java

/**
 * Tests loading a configuration from a file name.
 */
@Test
public void testLoadFileName() throws ConfigurationException, IOException {
    writeTestFile(INI_DATA);
    final String fileName = TEST_FILE.getAbsolutePath();
    final HierarchicalINIConfiguration loaded = new HierarchicalINIConfiguration(fileName);
    checkContent(loaded);
}

From source file:io.datalayer.conf.HierarchicalIniConfigurationTest.java

/**
 * Tests loading a configuration from a URL.
 */
@Test
public void testLoadURL() throws ConfigurationException, IOException {
    writeTestFile(INI_DATA);
    final HierarchicalINIConfiguration loaded = new HierarchicalINIConfiguration(TEST_FILE.toURI().toURL());
    checkContent(loaded);
}

From source file:com.github.rwhogg.git_vcr.App.java

/**
 * Returns configuration details from the default configuration file.
 * @return the configuration details
 * @throws ConfigurationException if the configuration file is malformed or inadequate
 */
public static HierarchicalINIConfiguration getConfiguration() throws ConfigurationException {
    // Re-parses the INI file named by Constants.CONFIG_FILENAME on every call.
    final HierarchicalINIConfiguration configuration = new HierarchicalINIConfiguration(Constants.CONFIG_FILENAME);
    return configuration;
}

From source file:eu.itesla_project.eurostag.EurostagImpactAnalysis.java

private void writeWp43Configs(List<Contingency> contingencies, Path workingDir)
        throws IOException, ConfigurationException {
    final Path baseWp43ConfigFile = PlatformConfig.CONFIG_DIR.resolve(WP43_CONFIGS_FILE_NAME);

    // Generate one variant of the base config per contingency; this allows adding
    // extra variables for some indexes.
    final HierarchicalINIConfiguration configuration = new HierarchicalINIConfiguration(baseWp43ConfigFile.toFile());
    final SubnodeConfiguration node = configuration.getSection("smallsignal");
    node.setProperty("f_instant", parameters.getFaultEventInstant());
    for (int i = 0; i < contingencies.size(); i++) {
        final Contingency contingency = contingencies.get(i);
        if (contingency.getElements().isEmpty()) {
            throw new AssertionError("Empty contingency " + contingency.getId());
        }
        // The fault duration written out is the maximum over all elements of the
        // contingency (the list is known non-empty at this point).
        double maxDuration = Double.NEGATIVE_INFINITY;
        for (final ContingencyElement element : contingency.getElements()) {
            maxDuration = Math.max(maxDuration, getFaultDuration(contingency, element));
        }
        node.setProperty("f_duration", maxDuration);
        final Path wp43Config = workingDir.resolve(WP43_CONFIGS_PER_FAULT_FILE_NAME
                .replace(Command.EXECUTION_NUMBER_PATTERN, Integer.toString(i)));
        try (Writer writer = Files.newBufferedWriter(wp43Config, StandardCharsets.UTF_8)) {
            configuration.save(writer);
        }
    }
}

From source file:eu.itesla_project.dymola.DymolaImpactAnalysis.java

/**
 * Assembles the Dymola simulation inputs under {@code workingDir}: exports the
 * network and the contingency events to Modelica, packages one zip per contingency
 * (the .mo model plus the Modelica power-system library), and writes per-index
 * parameter archives (.mat files, zipped) derived from the WP4.3 INI configuration.
 *
 * @param workingDir    directory in which every input artifact is created
 * @param contingencies contingencies to simulate; each must contain at least one element
 * @return sorted list of contingency ids, parsed from the exported event file names
 * @throws IOException if a file cannot be created, copied or walked
 */
private List<String> writeDymolaInputs(Path workingDir, List<Contingency> contingencies) throws IOException {
    LOGGER.info(" Start writing dymola inputs");

    List<String> retList = new ArrayList<>();

    // JBoss connection settings for the dynamic database (DDB) backing the export.
    DdbConfig ddbConfig = DdbConfig.load();
    String jbossHost = ddbConfig.getJbossHost();
    String jbossPort = ddbConfig.getJbossPort();
    String jbossUser = ddbConfig.getJbossUser();
    String jbossPassword = ddbConfig.getJbossPassword();

    Path dymolaExportPath = workingDir.resolve(MO_EXPORT_DIRECTORY);
    if (!Files.exists(dymolaExportPath)) {
        Files.createDirectory(dymolaExportPath);
    }

    //retrieve modelica export parameters from configuration
    String modelicaVersion = config.getModelicaVersion();
    String sourceEngine = config.getSourceEngine();
    String sourceVersion = config.getSourceEngineVersion();
    Path modelicaPowerSystemLibraryPath = Paths.get(config.getModelicaPowerSystemLibraryFile());

    //write the modelica events file, to feed the modelica exporter
    Path eventsPath = workingDir.resolve(MODELICA_EVENTS_CSV_FILENAME);
    writeModelicaExporterContingenciesFile(eventsPath, contingencies);

    //these are only optional params needed if the source is eurostag
    Path modelicaLibPath = null;

    String slackId = config.getSlackId();
    if ("".equals(slackId)) {
        slackId = null; // null when not specified
    }

    LoadFlowFactory loadFlowFactory;
    try {
        loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw new RuntimeException(e);
    }

    LOGGER.info("Exporting modelica data for network {}, working state-id {} ", network,
            network.getStateManager().getWorkingStateId());
    ModelicaMainExporter exporter = new ModelicaMainExporter(network, slackId, jbossHost, jbossPort, jbossUser,
            jbossPassword, modelicaVersion, sourceEngine, sourceVersion, modelicaLibPath, loadFlowFactory);
    exporter.export(dymolaExportPath);
    ModEventsExport eventsExporter = new ModEventsExport(
            dymolaExportPath.resolve(network.getId() + ".mo").toFile(), eventsPath.toFile());
    eventsExporter.export(dymolaExportPath);
    LOGGER.info(" modelica data exported.");

    // now assemble the input files to feed dymola
    //  one .zip per contingency; in the zip, the .mo file and the powersystem library
    //TODO here it is assumed that contingencies ids in csv file start from 0 (i.e. 0 is the first contingency); id should be decoupled from the implementation
    try (final Stream<Path> pathStream = Files.walk(dymolaExportPath)) {
        // Only exported event model files ("events_*.mo") become contingency inputs.
        pathStream.filter((p) -> !p.toFile().isDirectory() && p.toFile().getAbsolutePath().contains("events_")
                && p.toFile().getAbsolutePath().endsWith(".mo")).forEach(p -> {
                    GenericArchive archive = ShrinkWrap.createDomain().getArchiveFactory()
                            .create(GenericArchive.class);
                    // Build the zip content in an in-memory ShrinkWrap filesystem.
                    try (FileSystem fileSystem = ShrinkWrapFileSystems.newFileSystem(archive)) {
                        Path rootDir = fileSystem.getPath("/");
                        Files.copy(modelicaPowerSystemLibraryPath,
                                rootDir.resolve(modelicaPowerSystemLibraryPath.getFileName()));
                        Files.copy(Paths.get(p.toString()),
                                rootDir.resolve(DymolaUtil.DYMOLA_SIM_MODEL_INPUT_PREFIX + ".mo"));

                        // The contingency id is the last "_"-separated token of the file name.
                        String[] c = p.getFileName().toString().replace(".mo", "").split("_");
                        try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent().resolve(
                                DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + c[c.length - 1] + ".zip"))) {
                            archive.as(ZipExporter.class).exportTo(os);
                            retList.add(new String(c[c.length - 1]));
                        } catch (IOException e) {
                            // Rethrow unchecked: lambdas cannot propagate checked IOException.
                            throw new RuntimeException(e);
                        }

                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }

                });
    }
    retList.sort(Comparator.<String>naturalOrder());

    //prepare param inputs for indexes from indexes properties file
    LOGGER.info("writing input indexes parameters in  .mat format - start ");
    try {
        Path baseWp43ConfigFile = PlatformConfig.CONFIG_DIR.resolve(WP43_CONFIG_FILE_NAME);
        HierarchicalINIConfiguration configuration = new HierarchicalINIConfiguration(
                baseWp43ConfigFile.toFile());

        //fix params for smallsignal index (cfr EurostagImpactAnalysis sources)
        SubnodeConfiguration node = configuration.getSection("smallsignal");
        node.setProperty("f_instant", Double.toString(parameters.getFaultEventInstant()));
        for (int i = 0; i < contingencies.size(); i++) {
            Contingency contingency = contingencies.get(i);
            if (contingency.getElements().isEmpty()) {
                throw new AssertionError("Empty contingency " + contingency.getId());
            }
            Iterator<ContingencyElement> it = contingency.getElements().iterator();
            // compute the maximum fault duration
            double maxDuration = getFaultDuration(it.next());
            while (it.hasNext()) {
                maxDuration = Math.max(maxDuration, getFaultDuration(it.next()));
            }
            node.setProperty("f_duration", Double.toString(maxDuration));
        }

        DymolaAdaptersMatParamsWriter writer = new DymolaAdaptersMatParamsWriter(configuration);
        for (String cId : retList) {
            String parFileNamePrefix = DymolaUtil.DYMOLA_SIM_MAT_OUTPUT_PREFIX + "_" + cId + "_wp43_";
            String parFileNameSuffix = "_pars.mat";
            String zippedParFileNameSuffix = "_pars.zip";

            // One zipped archive of per-index .mat parameter files per contingency id.
            try (OutputStream os = Files.newOutputStream(dymolaExportPath.getParent()
                    .resolve(DymolaUtil.DYMOLAINPUTZIPFILENAMEPREFIX + "_" + cId + zippedParFileNameSuffix))) {
                JavaArchive archive = ShrinkWrap.create(JavaArchive.class);
                Path sfile1 = ShrinkWrapFileSystems.newFileSystem(archive).getPath("/");

                Arrays.asList(config.getIndexesNames()).forEach(indexName -> writer.write(indexName,
                        sfile1.resolve(parFileNamePrefix + indexName + parFileNameSuffix)));

                archive.as(ZipExporter.class).exportTo(os);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }

        }

    } catch (ConfigurationException exc) {
        throw new RuntimeException(exc);
    }

    LOGGER.info("writing input indexes parameters in  .mat format - end - {}", retList);
    return retList;
}

From source file:org.apache.hms.controller.Controller.java

/**
 * Reads the HMS configuration INI from {@code $HMS_CONF_DIR/hms.ini} (defaulting the
 * directory to {@code /etc/hms}) and populates the ZooKeeper quorum address and, when
 * both a user and a password are present, the {@code user:password} credential.
 * On any failure the configuration is treated as invalid and {@code zookeeperAddress}
 * is reset to {@code null}.
 */
public void parseConfig() {
    String confDir = System.getProperty("HMS_CONF_DIR");
    if (confDir == null) {
        confDir = "/etc/hms"; // default location when the system property is unset
    }
    // Plain concatenation is clearer than a StringBuilder for two fixed segments.
    final String confPath = confDir + "/hms.ini";
    try {
        HierarchicalINIConfiguration ini = new HierarchicalINIConfiguration(confPath);
        zookeeperAddress = ini.getSection("zookeeper").getString("quorum", null);
        String user = ini.getSection("zookeeper").getString("user", null);
        String password = ini.getSection("zookeeper").getString("password", null);
        if (user != null && password != null) {
            credential = user + ":" + password;
        }
    } catch (Exception e) {
        // Include the cause in the log instead of silently dropping it.
        LOG.warn("Invalid HMS configuration file: " + confPath, e);
        zookeeperAddress = null;
    }
    LOG.info("ZooKeeper Quorum in " + confPath + ": " + zookeeperAddress);
}