List of usage examples for org.joda.time.Interval.parse
public static Interval parse(String str)
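Interval.parse accepts an ISO-8601 interval string of the form "start/end", where each part may be a date-time or a period, so "datetime/datetime", "datetime/period" and "period/datetime" are all valid. In the examples below the string comes from a command-line option (e.g. "interval", "history-interval") or a configuration property. A minimal, self-contained sketch (the interval values are illustrative, not taken from the projects below):

import org.joda.time.Interval;

public class IntervalParseExample {
    public static void main(String[] args) {
        // "datetime/datetime" form, as typically read from an interval option
        Interval interval = Interval.parse("2013-01-01T00:00:00+01:00/2013-01-31T23:59:00+01:00");
        System.out.println(interval.getStart() + " -> " + interval.getEnd());

        // "datetime/period" form: the end instant is the start plus an ISO-8601 period
        Interval firstWeek = Interval.parse("2013-01-01T00:00:00Z/P1W");
        System.out.println(firstWeek.toDuration().getStandardDays() + " days");
    }
}

A malformed string, or one whose end instant precedes its start, makes Interval.parse throw an IllegalArgumentException; the tools below declare "throws Exception" and simply let such a failure propagate.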
From source file:eu.itesla_project.modules.topo.CheckSubstationUniqueToposTool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    Path dictFile = null;
    if (line.hasOption("use-short-ids-dict")) {
        dictFile = Paths.get(line.getOptionValue("use-short-ids-dict"));
    }
    double correlationThreshold = Double.parseDouble(line.getOptionValue("correlation-threshold"));
    double probabilityThreshold = Double.parseDouble(line.getOptionValue("probability-threshold"));
    Network network = Importers.loadNetwork(caseFile);
    if (network == null) {
        throw new RuntimeException("Case '" + caseFile + "' not found");
    }
    network.getStateManager().allowStateMultiThreadAccess(true);
    OfflineConfig config = OfflineConfig.load();
    try (TopologyMiner topologyMiner = config.getTopologyMinerFactoryClass().newInstance().create()) {
        Path topoCacheDir = TopologyContext.createTopoCacheDir(network, interval, correlationThreshold,
                probabilityThreshold);
        TopologyContext topologyContext = topologyMiner.loadContext(topoCacheDir, interval,
                correlationThreshold, probabilityThreshold);
        if (topologyContext == null) {
            throw new RuntimeException("Topology context not found");
        }
        ShortIdDictionary dict = null;
        if (dictFile != null) {
            dict = new ShortIdDictionary(dictFile);
        }
        new UniqueTopologyBuilder(topologyContext.getTopologyHistory(), dict).build(network);
    }
}
From source file:eu.itesla_project.modules.topo.PrintSubstationTopoHistoryTool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    String substationId = line.getOptionValue("substation-id");
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    Path dictFile = null;
    if (line.hasOption("generate-short-ids-dict")) {
        dictFile = Paths.get(line.getOptionValue("generate-short-ids-dict"));
    }
    OfflineConfig config = OfflineConfig.load();
    Map<Set<TopoBus>, AtomicInteger> topos = new LinkedHashMap<>();
    try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create()) {
        int rowCount = histoDbClient.queryCount(interval, HistoDbHorizon.SN);
        Set<HistoDbAttributeId> attributeIds = Collections
                .singleton(new HistoDbNetworkAttributeId(substationId, HistoDbAttr.TOPO));
        try (InputStream is = histoDbClient.queryCsv(HistoQueryType.data, attributeIds, interval,
                HistoDbHorizon.SN, false, false)) {
            new TopoHistoryParser().parse(rowCount, is, new TopoHistoryHandler() {
                @Override
                public void onHeader(List<String> substationIds, int rowCount) {
                }

                @Override
                public void onTopology(int row, int col, Set<TopoBus> topo) {
                    if (topo != null) {
                        if (topos.containsKey(topo)) {
                            topos.get(topo).incrementAndGet();
                        } else {
                            topos.put(topo, new AtomicInteger(1));
                        }
                    }
                }
            });
        }
    }
    Map<Set<TopoBus>, AtomicInteger> topos2;
    if (dictFile != null) {
        ShortIdDictionary dict = createDict(topos.keySet());
        dict.write(dictFile);
        topos2 = translate(topos, dict);
    } else {
        topos2 = topos;
    }
    topos2.entrySet().stream().forEach(e -> System.out.println(e.getKey() + " " + e.getValue()));
}
From source file:eu.itesla_project.modules.topo.PrintSubstationUniqueTopoTool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    String substationId = line.getOptionValue("substation-id");
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    Path dictFile = null;
    if (line.hasOption("use-short-ids-dict")) {
        dictFile = Paths.get(line.getOptionValue("use-short-ids-dict"));
    }
    double correlationThreshold = Double.parseDouble(line.getOptionValue("correlation-threshold"));
    double probabilityThreshold = Double.parseDouble(line.getOptionValue("probability-threshold"));
    Network network = Importers.loadNetwork(caseFile);
    if (network == null) {
        throw new RuntimeException("Case '" + caseFile + "' not found");
    }
    network.getStateManager().allowStateMultiThreadAccess(true);
    OfflineConfig config = OfflineConfig.load();
    try (TopologyMiner topologyMiner = config.getTopologyMinerFactoryClass().newInstance().create()) {
        Path topoCacheDir = TopologyContext.createTopoCacheDir(network, interval, correlationThreshold,
                probabilityThreshold);
        TopologyContext topologyContext = topologyMiner.loadContext(topoCacheDir, interval,
                correlationThreshold, probabilityThreshold);
        Map<String, UniqueTopology> uniqueTopologies = new UniqueTopologyBuilder(
                topologyContext.getTopologyHistory()).build();
        UniqueTopology uniqueTopology = uniqueTopologies.get(substationId);
        if (uniqueTopology == null) {
            throw new RuntimeException("Unique topology not found for substation " + substationId);
        }
        ShortIdDictionary dict = null;
        if (dictFile != null) {
            dict = new ShortIdDictionary(dictFile);
        }
        uniqueTopology.print(System.out, dict);
    }
}
From source file:eu.itesla_project.modules.validation.OfflineValidationTool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    String workflowId = line.getOptionValue("workflow");
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold"))
            : DEFAULT_PURITY_THRESHOLD;
    Set<Country> countries = Arrays.stream(line.getOptionValue("base-case-countries").split(","))
            .map(Country::valueOf).collect(Collectors.toSet());
    Interval histoInterval = Interval.parse(line.getOptionValue("history-interval"));
    boolean mergeOptimized = line.hasOption("merge-optimized");
    CaseType caseType = CaseType.valueOf(line.getOptionValue("case-type"));
    CaseRepositoryFactory caseRepositoryFactory = config.getCaseRepositoryFactoryClass().newInstance();
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
            .newInstance().create();
    SimulatorFactory simulatorFactory = config.getSimulatorFactoryClass().newInstance();
    LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
    MergeOptimizerFactory mergeOptimizerFactory = config.getMergeOptimizerFactoryClass().newInstance();
    SimulationParameters simulationParameters = SimulationParameters.load();
    try (ComputationManager computationManager = new LocalComputationManager();
            RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName);
            CsvMetricsDb metricsDb = new CsvMetricsDb(outputDir, true, "metrics")) {
        CaseRepository caseRepository = caseRepositoryFactory.create(computationManager);
        Queue<DateTime> dates = Queues.synchronizedDeque(
                new ArrayDeque<>(caseRepository.dataAvailable(caseType, countries, histoInterval)));
        Map<String, Map<RuleId, ValidationStatus>> statusPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());
        Map<String, Map<RuleId, Map<HistoDbAttributeId, Object>>> valuesPerRulePerCase = Collections
                .synchronizedMap(new TreeMap<>());
        int cores = Runtime.getRuntime().availableProcessors();
        ExecutorService executorService = Executors.newFixedThreadPool(cores);
        try {
            List<Future<?>> tasks = new ArrayList<>(cores);
            for (int i = 0; i < cores; i++) {
                tasks.add(executorService.submit((Runnable) () -> {
                    while (dates.size() > 0) {
                        DateTime date = dates.poll();
                        try {
                            Network network = MergeUtil.merge(caseRepository, date, caseType, countries,
                                    loadFlowFactory, 0, mergeOptimizerFactory, computationManager,
                                    mergeOptimized);
                            System.out.println("case " + network.getId() + " loaded");
                            System.out.println("running simulation on " + network.getId() + "...");
                            network.getStateManager().allowStateMultiThreadAccess(true);
                            String baseStateId = network.getId();
                            network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, baseStateId);
                            network.getStateManager().setWorkingState(baseStateId);
                            Map<RuleId, ValidationStatus> statusPerRule = new HashMap<>();
                            Map<RuleId, Map<HistoDbAttributeId, Object>> valuesPerRule = new HashMap<>();
                            LoadFlow loadFlow = loadFlowFactory.create(network, computationManager, 0);
                            LoadFlowResult loadFlowResult = loadFlow.run();
                            System.err.println("load flow terminated (" + loadFlowResult.isOk() + ") on "
                                    + network.getId());
                            if (loadFlowResult.isOk()) {
                                Stabilization stabilization = simulatorFactory.createStabilization(network,
                                        computationManager, 0);
                                ImpactAnalysis impactAnalysis = simulatorFactory.createImpactAnalysis(network,
                                        computationManager, 0, contingencyDb);
                                Map<String, Object> context = new HashMap<>();
                                stabilization.init(simulationParameters, context);
                                impactAnalysis.init(simulationParameters, context);
                                StabilizationResult stabilizationResult = stabilization.run();
                                System.err.println("stabilization terminated ("
                                        + stabilizationResult.getStatus() + ") on " + network.getId());
                                metricsDb.store(workflowId, network.getId(), "STABILIZATION",
                                        stabilizationResult.getMetrics());
                                if (stabilizationResult.getStatus() == StabilizationStatus.COMPLETED) {
                                    ImpactAnalysisResult impactAnalysisResult = impactAnalysis
                                            .run(stabilizationResult.getState());
                                    System.err.println("impact analysis terminated on " + network.getId());
                                    metricsDb.store(workflowId, network.getId(), "IMPACT_ANALYSIS",
                                            impactAnalysisResult.getMetrics());
                                    System.out.println("checking rules on " + network.getId() + "...");
                                    for (SecurityIndex securityIndex : impactAnalysisResult
                                            .getSecurityIndexes()) {
                                        for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                            statusPerRule.put(new RuleId(attributeSet, securityIndex.getId()),
                                                    new ValidationStatus(null, securityIndex.isOk()));
                                        }
                                    }
                                }
                            }
                            Map<HistoDbAttributeId, Object> values = IIDM2DB
                                    .extractCimValues(network, new IIDM2DB.Config(null, false))
                                    .getSingleValueMap();
                            for (RuleAttributeSet attributeSet : RuleAttributeSet.values()) {
                                for (Contingency contingency : contingencyDb.getContingencies(network)) {
                                    List<SecurityRule> securityRules = rulesDb.getRules(workflowId,
                                            attributeSet, contingency.getId(), null);
                                    for (SecurityRule securityRule : securityRules) {
                                        SecurityRuleExpression securityRuleExpression = securityRule
                                                .toExpression(purityThreshold);
                                        SecurityRuleCheckReport checkReport = securityRuleExpression
                                                .check(values);
                                        valuesPerRule.put(securityRule.getId(), ExpressionAttributeList
                                                .list(securityRuleExpression.getCondition()).stream()
                                                .collect(Collectors.toMap(attributeId -> attributeId,
                                                        new Function<HistoDbAttributeId, Object>() {
                                                            @Override
                                                            public Object apply(HistoDbAttributeId attributeId) {
                                                                Object value = values.get(attributeId);
                                                                return value != null ? value : Float.NaN;
                                                            }
                                                        })));
                                        ValidationStatus status = statusPerRule.get(securityRule.getId());
                                        if (status == null) {
                                            status = new ValidationStatus(null, null);
                                            statusPerRule.put(securityRule.getId(), status);
                                        }
                                        if (checkReport.getMissingAttributes().isEmpty()) {
                                            status.setRuleOk(checkReport.isSafe());
                                        }
                                    }
                                }
                            }
                            statusPerRulePerCase.put(network.getId(), statusPerRule);
                            valuesPerRulePerCase.put(network.getId(), valuesPerRule);
                        } catch (Exception e) {
                            LOGGER.error(e.toString(), e);
                        }
                    }
                }));
            }
            for (Future<?> task : tasks) {
                task.get();
            }
        } finally {
            executorService.shutdown();
            executorService.awaitTermination(1, TimeUnit.MINUTES);
        }
        writeCsv(statusPerRulePerCase, valuesPerRulePerCase, outputDir);
    }
}
From source file:eu.itesla_project.modules.wca.WCATool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    String offlineWorkflowId = line.getOptionValue("offline-workflow-id"); // can be null meaning use no offline security rules
    Interval histoInterval = Interval.parse(line.getOptionValue("history-interval"));
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    double purityThreshold = DEFAULT_PURITY_THRESHOLD;
    if (line.hasOption("purity-threshold")) {
        purityThreshold = Double.parseDouble(line.getOptionValue("purity-threshold"));
    }
    Set<SecurityIndexType> securityIndexTypes = null;
    if (line.hasOption("security-index-types")) {
        securityIndexTypes = Arrays.stream(line.getOptionValue("security-index-types").split(","))
                .map(SecurityIndexType::valueOf).collect(Collectors.toSet());
    }
    Path outputCsvFile = null;
    if (line.hasOption("output-csv-file")) {
        outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    }
    boolean stopWcaOnViolations = DEFAULT_STOP_WCA_ON_VIOLATIONS;
    if (line.hasOption("stop-on-violations")) {
        stopWcaOnViolations = Boolean.parseBoolean(line.getOptionValue("stop-on-violations"));
    }
    try (ComputationManager computationManager = new LocalComputationManager()) {
        WCAParameters parameters = new WCAParameters(histoInterval, offlineWorkflowId, securityIndexTypes,
                purityThreshold, stopWcaOnViolations);
        OnlineConfig config = OnlineConfig.load();
        ContingenciesAndActionsDatabaseClient contingenciesDb = config.getContingencyDbClientFactoryClass()
                .newInstance().create();
        LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();
        WCAFactory wcaFactory = config.getWcaFactoryClass().newInstance();
        try (HistoDbClient histoDbClient = new SynchronizedHistoDbClient(
                config.getHistoDbClientFactoryClass().newInstance().create());
                RulesDbClient rulesDbClient = config.getRulesDbClientFactoryClass().newInstance()
                        .create(rulesDbName)) {
            UncertaintiesAnalyserFactory uncertaintiesAnalyserFactory = config
                    .getUncertaintiesAnalyserFactoryClass().newInstance();
            if (Files.isRegularFile(caseFile)) {
                if (outputCsvFile != null) {
                    throw new RuntimeException(
                            "In case of single wca, only standard output pretty print is supported");
                }
                System.out.println("loading case...");
                // load the network
                Network network = Importers.loadNetwork(caseFile);
                if (network == null) {
                    throw new RuntimeException("Case '" + caseFile + "' not found");
                }
                network.getStateManager().allowStateMultiThreadAccess(true);
                WCA wca = wcaFactory.create(network, computationManager, histoDbClient, rulesDbClient,
                        uncertaintiesAnalyserFactory, contingenciesDb, loadFlowFactory);
                WCAAsyncResult result = wca.runAsync(StateManager.INITIAL_STATE_ID, parameters).join();
                Table table = new Table(3, BorderStyle.CLASSIC_WIDE);
                table.addCell("Contingency");
                table.addCell("Cluster");
                table.addCell("Causes");
                List<CompletableFuture<WCACluster>> futureClusters = new LinkedList<>(result.getClusters());
                while (futureClusters.size() > 0) {
                    CompletableFuture
                            .anyOf(futureClusters.toArray(new CompletableFuture[futureClusters.size()]))
                            .join();
                    for (Iterator<CompletableFuture<WCACluster>> it = futureClusters.iterator(); it
                            .hasNext();) {
                        CompletableFuture<WCACluster> futureCluster = it.next();
                        if (futureCluster.isDone()) {
                            it.remove();
                            WCACluster cluster = futureCluster.get();
                            if (cluster != null) {
                                System.out.println("contingency " + cluster.getContingency().getId()
                                        + " done: " + cluster.getNum() + " (" + cluster.getOrigin() + ")");
                                table.addCell(cluster.getContingency().getId());
                                table.addCell(cluster.getNum() + " (" + cluster.getOrigin() + ")");
                                List<String> sortedCauses = cluster.getCauses().stream().sorted()
                                        .collect(Collectors.toList());
                                if (sortedCauses != null && sortedCauses.size() > 0) {
                                    table.addCell(sortedCauses.get(0));
                                    for (int i = 1; i < sortedCauses.size(); i++) {
                                        table.addCell("");
                                        table.addCell("");
                                        table.addCell(sortedCauses.get(i));
                                    }
                                } else {
                                    table.addCell("");
                                }
                            }
                        }
                    }
                }
                System.out.println(table.render());
            } else if (Files.isDirectory(caseFile)) {
                if (outputCsvFile == null) {
                    throw new RuntimeException(
                            "In case of multiple wca, you have to specify and ouput to csv file");
                }
                Map<String, Map<String, WCACluster>> clusterPerContingencyPerBaseCase = Collections
                        .synchronizedMap(new TreeMap<>());
                Set<String> contingencyIds = Collections.synchronizedSet(new TreeSet<>());
                Importers.loadNetworks(caseFile, true, network -> {
                    try {
                        network.getStateManager().allowStateMultiThreadAccess(true);
                        String baseStateId = network.getId();
                        network.getStateManager().cloneState(StateManager.INITIAL_STATE_ID, baseStateId);
                        network.getStateManager().setWorkingState(baseStateId);
                        WCA wca = wcaFactory.create(network, computationManager, histoDbClient, rulesDbClient,
                                uncertaintiesAnalyserFactory, contingenciesDb, loadFlowFactory);
                        WCAAsyncResult result = wca.runAsync(baseStateId, parameters).join();
                        Map<String, WCACluster> clusterPerContingency = new HashMap<>();
                        List<CompletableFuture<WCACluster>> futureClusters = new LinkedList<>(
                                result.getClusters());
                        while (futureClusters.size() > 0) {
                            CompletableFuture
                                    .anyOf(futureClusters.toArray(new CompletableFuture[futureClusters.size()]))
                                    .join();
                            for (Iterator<CompletableFuture<WCACluster>> it = futureClusters.iterator(); it
                                    .hasNext();) {
                                CompletableFuture<WCACluster> futureCluster = it.next();
                                if (futureCluster.isDone()) {
                                    it.remove();
                                    WCACluster cluster = futureCluster.get();
                                    if (cluster != null) {
                                        System.out.println("case " + network.getId() + ", contingency "
                                                + cluster.getContingency().getId() + " done: "
                                                + cluster.getNum() + " (" + cluster.getOrigin() + ")");
                                        clusterPerContingency.put(cluster.getContingency().getId(), cluster);
                                        contingencyIds.add(cluster.getContingency().getId());
                                    }
                                }
                            }
                        }
                        clusterPerContingencyPerBaseCase.put(network.getId(), clusterPerContingency);
                    } catch (Exception e) {
                        LOGGER.error(e.toString(), e);
                    }
                }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + "..."));
                writeClustersCsv(clusterPerContingencyPerBaseCase, contingencyIds, outputCsvFile);
            }
        }
    }
}
From source file:eu.itesla_project.offline.forecast_errors.ForecastErrorsAnalysisParameters.java
License:Mozilla Public License
public static ForecastErrorsAnalysisParameters load() {
    ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("fea-parameters");
    DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate"));
    Interval histoInterval = Interval.parse(config.getStringProperty("histoInterval"));
    String feAnalysisId = config.getStringProperty("feAnalysisId");
    double ir = config.getDoubleProperty("ir");
    Integer flagPQ = config.getIntProperty("flagPQ");
    Integer method = config.getIntProperty("method");
    Integer nClusters = config.getIntProperty("nClusters");
    double percentileHistorical = config.getDoubleProperty("percentileHistorical");
    Integer modalityGaussian = config.getOptionalIntProperty("modalityGaussian");
    Integer outliers = config.getOptionalIntProperty("outliers");
    Integer conditionalSampling = config.getOptionalIntProperty("conditionalSampling");
    Integer nSamples = config.getIntProperty("nSamples");
    Set<Country> countries = config.getEnumSetProperty("countries", Country.class, DEFAULT_COUNTRIES);
    CaseType caseType = config.getEnumProperty("caseType", CaseType.class, DEFAULT_CASE_TYPE);
    return new ForecastErrorsAnalysisParameters(baseCaseDate, histoInterval, feAnalysisId, ir, flagPQ, method,
            nClusters, percentileHistorical, modalityGaussian, outliers, conditionalSampling, nSamples,
            countries, caseType);
}
From source file:eu.itesla_project.offline.tools.CreateOfflineWorkflowTool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    String workflowId = line.getOptionValue("workflow");
    Set<Country> countries = line.hasOption("base-case-countries")
            ? Arrays.stream(line.getOptionValue("base-case-countries").split(",")).map(Country::valueOf)
                    .collect(Collectors.toSet())
            : getDefaultParameters().getCountries();
    DateTime baseCaseDate = line.hasOption("base-case-date")
            ? DateTime.parse(line.getOptionValue("base-case-date"))
            : getDefaultParameters().getBaseCaseDate();
    Interval histoInterval = line.hasOption("history-interval")
            ? Interval.parse(line.getOptionValue("history-interval"))
            : getDefaultParameters().getHistoInterval();
    boolean generationSampled = line.hasOption("generation-sampled")
            || getDefaultParameters().isGenerationSampled();
    boolean boundariesSampled = line.hasOption("boundaries-sampled")
            || getDefaultParameters().isBoundariesSampled();
    boolean initTopo = line.hasOption("topo-init") || getDefaultParameters().isInitTopo();
    double correlationThreshold = line.hasOption("correlation-threshold")
            ? Double.parseDouble(line.getOptionValue("correlation-threshold"))
            : getDefaultParameters().getCorrelationThreshold();
    double probabilityThreshold = line.hasOption("probability-threshold")
            ? Double.parseDouble(line.getOptionValue("probability-threshold"))
            : getDefaultParameters().getProbabilityThreshold();
    boolean loadFlowTransformerVoltageControlOn = line.hasOption("loadflow-transformer-voltage-control-on")
            || getDefaultParameters().isLoadFlowTransformerVoltageControlOn();
    boolean simplifiedWorkflow = line.hasOption("simplified-workflow")
            || getDefaultParameters().isSimplifiedWorkflow();
    boolean mergeOptimized = line.hasOption("merge-optimized") || getDefaultParameters().isMergeOptimized();
    Set<Country> attributesCountryFilter = line.hasOption("attributes-country-filter")
            ? Arrays.stream(line.getOptionValue("attributes-country-filter").split(",")).map(Country::valueOf)
                    .collect(Collectors.toSet())
            : getDefaultParameters().getAttributesCountryFilter();
    int attributesMinBaseVoltageFilter = line.hasOption("attributes-min-base-voltage-filter")
            ? Integer.parseInt(line.getOptionValue("attributes-min-base-voltage-filter"))
            : getDefaultParameters().getAttributesMinBaseVoltageFilter();
    OfflineWorkflowCreationParameters parameters = new OfflineWorkflowCreationParameters(countries,
            baseCaseDate, histoInterval, generationSampled, boundariesSampled, initTopo, correlationThreshold,
            probabilityThreshold, loadFlowTransformerVoltageControlOn, simplifiedWorkflow, mergeOptimized,
            attributesCountryFilter, attributesMinBaseVoltageFilter);
    parameters.print(System.out);
    try (OfflineApplication app = new RemoteOfflineApplicationImpl()) {
        String workflowId2 = app.createWorkflow(workflowId, parameters);
        System.out.println("offline workflow '" + workflowId2 + "' created");
    }
}
From source file:eu.itesla_project.online.db.OnlineDbMVStore.java
License:Mozilla Public License
@Override
public OnlineWorkflowParameters getWorkflowParameters(String workflowId) {
    Objects.requireNonNull(workflowId, "workflow id is null");
    LOGGER.info("Getting configuration parameters of wf {}", workflowId);
    if (isWorkflowStored(workflowId)) {
        MVStore wfMVStore = getStore(workflowId);
        if (wfMVStore.hasMap(STORED_PARAMETERS_MAP_NAME)) {
            MVMap<String, String> storedParametersMap = wfMVStore.openMap(STORED_PARAMETERS_MAP_NAME,
                    mapBuilder);
            DateTime baseCaseDate = DateTime.parse(storedParametersMap.get(STORED_PARAMETERS_BASECASE_KEY));
            int states = Integer.parseInt(storedParametersMap.get(STORED_PARAMETERS_STATE_NUMBER_KEY));
            String offlineWorkflowId = storedParametersMap.get(STORED_PARAMETERS_OFFLINE_WF_ID_KEY);
            TimeHorizon timeHorizon = TimeHorizon
                    .fromName(storedParametersMap.get(STORED_RESULTS_TIMEHORIZON_KEY));
            Interval histoInterval = Interval
                    .parse(storedParametersMap.get(STORED_PARAMETERS_HISTO_INTERVAL_KEY));
            String feAnalysisId = storedParametersMap.get(STORED_PARAMETERS_FEA_ID_KEY);
            double rulesPurityThreshold = Double
                    .parseDouble((storedParametersMap.get(STORED_PARAMETERS_RULES_PURITY_KEY) == null) ? "1"
                            : storedParametersMap.get(STORED_PARAMETERS_RULES_PURITY_KEY));
            boolean storeStates = Boolean
                    .parseBoolean(storedParametersMap.get(STORED_PARAMETERS_STORE_STATES_KEY));
            boolean analyseBasecase = Boolean
                    .parseBoolean(storedParametersMap.get(STORED_PARAMETERS_ANALYSE_BASECASE_KEY));
            boolean validation = Boolean
                    .parseBoolean(storedParametersMap.get(STORED_PARAMETERS_VALIDATION_KEY));
            Set<SecurityIndexType> securityIndexes = null;
            if (storedParametersMap.containsKey(STORED_PARAMETERS_SECURITY_INDEXES_KEY))
                securityIndexes = OnlineDbMVStoreUtils
                        .jsonToIndexesTypes(storedParametersMap.get(STORED_PARAMETERS_SECURITY_INDEXES_KEY));
            CaseType caseType = CaseType.valueOf(storedParametersMap.get(STORED_PARAMETERS_CASE_TYPE_KEY));
            Set<Country> countries = OnlineDbMVStoreUtils
                    .jsonToCountries(storedParametersMap.get(STORED_PARAMETERS_COUNTRIES_KEY));
            boolean mergeOptimized = OnlineWorkflowParameters.DEFAULT_MERGE_OPTIMIZED;
            if (storedParametersMap.containsKey(STORED_PARAMETERS_MERGE_OPTIMIZED_KEY))
                mergeOptimized = Boolean
                        .parseBoolean(storedParametersMap.get(STORED_PARAMETERS_MERGE_OPTIMIZED_KEY));
            float limitReduction = OnlineWorkflowParameters.DEFAULT_LIMIT_REDUCTION;
            if (storedParametersMap.containsKey(STORED_PARAMETERS_LIMIT_REDUCTION_KEY))
                limitReduction = Float
                        .parseFloat(storedParametersMap.get(STORED_PARAMETERS_LIMIT_REDUCTION_KEY));
            boolean handleViolations = OnlineWorkflowParameters.DEFAULT_HANDLE_VIOLATIONS_IN_N;
            if (storedParametersMap.containsKey(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY))
                handleViolations = Boolean
                        .parseBoolean(storedParametersMap.get(STORED_PARAMETERS_HANDLE_VIOLATIONS_KEY));
            float constraintMargin = OnlineWorkflowParameters.DEFAULT_CONSTRAINT_MARGIN;
            if (storedParametersMap.containsKey(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY))
                constraintMargin = Float
                        .parseFloat(storedParametersMap.get(STORED_PARAMETERS_CONSTRAINT_MARGIN_KEY));
            OnlineWorkflowParameters onlineWfPars = new OnlineWorkflowParameters(baseCaseDate, states,
                    histoInterval, offlineWorkflowId, timeHorizon, feAnalysisId, rulesPurityThreshold,
                    storeStates, analyseBasecase, validation, securityIndexes, caseType, countries,
                    mergeOptimized, limitReduction, handleViolations, constraintMargin);
            if (storedParametersMap.containsKey(STORED_PARAMETERS_CASE_FILE_KEY)) {
                onlineWfPars.setCaseFile(storedParametersMap.get(STORED_PARAMETERS_CASE_FILE_KEY));
            }
            return onlineWfPars;
        } else {
            LOGGER.warn("No configuration parameters of wf {} stored in online db", workflowId);
            return null;
        }
    } else {
        LOGGER.warn("No data about wf {}", workflowId);
        return null;
    }
}
From source file:eu.itesla_project.online.tools.ListOnlineWorkflowsTool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    OnlineConfig config = OnlineConfig.load();
    OnlineDb onlinedb = config.getOnlineDbFactoryClass().newInstance().create();
    List<OnlineWorkflowDetails> workflows = null;
    if (line.hasOption("basecase")) {
        DateTime basecaseDate = DateTime.parse(line.getOptionValue("basecase"));
        workflows = onlinedb.listWorkflows(basecaseDate);
    } else if (line.hasOption("basecases-interval")) {
        Interval basecasesInterval = Interval.parse(line.getOptionValue("basecases-interval"));
        workflows = onlinedb.listWorkflows(basecasesInterval);
    } else if (line.hasOption("workflow")) {
        String workflowId = line.getOptionValue("workflow");
        OnlineWorkflowDetails workflowDetails = onlinedb.getWorkflowDetails(workflowId);
        workflows = new ArrayList<OnlineWorkflowDetails>();
        if (workflowDetails != null)
            workflows.add(workflowDetails);
    } else
        workflows = onlinedb.listWorkflows();
    boolean printParameters = line.hasOption("parameters");
    DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
    Table table = new Table(2, BorderStyle.CLASSIC_WIDE);
    if (printParameters)
        table = new Table(3, BorderStyle.CLASSIC_WIDE);
    List<Map<String, String>> jsonData = new ArrayList<Map<String, String>>();
    table.addCell("ID", new CellStyle(CellStyle.HorizontalAlign.center));
    table.addCell("Date", new CellStyle(CellStyle.HorizontalAlign.center));
    if (printParameters)
        table.addCell("Parameters", new CellStyle(CellStyle.HorizontalAlign.center));
    for (OnlineWorkflowDetails workflow : workflows) {
        Map<String, String> wfJsonData = new HashMap<String, String>();
        table.addCell(workflow.getWorkflowId());
        wfJsonData.put("id", workflow.getWorkflowId());
        table.addCell(formatter.print(workflow.getWorkflowDate()));
        wfJsonData.put("date", formatter.print(workflow.getWorkflowDate()));
        if (printParameters) {
            OnlineWorkflowParameters parameters = onlinedb.getWorkflowParameters(workflow.getWorkflowId());
            if (parameters != null) {
                table.addCell("Basecase = " + parameters.getBaseCaseDate().toString());
                wfJsonData.put(OnlineWorkflowCommand.BASE_CASE, parameters.getBaseCaseDate().toString());
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Time Horizon = " + parameters.getTimeHorizon().getName());
                wfJsonData.put(OnlineWorkflowCommand.TIME_HORIZON, parameters.getTimeHorizon().getName());
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("FE Analysis Id = " + parameters.getFeAnalysisId());
                wfJsonData.put(OnlineWorkflowCommand.FEANALYSIS_ID, parameters.getFeAnalysisId());
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Offline Workflow Id = " + parameters.getOfflineWorkflowId());
                wfJsonData.put(OnlineWorkflowCommand.WORKFLOW_ID, parameters.getOfflineWorkflowId());
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Historical Interval = " + parameters.getHistoInterval().toString());
                wfJsonData.put(OnlineWorkflowCommand.HISTODB_INTERVAL,
                        parameters.getHistoInterval().toString());
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("States = " + Integer.toString(parameters.getStates()));
                wfJsonData.put(OnlineWorkflowCommand.STATES, Integer.toString(parameters.getStates()));
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Rules Purity Threshold = "
                        + Double.toString(parameters.getRulesPurityThreshold()));
                wfJsonData.put(OnlineWorkflowCommand.RULES_PURITY,
                        Double.toString(parameters.getRulesPurityThreshold()));
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Store States = " + Boolean.toString(parameters.storeStates()));
                wfJsonData.put(OnlineWorkflowCommand.STORE_STATES, Boolean.toString(parameters.storeStates()));
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Analyse Basecase = " + Boolean.toString(parameters.analyseBasecase()));
                wfJsonData.put(OnlineWorkflowCommand.ANALYSE_BASECASE,
                        Boolean.toString(parameters.analyseBasecase()));
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Validation = " + Boolean.toString(parameters.validation()));
                wfJsonData.put(OnlineWorkflowCommand.VALIDATION, Boolean.toString(parameters.validation()));
                table.addCell(" ");
                table.addCell(" ");
                String securityRulesString = parameters.getSecurityIndexes() == null ? "ALL"
                        : parameters.getSecurityIndexes().toString();
                table.addCell("Security Rules = " + securityRulesString);
                wfJsonData.put(OnlineWorkflowCommand.SECURITY_INDEXES, securityRulesString);
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Case Type = " + parameters.getCaseType());
                wfJsonData.put(OnlineWorkflowCommand.CASE_TYPE, parameters.getCaseType().name());
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Countries = " + parameters.getCountries().toString());
                wfJsonData.put(OnlineWorkflowCommand.COUNTRIES, parameters.getCountries().toString());
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Limits Reduction = " + Float.toString(parameters.getLimitReduction()));
                wfJsonData.put(OnlineWorkflowCommand.LIMIT_REDUCTION,
                        Float.toString(parameters.getLimitReduction()));
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Handle Violations in N = "
                        + Boolean.toString(parameters.isHandleViolationsInN()));
                wfJsonData.put(OnlineWorkflowCommand.HANDLE_VIOLATION_IN_N,
                        Boolean.toString(parameters.isHandleViolationsInN()));
                table.addCell(" ");
                table.addCell(" ");
                table.addCell("Constrain Margin = " + Float.toString(parameters.getConstraintMargin()));
                wfJsonData.put(OnlineWorkflowCommand.CONSTRAINT_MARGIN,
                        Float.toString(parameters.getConstraintMargin()));
                if (parameters.getCaseFile() != null) {
                    table.addCell(" ");
                    table.addCell(" ");
                    table.addCell("Case file = " + parameters.getCaseFile());
                    wfJsonData.put(OnlineWorkflowCommand.CASE_FILE, parameters.getCaseFile());
                }
            } else {
                table.addCell("-");
            }
        }
        jsonData.add(wfJsonData);
    }
    if (line.hasOption("json")) {
        Path jsonFile = Paths.get(line.getOptionValue("json"));
        try (FileWriter jsonFileWriter = new FileWriter(jsonFile.toFile())) {
            //JSONSerializer.toJSON(jsonData).write(jsonFileWriter);
            jsonFileWriter.write(JSONSerializer.toJSON(jsonData).toString(3));
        }
    } else
        System.out.println(table.render());
    onlinedb.close();
}
From source file:eu.itesla_project.online.tools.OnlineWorkflowTool.java
License:Mozilla Public License
@Override
public void run(CommandLine line) throws Exception {
    OnlineWorkflowStartParameters startconfig = OnlineWorkflowStartParameters.loadDefault();
    String host = line.getOptionValue(OnlineWorkflowCommand.HOST);
    String port = line.getOptionValue(OnlineWorkflowCommand.PORT);
    String threads = line.getOptionValue(OnlineWorkflowCommand.THREADS);
    if (host != null)
        startconfig.setJmxHost(host);
    if (port != null)
        startconfig.setJmxPort(Integer.valueOf(port));
    if (threads != null)
        startconfig.setThreads(Integer.valueOf(threads));
    Set<DateTime> baseCasesSet = null;
    OnlineWorkflowParameters params = OnlineWorkflowParameters.loadDefault();
    boolean atLeastOneBaseCaseLineParam = line.hasOption(OnlineWorkflowCommand.CASE_TYPE)
            || line.hasOption(OnlineWorkflowCommand.COUNTRIES)
            || line.hasOption(OnlineWorkflowCommand.BASE_CASE)
            || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL);
    boolean allNeededBaseCaseLineParams = line.hasOption(OnlineWorkflowCommand.CASE_TYPE)
            && line.hasOption(OnlineWorkflowCommand.COUNTRIES)
            && (line.hasOption(OnlineWorkflowCommand.BASE_CASE)
                    || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL));
    if (line.hasOption(OnlineWorkflowCommand.CASE_FILE)) {
        if (atLeastOneBaseCaseLineParam) {
            showHelp("parameter " + OnlineWorkflowCommand.CASE_FILE
                    + " cannot be used together with parameters: " + OnlineWorkflowCommand.CASE_TYPE + ", "
                    + OnlineWorkflowCommand.COUNTRIES + ", " + OnlineWorkflowCommand.BASE_CASE + ", "
                    + OnlineWorkflowCommand.BASECASES_INTERVAL);
            return;
        }
        params.setCaseFile(line.getOptionValue(OnlineWorkflowCommand.CASE_FILE));
    } else {
        if (params.getCaseFile() != null) {
            if (atLeastOneBaseCaseLineParam) {
                if (!allNeededBaseCaseLineParams) {
                    showHelp("to override default parameter " + OnlineWorkflowCommand.CASE_FILE
                            + ", all these parameters must be specified: " + OnlineWorkflowCommand.CASE_TYPE
                            + ", " + OnlineWorkflowCommand.COUNTRIES + ", " + OnlineWorkflowCommand.BASE_CASE
                            + " or " + OnlineWorkflowCommand.BASECASES_INTERVAL);
                    return;
                }
                params.setCaseFile(null);
            }
        }
        if (line.hasOption(OnlineWorkflowCommand.CASE_TYPE))
            params.setCaseType(CaseType.valueOf(line.getOptionValue(OnlineWorkflowCommand.CASE_TYPE)));
        if (line.hasOption(OnlineWorkflowCommand.COUNTRIES)) {
            params.setCountries(Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.COUNTRIES).split(","))
                    .map(Country::valueOf).collect(Collectors.toSet()));
        }
        if (line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)) {
            Interval basecasesInterval = Interval
                    .parse(line.getOptionValue(OnlineWorkflowCommand.BASECASES_INTERVAL));
            OnlineConfig oConfig = OnlineConfig.load();
            CaseRepository caseRepo = oConfig.getCaseRepositoryFactoryClass().newInstance()
                    .create(new LocalComputationManager());
            baseCasesSet = caseRepo.dataAvailable(params.getCaseType(), params.getCountries(),
                    basecasesInterval);
            System.out.println("Base cases available for interval " + basecasesInterval.toString());
            baseCasesSet.forEach(x -> {
                System.out.println(" " + x);
            });
        }
        if (baseCasesSet == null) {
            baseCasesSet = new HashSet<>();
            String base = line.getOptionValue(OnlineWorkflowCommand.BASE_CASE);
            if (base != null) {
                baseCasesSet.add(DateTime.parse(base));
            } else {
                baseCasesSet.add(params.getBaseCaseDate());
            }
        }
    }
    String histo = line.getOptionValue(OnlineWorkflowCommand.HISTODB_INTERVAL);
    if (histo != null)
        params.setHistoInterval(Interval.parse(histo));
    String states = line.getOptionValue(OnlineWorkflowCommand.STATES);
    if (states != null)
        params.setStates(Integer.parseInt(states));
    String timeHorizon = line.getOptionValue(OnlineWorkflowCommand.TIME_HORIZON);
    if (timeHorizon != null)
        params.setTimeHorizon(TimeHorizon.fromName(timeHorizon));
    String workflowid = line.getOptionValue(OnlineWorkflowCommand.WORKFLOW_ID);
    if (workflowid != null)
        params.setOfflineWorkflowId(workflowid);
    String feAnalysisId = line.getOptionValue(OnlineWorkflowCommand.FEANALYSIS_ID);
    if (feAnalysisId != null)
        params.setFeAnalysisId(feAnalysisId);
    String rulesPurity = line.getOptionValue(OnlineWorkflowCommand.RULES_PURITY);
    if (rulesPurity != null)
        params.setRulesPurityThreshold(Double.parseDouble(rulesPurity));
    if (line.hasOption(OnlineWorkflowCommand.STORE_STATES))
        params.setStoreStates(true);
    if (line.hasOption(OnlineWorkflowCommand.ANALYSE_BASECASE))
        params.setAnalyseBasecase(true);
    if (line.hasOption(OnlineWorkflowCommand.VALIDATION)) {
        params.setValidation(true);
        params.setStoreStates(true); // if validation then store states
        params.setAnalyseBasecase(true); // if validation then analyze base case
    }
    Set<SecurityIndexType> securityIndexes = null;
    if (line.hasOption(OnlineWorkflowCommand.SECURITY_INDEXES)) {
        if (!"ALL".equals(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES)))
            securityIndexes = Arrays
                    .stream(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES).split(","))
                    .map(SecurityIndexType::valueOf).collect(Collectors.toSet());
        params.setSecurityIndexes(securityIndexes);
    }
    if (line.hasOption(OnlineWorkflowCommand.MERGE_OPTIMIZED))
        params.setMergeOptimized(true);
    String limitReduction = line.getOptionValue(OnlineWorkflowCommand.LIMIT_REDUCTION);
    if (limitReduction != null)
        params.setLimitReduction(Float.parseFloat(limitReduction));
    if (line.hasOption(OnlineWorkflowCommand.HANDLE_VIOLATION_IN_N)) {
        params.setHandleViolationsInN(true);
        params.setAnalyseBasecase(true); // if I need to handle violations in N, I need to analyze base case
    }
    String constraintMargin = line.getOptionValue(OnlineWorkflowCommand.CONSTRAINT_MARGIN);
    if (constraintMargin != null)
        params.setConstraintMargin(Float.parseFloat(constraintMargin));
    String urlString = "service:jmx:rmi:///jndi/rmi://" + startconfig.getJmxHost() + ":"
            + startconfig.getJmxPort() + "/jmxrmi";
    JMXServiceURL serviceURL = new JMXServiceURL(urlString);
    Map<String, String> jmxEnv = new HashMap<>();
    JMXConnector connector = JMXConnectorFactory.connect(serviceURL, jmxEnv);
    MBeanServerConnection mbsc = connector.getMBeanServerConnection();
    ObjectName name = new ObjectName(LocalOnlineApplicationMBean.BEAN_NAME);
    LocalOnlineApplicationMBean application = MBeanServerInvocationHandler.newProxyInstance(mbsc, name,
            LocalOnlineApplicationMBean.class, false);
    if (line.hasOption(OnlineWorkflowCommand.START_CMD)) {
        if (params.getCaseFile() != null) {
            System.out.println("starting Online Workflow, caseFile " + params.getCaseFile());
            String workflowId = application.startWorkflow(startconfig, params);
            System.out.println("workflowId=" + workflowId);
        } else {
            for (DateTime basecase : baseCasesSet) {
                params.setBaseCaseDate(basecase);
                System.out.println("starting Online Workflow, basecase " + basecase.toString());
                String workflowId = application.startWorkflow(startconfig, params);
                System.out.println("workflowId=" + workflowId);
            }
        }
    } else if (line.hasOption(OnlineWorkflowCommand.SHUTDOWN_CMD)) {
        application.shutdown();
    } else {
        showHelp("");
    }
}