List of usage examples for org.joda.time.Interval.parse(String)
public static Interval parse(String str)
From source file:dk.dma.commons.util.DateTimeUtil.java
License:Apache License
/**
 * Parses an ISO-8601 interval string into a Joda-Time {@link Interval}.
 * A string without a "/" separator is treated as a start instant only,
 * in which case the interval is closed at the current time.
 */
public static Interval toInterval(String isoXXInterval) {
    String spec = isoXXInterval;
    if (!spec.contains("/")) {
        // Open-ended specification: use "now" as the end instant.
        spec = spec + "/" + DateTime.now();
    }
    return Interval.parse(spec);
}
From source file:eu.itesla_project.commons.config.MapModuleConfig.java
License:Mozilla Public License
/**
 * Reads the configuration property {@code name} as a string and parses it
 * as an ISO-8601 time interval.
 */
@Override
public Interval getIntervalProperty(String name) {
    String raw = getStringProperty(name);
    return Interval.parse(raw);
}
From source file:eu.itesla_project.commons.jaxb.IntervalAdapter.java
License:Mozilla Public License
/**
 * JAXB adapter hook: converts the marshalled XML string value into a
 * Joda-Time {@link Interval}.
 */
@Override
public Interval unmarshal(String v) throws Exception {
    final String text = v;
    return Interval.parse(text);
}
From source file:eu.itesla_project.modules.histo.tools.HistoDbCountAttributesTool.java
License:Mozilla Public License
/**
 * Prints, for every attribute stored in the historical DB, the number of
 * values available over the requested time interval, one "id;count" line
 * per attribute.
 */
@Override
public void run(CommandLine line) throws Exception {
    // Mandatory time window; horizon defaults to snapshot (SN) unless overridden.
    Interval interval = Interval.parse(line.getOptionValue("interval"));
    HistoDbHorizon horizon = line.hasOption("horizon")
            ? HistoDbHorizon.valueOf(line.getOptionValue("horizon"))
            : HistoDbHorizon.SN;
    OfflineConfig config = OfflineConfig.load();
    try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create(true)) {
        // LinkedHashSet keeps the server-reported attribute order stable in the output.
        Set<HistoDbAttributeId> attributeIds = new LinkedHashSet<>(histoDbClient.listAttributes());
        HistoDbStats stats = histoDbClient.queryStats(attributeIds, interval, horizon, true);
        for (HistoDbAttributeId attributeId : attributeIds) {
            int count = (int) stats.getValue(HistoDbStatsType.COUNT, attributeId, -1);
            System.out.println(attributeId + ";" + count);
        }
    }
}
From source file:eu.itesla_project.modules.histo.tools.HistoDbPrintAttributesTool.java
License:Mozilla Public License
/**
 * Queries the historical DB for the requested attributes over a time
 * interval and writes the resulting CSV to stdout, either reformatted
 * (--format) or copied through verbatim.
 */
@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create()) {
        boolean statistics = line.hasOption("statistics");
        // Attribute set preserves insertion order; the synthetic datetime
        // column is only meaningful for raw data queries, not statistics.
        Set<HistoDbAttributeId> attrs = new LinkedHashSet<>();
        if (!statistics && line.hasOption("add-datetime")) {
            attrs.add(HistoDbMetaAttributeId.datetime);
        }
        for (String str : line.getOptionValue("attributes").split(",")) {
            attrs.add(HistoDbAttributeIdParser.parse(str));
        }
        Interval interval = Interval.parse(line.getOptionValue("interval"));
        boolean format = line.hasOption("format");
        HistoDbHorizon horizon = line.hasOption("horizon")
                ? HistoDbHorizon.valueOf(line.getOptionValue("horizon"))
                : HistoDbHorizon.SN;
        // Synchronous, uncompressed transfer.
        boolean async = false;
        boolean zipped = false;
        InputStream is = histoDbClient.queryCsv(statistics ? HistoQueryType.stats : HistoQueryType.data,
                attrs, interval, horizon, zipped, async);
        if (format) {
            format(is, zipped);
        } else {
            try (Reader reader = createReader(is, zipped)) {
                CharStreams.copy(reader, System.out);
            }
        }
    }
}
From source file:eu.itesla_project.modules.histo.tools.HistoDbPrintForecastDiffTool.java
License:Mozilla Public License
@Override public void run(CommandLine line) throws Exception { OfflineConfig config = OfflineConfig.load(); try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create()) { Interval interval = Interval.parse(line.getOptionValue("interval")); try (Reader reader = new InputStreamReader(histoDbClient.queryCsv(HistoQueryType.forecastDiff, EnumSet.allOf(Country.class), EnumSet.of(HistoDbEquip.loads, HistoDbEquip.gen), EnumSet.of(HistoDbAttr.P), interval, HistoDbHorizon.DACF, false, false))) { CharStreams.copy(reader, System.out); }// ww w . j av a 2 s.c o m } }
From source file:eu.itesla_project.modules.histo.tools.HistoDbPrintVoltageRangeTool.java
License:Mozilla Public License
/**
 * For each voltage level of the given network case, queries the historical
 * DB for the min/max voltage over the interval, normalises both by the
 * level's nominal voltage, and prints an ASCII table (id, vnom, range
 * width, min, max, sample count, total generator Pmax) sorted by range
 * width ascending.
 *
 * NOTE(review): min/max default to NaN when the attribute is absent, so a
 * missing attribute yields NaN range endpoints — presumably intentional;
 * confirm against HistoDbStats semantics.
 */
@Override public void run(CommandLine line) throws Exception { Interval interval = Interval.parse(line.getOptionValue("interval")); Path caseFile = Paths.get(line.getOptionValue("case-file")); Map<String, VoltageStats> ranges = new HashMap<>(); Network network = Importers.loadNetwork(caseFile); if (network == null) { throw new RuntimeException("Case '" + caseFile + "' not found"); }// www. j a v a2 s. c o m network.getStateManager().allowStateMultiThreadAccess(true); OfflineConfig config = OfflineConfig.load(); try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create()) { Set<HistoDbAttributeId> attrIds = new LinkedHashSet<>(); for (VoltageLevel vl : network.getVoltageLevels()) { attrIds.add(new HistoDbNetworkAttributeId(vl.getId(), HistoDbAttr.V)); } HistoDbStats stats = histoDbClient.queryStats(attrIds, interval, HistoDbHorizon.SN, false); for (VoltageLevel vl : network.getVoltageLevels()) { HistoDbNetworkAttributeId attrId = new HistoDbNetworkAttributeId(vl.getId(), HistoDbAttr.V); float min = stats.getValue(HistoDbStatsType.MIN, attrId, Float.NaN) / vl.getNominalV(); float max = stats.getValue(HistoDbStatsType.MAX, attrId, Float.NaN) / vl.getNominalV(); int count = (int) stats.getValue(HistoDbStatsType.COUNT, attrId, 0); VoltageStats vstats = new VoltageStats(Range.closed(min, max), count, vl.getNominalV()); for (Generator g : vl.getGenerators()) { vstats.pmax += g.getMaxP(); } ranges.put(vl.getId(), vstats); } } Table table = new Table(7, BorderStyle.CLASSIC_WIDE); table.addCell("ID"); table.addCell("vnom"); table.addCell("range"); table.addCell("min"); table.addCell("max"); table.addCell("count"); table.addCell("pmax"); ranges.entrySet().stream().sorted((e1, e2) -> { VoltageStats stats1 = e1.getValue(); VoltageStats stats2 = e2.getValue(); Range<Float> r1 = stats1.range; Range<Float> r2 = stats2.range; float s1 = r1.upperEndpoint() - r1.lowerEndpoint(); float s2 = r2.upperEndpoint() - r2.lowerEndpoint(); return 
Float.compare(s1, s2); }).forEach(e -> { String vlId = e.getKey(); VoltageStats stats = e.getValue(); Range<Float> r = stats.range; float s = r.upperEndpoint() - r.lowerEndpoint(); table.addCell(vlId); table.addCell(Float.toString(stats.vnom)); table.addCell(Float.toString(s)); table.addCell(Float.toString(r.lowerEndpoint())); table.addCell(Float.toString(r.upperEndpoint())); table.addCell(Integer.toString(stats.count)); table.addCell(Float.toString(stats.pmax)); }); System.out.println(table.render()); }
From source file:eu.itesla_project.modules.mcla.ForecastErrorsAnalyzerParameters.java
License:Mozilla Public License
/**
 * Loads a {@link ForecastErrorsAnalyzerParameters} instance from a Java
 * properties file.
 *
 * <p>Every key except {@code nSamples} is required; a missing required key
 * yields a {@code NullPointerException}/{@code NumberFormatException} from
 * the corresponding parse call. {@code nSamples} defaults to -1 when absent.
 *
 * @param file path to the properties file
 * @return the parsed analyzer parameters
 * @throws FileNotFoundException if {@code file} does not exist
 * @throws IOException if the file cannot be read
 */
public static ForecastErrorsAnalyzerParameters fromFile(Path file) throws FileNotFoundException, IOException {
    Properties properties = new Properties();
    try (InputStream input = new FileInputStream(file.toFile())) {
        properties.load(input);
        return new ForecastErrorsAnalyzerParameters(
                Interval.parse(properties.getProperty("histoInterval")),
                properties.getProperty("feAnalysisId"),
                Double.parseDouble(properties.getProperty("ir")),
                Integer.parseInt(properties.getProperty("flagPQ")),
                Integer.parseInt(properties.getProperty("method")),
                Integer.parseInt(properties.getProperty("nClusters")),
                Double.parseDouble(properties.getProperty("percentileHistorical")),
                Integer.parseInt(properties.getProperty("modalityGaussian")),
                Integer.parseInt(properties.getProperty("outliers")),
                Integer.parseInt(properties.getProperty("conditionalSampling")),
                // Optional key: Properties#getProperty(key, default) replaces the
                // original double-lookup null-check ternary; same behavior.
                Integer.parseInt(properties.getProperty("nSamples", "-1")));
    }
}
From source file:eu.itesla_project.modules.online.OnlineWorkflowParameters.java
License:Mozilla Public License
/**
 * Builds the default {@link OnlineWorkflowParameters} from the
 * "online-default-parameters" module configuration.
 *
 * Two mutually exclusive modes are supported: if "caseFile" is set, none of
 * "baseCaseDate"/"caseType"/"countries" may be set and the case-file
 * constructor is used; otherwise those three keys are read and the
 * date/type/countries constructor is used.
 */
public static OnlineWorkflowParameters loadDefault() { ModuleConfig config = PlatformConfig.defaultConfig().getModuleConfig("online-default-parameters"); int states = config.getIntProperty("states"); String offlineWorkflowId = config.getStringProperty("offlineWorkflowId", null); TimeHorizon timeHorizon = TimeHorizon.fromName(config.getStringProperty("timeHorizon").trim()); Interval histoInterval = Interval.parse(config.getStringProperty("histoInterval")); String feAnalysisId = config.getStringProperty("feAnalysisId"); double rulesPurityThreshold = Double.parseDouble(config.getStringProperty("rulesPurityThreshold")); boolean storeStates = config.getBooleanProperty("storeStates", false); boolean analyseBasecase = config.getBooleanProperty("analyseBasecase", true); boolean validation = config.getBooleanProperty("validation", false); Set<SecurityIndexType> securityIndexes = config.getEnumSetProperty("securityIndexes", SecurityIndexType.class, null); boolean mergeOptimized = config.getBooleanProperty("mergeOptimized", DEFAULT_MERGE_OPTIMIZED); float limitReduction = config.getFloatProperty("limitReduction", DEFAULT_LIMIT_REDUCTION); boolean handleViolationsInN = config.getBooleanProperty("handleViolationsInN", DEFAULT_HANDLE_VIOLATIONS_IN_N); float constraintMargin = config.getFloatProperty("constraintMargin", DEFAULT_CONSTRAINT_MARGIN); String caseFile = config.getStringProperty("caseFile", null); if (caseFile != null) { if ((config.getStringProperty("baseCaseDate", null) != null) || (config.getStringProperty("caseType", null) != null) || (config.getStringProperty("countries", null) != null)) throw new RuntimeException( "caseFile and ( baseCaseDate, caseType, countries ) are mutually exclusive options"); return new OnlineWorkflowParameters(states, histoInterval, offlineWorkflowId, timeHorizon, feAnalysisId, rulesPurityThreshold, storeStates, analyseBasecase, validation, securityIndexes, mergeOptimized, limitReduction, handleViolationsInN, constraintMargin, caseFile); 
}// w w w .ja v a 2s.c o m DateTime baseCaseDate = DateTime.parse(config.getStringProperty("baseCaseDate")); CaseType caseType = config.getEnumProperty("caseType", CaseType.class); Set<Country> countries = config.getEnumSetProperty("countries", Country.class); return new OnlineWorkflowParameters(baseCaseDate, states, histoInterval, offlineWorkflowId, timeHorizon, feAnalysisId, rulesPurityThreshold, storeStates, analyseBasecase, validation, securityIndexes, caseType, countries, mergeOptimized, limitReduction, handleViolationsInN, constraintMargin); }
From source file:eu.itesla_project.modules.OptimizerTool.java
License:Mozilla Public License
/**
 * Loads a network case, initialises the configured optimizer with a
 * topology context built from historical data over the given interval, runs
 * the optimizer, and — if the result is feasible — runs a load flow and
 * optionally prints limit violations.
 *
 * NOTE(review): optimizer and load flow are both created for state index 0;
 * presumably only the base state is optimised here — confirm against the
 * factory contracts.
 */
@Override public void run(CommandLine line) throws Exception { Path caseFile = Paths.get(line.getOptionValue("case-file")); Interval histoInterval = Interval.parse(line.getOptionValue("history-interval")); boolean checkConstraints = line.hasOption("check-constraints"); double correlationThreshold = Double.parseDouble(line.getOptionValue("correlation-threshold")); double probabilityThreshold = Double.parseDouble(line.getOptionValue("probability-threshold")); boolean generationSampled = line.hasOption("generation-sampled"); boolean boundariesSampled = line.hasOption("boundaries-sampled"); try (ComputationManager computationManager = new LocalComputationManager()) { System.out.println("loading case..."); // load the network Network network = Importers.loadNetwork(caseFile); if (network == null) { throw new RuntimeException("Case '" + caseFile + "' not found"); }// ww w .j a va2 s .c om network.getStateManager().allowStateMultiThreadAccess(true); System.out.println("sample characteristics: " + SampleCharacteritics.fromNetwork(network, generationSampled, boundariesSampled)); OfflineConfig config = OfflineConfig.load(); try (HistoDbClient histoDbClient = config.getHistoDbClientFactoryClass().newInstance().create(); TopologyMiner topologyMiner = config.getTopologyMinerFactoryClass().newInstance().create()) { Optimizer optimizer = config.getOptimizerFactoryClass().newInstance().create(network, computationManager, 0, histoDbClient, topologyMiner); LoadFlow loadFlow = config.getLoadFlowFactoryClass().newInstance().create(network, computationManager, 0); System.out.println("initializing optimizer..."); TopologyContext topologyContext = TopologyContext.create(network, topologyMiner, histoDbClient, computationManager, histoInterval, correlationThreshold, probabilityThreshold); optimizer.init(new OptimizerParameters(histoInterval), topologyContext); System.out.println("running optimizer..."); OptimizerResult result = optimizer.run(); System.out.println("optimizer status is " + 
(result.isFeasible() ? "feasible" : "unfeasible") + " (" + result.getMetrics() + ")"); if (result.isFeasible()) { System.out.println("running loadflow..."); LoadFlowResult result2 = loadFlow.run(); System.out.println("loadflow status is " + (result2.isOk() ? "ok" : "nok") + " (" + result2.getMetrics() + ")"); if (result2.isOk() && checkConstraints) { String report = Security.printLimitsViolations(network); if (report != null) { System.out.println(report); } } } } } }