Usage examples for the org.joda.time.Interval#toString() method, collected from open-source projects
public String toString()
From source file:candlelight.joda.converters.JodaIntervalConverter.java
License:Apache License
/**
 * Converts a Joda-Time {@link Interval} entity attribute to its ISO-8601 string
 * form for storage in a database column.
 *
 * @param interval the interval to convert; may be {@code null}
 * @return the ISO-8601 representation of the interval, or {@code null} when the
 *         attribute is {@code null} (stored as a NULL column)
 */
public String convertToDatabaseColumn(Interval interval) {
    // Guard against null so a null attribute maps to a NULL column instead of
    // throwing a NullPointerException (matches the sibling JodaIntervalConverter
    // implementation, com.studium.joda.converters.JodaIntervalConverter).
    return interval == null ? null : interval.toString();
}
From source file:com.fatboyindustrial.gsonjodatime.IntervalConverter.java
License:Open Source License
/**
 * Gson serialization callback, invoked when Gson encounters a field of type
 * {@link Interval}. The interval is rendered as a single JSON string holding
 * its ISO-8601 textual form ({@code Interval.toString()}); the serialization
 * context is not needed because the result is a trivial primitive.
 *
 * @param src       the interval that needs to be converted to JSON
 * @param typeOfSrc the actual (fully genericized) type of the source object
 * @param context   the serialization context (unused here)
 * @return a {@link JsonPrimitive} wrapping the interval's ISO-8601 string
 */
@Override
public JsonElement serialize(Interval src, Type typeOfSrc, JsonSerializationContext context) {
    final String iso8601 = src.toString();
    return new JsonPrimitive(iso8601);
}
From source file:com.metamx.druid.realtime.plumber.RealtimePlumberSchool.java
License:Open Source License
/**
 * Computes the on-disk persist directory for the given schema and interval.
 * The interval's ISO-8601 form contains a '/' separator, which is illegal in a
 * file name, so it is replaced with '_' before being used as the directory name.
 */
private File computePersistDir(Schema schema, Interval interval) {
    final String dirName = interval.toString().replace("/", "_");
    return new File(computeBaseDir(schema), dirName);
}
From source file:com.studium.joda.converters.JodaIntervalConverter.java
License:Apache License
/**
 * Maps a Joda-Time {@link Interval} entity attribute to its ISO-8601 database
 * representation. A null attribute is stored as a NULL column.
 */
public String convertToDatabaseColumn(Interval interval) {
    if (interval == null) {
        return null;
    }
    return interval.toString();
}
From source file:com.yahoo.druid.hadoop.DruidHelper.java
License:Apache License
/**
 * Asks the Druid overlord for the list of used {@link DataSegment}s of the
 * given data source within the given interval, by POSTing the JSON action
 * produced by {@code getSegmentListUsedActionJson} to the overlord's
 * {@code /druid/indexer/v1/action} endpoint.
 * Makes up to 3 attempts, sleeping 5 s between attempts; any per-attempt
 * exception is logged and retried.
 *
 * @param dataSource  name of the Druid data source
 * @param interval    interval whose used segments are requested (sent in ISO-8601 form)
 * @param overlordUrl overlord host[:port], without scheme
 * @return the segments parsed from the "result" field of the overlord's JSON response
 * @throws RuntimeException when all attempts fail
 */
protected List<DataSegment> getSegmentsToLoad(String dataSource, Interval interval, String overlordUrl) {
    String urlStr = "http://" + overlordUrl + "/druid/indexer/v1/action";
    logger.info("Sending request to overlord at " + urlStr);
    String requestJson = getSegmentListUsedActionJson(dataSource, interval.toString());
    logger.info("request json is " + requestJson);
    int numTries = 3; //TODO: should be configurable?
    for (int trial = 0; trial < numTries; trial++) {
        try {
            logger.info("attempt number {} to get list of segments from overlord", trial);
            URL url = new URL(urlStr);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("content-type", "application/json");
            conn.setUseCaches(false);
            conn.setDoOutput(true);
            conn.setConnectTimeout(60000); //TODO: 60 secs, shud be configurable?
            // NOTE(review): request body is written with the platform default charset
            // (requestJson.getBytes()) — presumably UTF-8 is intended; confirm.
            OutputStream out = conn.getOutputStream();
            out.write(requestJson.getBytes());
            out.close();
            int responseCode = conn.getResponseCode();
            if (responseCode == 200) {
                // Deserialize the envelope and extract only its "result" field.
                ObjectMapper mapper = DruidInitialization.getInstance().getObjectMapper();
                Map<String, Object> obj = mapper.readValue(conn.getInputStream(),
                        new TypeReference<Map<String, Object>>() {
                        });
                return mapper.convertValue(obj.get("result"), new TypeReference<List<DataSegment>>() {
                });
            } else {
                logger.warn(
                        "Attempt Failed to get list of segments from overlord. response code {} , response {}",
                        responseCode, IOUtils.toString(conn.getInputStream()));
            }
        } catch (Exception ex) {
            logger.warn("Exception in getting list of segments from overlord", ex);
        }
        try {
            Thread.sleep(5000); //wait before next trial
        } catch (InterruptedException ex) {
            Throwables.propagate(ex);
        }
    }
    throw new RuntimeException(
            String.format("failed to find list of segments, dataSource[%s], interval[%s], overlord[%s]",
                    dataSource, interval, overlordUrl));
}
From source file:com.yahoo.druid.hadoop.HiveDatasourceInputFormat.java
License:Apache License
private String getSegmentsToLoad(String dataSource, List<Interval> intervals, String overlordUrl) throws MalformedURLException { logger.info("CheckPost7"); String urlStr = "http://" + overlordUrl + "/druid/indexer/v1/action"; logger.info("Sending request to overlord at " + urlStr); Interval interval = intervals.get(0); String requestJson = getSegmentListUsedActionJson(interval.toString()); logger.info("request json is " + requestJson); int numTries = 3; for (int trial = 0; trial < numTries; trial++) { try {/*w ww . j av a2s. co m*/ logger.info("attempt number {} to get list of segments from overlord", trial); Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("httpproxy-prod.blue.ygrid.yahoo.com", 4080)); URL url = new URL(String.format("%s/druid/coordinator/v1/metadata/datasources/%s/segments?full", overlordUrl, dataSource)); //new URL(urlStr); HttpURLConnection conn = (HttpURLConnection) url.openConnection(proxy); conn.setRequestMethod("POST"); conn.setRequestProperty("content-type", "application/json"); conn.setRequestProperty("Accept", "*/*"); conn.setUseCaches(false); conn.setDoOutput(true); conn.setConnectTimeout(60000); conn.usingProxy(); OutputStream out = conn.getOutputStream(); out.write(requestJson.getBytes()); out.close(); int responseCode = conn.getResponseCode(); if (responseCode == 200) { return IOUtils.toString(conn.getInputStream()); } else { logger.warn( "Attempt Failed to get list of segments from overlord. response code [%s] , response [%s]", responseCode, IOUtils.toString(conn.getInputStream())); } } catch (Exception ex) { logger.warn("Exception in getting list of segments from overlord", ex); } try { Thread.sleep(5000); //wait before next trial } catch (InterruptedException ex) { Throwables.propagate(ex); } } throw new RuntimeException( String.format("failed to find list of segments, dataSource[%s], interval[%s], overlord[%s]", dataSource, interval, overlordUrl)); }
From source file:de.rwth.idsg.xsharing.router.iv.util.CustomIntervalSerializer.java
License:Open Source License
/**
 * Jackson serializer hook: writes the {@link Interval} to the output as a
 * single JSON string holding its ISO-8601 textual form.
 *
 * @param interval the interval to serialize
 * @param jgen     the generator to write the JSON string to
 * @param provider the serializer provider (unused)
 * @throws IOException if the generator fails to write
 */
@Override
public void serialize(Interval interval, JsonGenerator jgen, SerializerProvider provider)
        throws IOException {
    final String text = interval.toString();
    jgen.writeString(text);
}
From source file:eu.itesla_project.commons.jaxb.IntervalAdapter.java
License:Mozilla Public License
/**
 * JAXB marshalling hook: renders the {@link Interval} as its ISO-8601 string.
 *
 * @param v the interval to marshal; may be {@code null}
 * @return the ISO-8601 form of the interval, or {@code null} when {@code v}
 *         is {@code null}
 * @throws Exception never thrown here; declared by the XmlAdapter contract
 */
@Override
public String marshal(Interval v) throws Exception {
    // Guard against null so marshalling an absent value does not throw a
    // NullPointerException; returning null lets the caller omit the element.
    return v == null ? null : v.toString();
}
From source file:eu.itesla_project.modules.topo.TopologyContext.java
License:Mozilla Public License
/**
 * Creates (and returns the path of) the "topo" cache entry for the given
 * network and historical-data parameters.
 *
 * @param network              the network whose voltage-level ids participate in the cache key
 * @param histoInterval        the historical data interval (keyed by its ISO-8601 form)
 * @param correlationThreshold correlation threshold, part of the cache key
 * @param probabilityThreshold probability threshold, part of the cache key
 * @return the path of the created cache entry
 * @throws IOException if the cache entry cannot be created
 */
public static Path createTopoCacheDir(Network network, Interval histoInterval, double correlationThreshold,
        double probabilityThreshold) throws IOException {
    // Cache key = historical interval + both thresholds + the sorted list of all
    // voltage-level ids of the network (sorted so the key is order-independent).
    return PlatformConfig.defaultCacheManager().newCacheEntry("topo").withKey(histoInterval.toString())
            .withKey(Double.toString(correlationThreshold)).withKey(Double.toString(probabilityThreshold))
            .withKeys(StreamSupport.stream(network.getVoltageLevels().spliterator(), false)
                    .map(Identifiable::getId).sorted().collect(Collectors.toList()))
            .build().create();
}
From source file:eu.itesla_project.online.tools.OnlineWorkflowTool.java
License:Mozilla Public License
/**
 * Entry point of the online-workflow CLI command: parses the command line,
 * assembles {@link OnlineWorkflowStartParameters} (JMX host/port/threads) and
 * {@link OnlineWorkflowParameters}, resolves the set of base-case dates to run
 * on, then drives a remote {@link LocalOnlineApplicationMBean} over JMX to
 * either start one workflow per base case or shut the application down.
 *
 * @param line the parsed command line
 * @throws Exception on JMX connection failures, malformed option values, etc.
 */
@Override
public void run(CommandLine line) throws Exception {
    OnlineWorkflowStartParameters startconfig = OnlineWorkflowStartParameters.loadDefault();
    String host = line.getOptionValue(OnlineWorkflowCommand.HOST);
    String port = line.getOptionValue(OnlineWorkflowCommand.PORT);
    String threads = line.getOptionValue(OnlineWorkflowCommand.THREADS);
    // Command-line values override the defaults loaded above.
    if (host != null)
        startconfig.setJmxHost(host);
    if (port != null)
        startconfig.setJmxPort(Integer.valueOf(port));
    if (threads != null)
        startconfig.setThreads(Integer.valueOf(threads));
    Set<DateTime> baseCasesSet = null;
    OnlineWorkflowParameters params = OnlineWorkflowParameters.loadDefault();
    // CASE_FILE is mutually exclusive with the base-case selection options;
    // overriding a configured default case file requires the full option set.
    boolean atLeastOneBaseCaseLineParam = line.hasOption(OnlineWorkflowCommand.CASE_TYPE)
            || line.hasOption(OnlineWorkflowCommand.COUNTRIES) || line.hasOption(OnlineWorkflowCommand.BASE_CASE)
            || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL);
    boolean allNeededBaseCaseLineParams = line.hasOption(OnlineWorkflowCommand.CASE_TYPE)
            && line.hasOption(OnlineWorkflowCommand.COUNTRIES)
            && (line.hasOption(OnlineWorkflowCommand.BASE_CASE)
                    || line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL));
    if (line.hasOption(OnlineWorkflowCommand.CASE_FILE)) {
        if (atLeastOneBaseCaseLineParam) {
            showHelp("parameter " + OnlineWorkflowCommand.CASE_FILE
                    + " cannot be used together with parameters: " + OnlineWorkflowCommand.CASE_TYPE + ", "
                    + OnlineWorkflowCommand.COUNTRIES + ", " + OnlineWorkflowCommand.BASE_CASE + ", "
                    + OnlineWorkflowCommand.BASECASES_INTERVAL);
            return;
        }
        params.setCaseFile(line.getOptionValue(OnlineWorkflowCommand.CASE_FILE));
    } else {
        if (params.getCaseFile() != null) {
            if (atLeastOneBaseCaseLineParam) {
                if (!allNeededBaseCaseLineParams) {
                    showHelp("to override default parameter " + OnlineWorkflowCommand.CASE_FILE
                            + ", all these parameters must be specified: " + OnlineWorkflowCommand.CASE_TYPE
                            + ", " + OnlineWorkflowCommand.COUNTRIES + ", " + OnlineWorkflowCommand.BASE_CASE
                            + " or " + OnlineWorkflowCommand.BASECASES_INTERVAL);
                    return;
                }
                params.setCaseFile(null);
            }
        }
        if (line.hasOption(OnlineWorkflowCommand.CASE_TYPE))
            params.setCaseType(CaseType.valueOf(line.getOptionValue(OnlineWorkflowCommand.CASE_TYPE)));
        if (line.hasOption(OnlineWorkflowCommand.COUNTRIES)) {
            params.setCountries(Arrays.stream(line.getOptionValue(OnlineWorkflowCommand.COUNTRIES).split(","))
                    .map(Country::valueOf).collect(Collectors.toSet()));
        }
        if (line.hasOption(OnlineWorkflowCommand.BASECASES_INTERVAL)) {
            // Query the case repository for every base case available in the interval.
            Interval basecasesInterval = Interval
                    .parse(line.getOptionValue(OnlineWorkflowCommand.BASECASES_INTERVAL));
            OnlineConfig oConfig = OnlineConfig.load();
            CaseRepository caseRepo = oConfig.getCaseRepositoryFactoryClass().newInstance()
                    .create(new LocalComputationManager());
            baseCasesSet = caseRepo.dataAvailable(params.getCaseType(), params.getCountries(), basecasesInterval);
            System.out.println("Base cases available for interval " + basecasesInterval.toString());
            baseCasesSet.forEach(x -> {
                System.out.println(" " + x);
            });
        }
        if (baseCasesSet == null) {
            // Fall back to a single explicit base-case date, or the configured default.
            baseCasesSet = new HashSet<>();
            String base = line.getOptionValue(OnlineWorkflowCommand.BASE_CASE);
            if (base != null) {
                baseCasesSet.add(DateTime.parse(base));
            } else {
                baseCasesSet.add(params.getBaseCaseDate());
            }
        }
    }
    // Optional overrides of the default workflow parameters.
    String histo = line.getOptionValue(OnlineWorkflowCommand.HISTODB_INTERVAL);
    if (histo != null)
        params.setHistoInterval(Interval.parse(histo));
    String states = line.getOptionValue(OnlineWorkflowCommand.STATES);
    if (states != null)
        params.setStates(Integer.parseInt(states));
    String timeHorizon = line.getOptionValue(OnlineWorkflowCommand.TIME_HORIZON);
    if (timeHorizon != null)
        params.setTimeHorizon(TimeHorizon.fromName(timeHorizon));
    String workflowid = line.getOptionValue(OnlineWorkflowCommand.WORKFLOW_ID);
    if (workflowid != null)
        params.setOfflineWorkflowId(workflowid);
    String feAnalysisId = line.getOptionValue(OnlineWorkflowCommand.FEANALYSIS_ID);
    if (feAnalysisId != null)
        params.setFeAnalysisId(feAnalysisId);
    String rulesPurity = line.getOptionValue(OnlineWorkflowCommand.RULES_PURITY);
    if (rulesPurity != null)
        params.setRulesPurityThreshold(Double.parseDouble(rulesPurity));
    if (line.hasOption(OnlineWorkflowCommand.STORE_STATES))
        params.setStoreStates(true);
    if (line.hasOption(OnlineWorkflowCommand.ANALYSE_BASECASE))
        params.setAnalyseBasecase(true);
    if (line.hasOption(OnlineWorkflowCommand.VALIDATION)) {
        params.setValidation(true);
        params.setStoreStates(true); // if validation then store states
        params.setAnalyseBasecase(true); // if validation then analyze base case
    }
    Set<SecurityIndexType> securityIndexes = null;
    if (line.hasOption(OnlineWorkflowCommand.SECURITY_INDEXES)) {
        // "ALL" is represented by a null set; otherwise parse the comma-separated list.
        if (!"ALL".equals(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES)))
            securityIndexes = Arrays
                    .stream(line.getOptionValue(OnlineWorkflowCommand.SECURITY_INDEXES).split(","))
                    .map(SecurityIndexType::valueOf).collect(Collectors.toSet());
        params.setSecurityIndexes(securityIndexes);
    }
    if (line.hasOption(OnlineWorkflowCommand.MERGE_OPTIMIZED))
        params.setMergeOptimized(true);
    String limitReduction = line.getOptionValue(OnlineWorkflowCommand.LIMIT_REDUCTION);
    if (limitReduction != null)
        params.setLimitReduction(Float.parseFloat(limitReduction));
    if (line.hasOption(OnlineWorkflowCommand.HANDLE_VIOLATION_IN_N)) {
        params.setHandleViolationsInN(true);
        params.setAnalyseBasecase(true); // if I need to handle violations in N, I need to analyze base case
    }
    String constraintMargin = line.getOptionValue(OnlineWorkflowCommand.CONSTRAINT_MARGIN);
    if (constraintMargin != null)
        params.setConstraintMargin(Float.parseFloat(constraintMargin));
    // Connect to the online application's MBean over JMX (RMI connector).
    String urlString = "service:jmx:rmi:///jndi/rmi://" + startconfig.getJmxHost() + ":"
            + startconfig.getJmxPort() + "/jmxrmi";
    JMXServiceURL serviceURL = new JMXServiceURL(urlString);
    Map<String, String> jmxEnv = new HashMap<>();
    JMXConnector connector = JMXConnectorFactory.connect(serviceURL, jmxEnv);
    MBeanServerConnection mbsc = connector.getMBeanServerConnection();
    ObjectName name = new ObjectName(LocalOnlineApplicationMBean.BEAN_NAME);
    LocalOnlineApplicationMBean application = MBeanServerInvocationHandler.newProxyInstance(mbsc, name,
            LocalOnlineApplicationMBean.class, false);
    if (line.hasOption(OnlineWorkflowCommand.START_CMD)) {
        if (params.getCaseFile() != null) {
            System.out.println("starting Online Workflow, caseFile " + params.getCaseFile());
            String workflowId = application.startWorkflow(startconfig, params);
            System.out.println("workflowId=" + workflowId);
        } else {
            // One workflow is started per resolved base-case date.
            for (DateTime basecase : baseCasesSet) {
                params.setBaseCaseDate(basecase);
                System.out.println("starting Online Workflow, basecase " + basecase.toString());
                String workflowId = application.startWorkflow(startconfig, params);
                System.out.println("workflowId=" + workflowId);
            }
        }
    } else if (line.hasOption(OnlineWorkflowCommand.SHUTDOWN_CMD)) {
        application.shutdown();
    } else {
        showHelp("");
    }
}