List of usage examples for org.springframework.batch.support DatabaseType fromMetaData
public static DatabaseType fromMetaData(DataSource dataSource) throws MetaDataAccessException
From source file:com.example.configuration.BatchConfiguration.java
public JobRepository getJobRepository() throws Exception { JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean(); factory.setDataSource(config.dataSource()); factory.setTransactionManager(transactionManager); factory.setLobHandler(lobHandler()); factory.setDatabaseType(DatabaseType.fromMetaData(config.dataSource()).name()); factory.setIsolationLevelForCreate("ISOLATION_DEFAULT"); factory.afterPropertiesSet();// www .j a va 2s.c o m return (JobRepository) factory.getObject(); }
From source file:com.example.DBConfig.java
@PostConstruct protected void initialize() throws Exception { String platform = DatabaseType.fromMetaData(this.dataSource()).toString().toLowerCase(); String schemaCreateLocation = this.env.getProperty("schema", DEFAULT_SCHEMA_LOCATION); schemaCreateLocation = schemaCreateLocation.replace("@@platform@@", platform); ResourceDatabasePopulator populator = new ResourceDatabasePopulator(); populator.addScript(this.resourceLoader.getResource(schemaCreateLocation)); populator.setContinueOnError(true);//from w w w.j ava 2s . c o m DatabasePopulatorUtils.execute(populator, dataSource()); }
From source file:gemlite.core.internal.batch.ColumnRangePartitioner.java
/** * Partition a database table assuming that the data in the column specified * are uniformly distributed. The execution context values will have keys * <code>minValue</code> and <code>maxValue</code> specifying the range of * values to consider in each partition. * /*from w ww . ja v a 2 s . co m*/ * @see Partitioner#partition(int) */ public Map<String, ExecutionContext> partition(int gridSize) { //? try { DatabaseType type = DatabaseType.fromMetaData(jdbcTemplate.getDataSource()); if (DatabaseType.SYBASE.equals(type)) { return partitionSybase(gridSize); } else return partitionCommon(gridSize); } catch (Exception e) { LogUtil.getLogger().error("DatabaseType error:", e); return partitionCommon(gridSize); } }
From source file:com.xchanging.support.batch.admin.service.SimpleJobServiceFactoryBean.java
/**
 * Validates that all required collaborators were injected and supplies
 * defaults for the optional ones (incrementer factory, database type,
 * LOB handler) before the service is used.
 *
 * @throws Exception if a required dependency is missing or the resolved
 *                   database type has no supported key incrementer
 */
public void afterPropertiesSet() throws Exception {
    Assert.notNull(dataSource, "DataSource must not be null.");
    Assert.notNull(jobRepository, "JobRepository must not be null.");
    Assert.notNull(jobLocator, "JobLocator must not be null.");
    Assert.notNull(jobLauncher, "JobLauncher must not be null.");
    // NOTE(review): SimpleJdbcTemplate is deprecated in later Spring versions;
    // consider migrating to JdbcTemplate — confirm the field's declared type first.
    jdbcTemplate = new SimpleJdbcTemplate(dataSource);
    if (incrementerFactory == null) {
        incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
    }
    if (databaseType == null) {
        // Auto-detect the database type from connection metadata when not configured.
        databaseType = DatabaseType.fromMetaData(dataSource).name();
        logger.info("No database type set, using meta data indicating: " + databaseType);
    }
    if (lobHandler == null) {
        lobHandler = new DefaultLobHandler();
    }
    // Fail fast if no primary-key incrementer exists for the resolved database type.
    Assert.isTrue(incrementerFactory.isSupportedIncrementerType(databaseType),
            "'" + databaseType + "' is an unsupported database type. The supported database types are "
                    + StringUtils.arrayToCommaDelimitedString(incrementerFactory.getSupportedIncrementerTypes()));
}
From source file:admin.service.SimpleJobServiceFactoryBean.java
/**
 * Verifies the mandatory collaborators and backfills defaults for every
 * optional one (execution-context serializer, LOB handler, incrementer
 * factory, database type) before this service is used.
 *
 * @throws Exception if a required dependency is missing or the database
 *                   type is not supported by the incrementer factory
 */
public void afterPropertiesSet() throws Exception {
    Assert.notNull(dataSource, "DataSource must not be null.");
    Assert.notNull(jobRepository, "JobRepository must not be null.");
    Assert.notNull(jobLocator, "JobLocator must not be null.");
    Assert.notNull(jobLauncher, "JobLauncher must not be null.");
    jdbcTemplate = new JdbcTemplate(dataSource);
    if (serializer == null) {
        XStreamExecutionContextStringSerializer contextSerializer = new XStreamExecutionContextStringSerializer();
        contextSerializer.afterPropertiesSet();
        serializer = contextSerializer;
    }
    if (lobHandler == null) {
        lobHandler = new DefaultLobHandler();
    }
    if (incrementerFactory == null) {
        incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
    }
    if (databaseType == null) {
        // Fall back to the type reported by the JDBC connection metadata.
        databaseType = DatabaseType.fromMetaData(dataSource).name();
        logger.info("No database type set, using meta data indicating: " + databaseType);
    }
    Assert.isTrue(incrementerFactory.isSupportedIncrementerType(databaseType),
            "'" + databaseType + "' is an unsupported database type. The supported database types are "
                    + StringUtils.arrayToCommaDelimitedString(incrementerFactory.getSupportedIncrementerTypes()));
}
From source file:gemlite.shell.service.batch.ImportService.java
/**
 * Defines (and caches) a batch import job for the given region from one of the
 * XML job templates, generating the job definition file and bootstrapping a
 * child application context for it.
 * <p>
 * For JDBC-based templates the database connection settings are stored first;
 * for the partition template the table expression may additionally be rewritten
 * by a database-type-specific converter.
 *
 * @param template    job template name (e.g. "file", jdbc partition/paging types)
 * @param file        input file path
 * @param delimiter   field delimiter
 * @param quote       quote character
 * @param skipable    whether bad lines may be skipped
 * @param columns     column list
 * @param region      target region name (also part of the job cache key)
 * @param table       source table (may be rewritten for partition jobs)
 * @param encoding    input file encoding
 * @param linesToSkip number of header lines to skip
 * @param dbdriver    JDBC driver class (JDBC templates only)
 * @param dburl       JDBC URL
 * @param dbuser      database user
 * @param dbpsw       database password
 * @param sortKey     sort/partition key column
 * @param where       optional WHERE clause
 * @param pageSize    page size for paging readers
 * @param fetchSize   JDBC fetch size
 * @return true when the job was defined and cached; false if parameter
 *         validation failed
 * @throws GemliteException wrapping any error raised while defining the job
 */
public boolean defineJob(String template, String file, String delimiter, String quote, boolean skipable,
        String columns, String region, String table, String encoding, int linesToSkip, String dbdriver,
        String dburl, String dbuser, String dbpsw, String sortKey, String where, int pageSize, int fetchSize) {
    BatchParameter param = new BatchParameter(template, file, delimiter, quote, skipable, columns, region, table,
            encoding, linesToSkip, sortKey, where, pageSize, fetchSize);
    if (!validParameters(param))
        return false;
    // Jobs are cached per region + template combination.
    String cacheKey = region + template;
    try {
        // JDBC-backed templates: persist the connection settings before anything else.
        if (StringUtils.equals(BatchTemplateTypes.jdbcPartition.getValue(), param.getTemplate())
                || StringUtils.equals(BatchTemplateTypes.jdbcpaging.getValue(), param.getTemplate())) {
            setDbParameter(dbdriver, dburl, dbuser, dbpsw);
            saveDbConfig(dbdriver, dburl, dbuser, dbpsw);
        }
        // Partition template: the table expression may need a database-specific rewrite.
        if (StringUtils.equals(BatchTemplateTypes.jdbcPartition.getValue(), param.getTemplate())) {
            DataSource dataSource = null;
            DatabaseType type = null;
            if (jobItems.containsKey(cacheKey)) {
                // Reuse the data source of the already-defined job for this key.
                dataSource = (DataSource) (jobItems.get(cacheKey).jobContext.getBean("jdbcDataSource"));
                type = DatabaseType.fromMetaData(dataSource);
            } else {
                // No cached job yet: spin up a throwaway context just to detect the
                // database type, then close it again.
                ClassPathXmlApplicationContext jdbc = Util.initContext(true, "batch/job-context.xml",
                        "batch/import-db-jdbc.xml");
                dataSource = (DataSource) jdbc.getBean("jdbcDataSource");
                type = DatabaseType.fromMetaData(dataSource);
                jdbc.close();
            }
            if (converters.containsKey(type))
                param.setTable(converters.get(type).converte(table, sortKey));
        }
        // Generate the concrete job XML from the template and parameters.
        String jobXMLFile = generator.generateFileJob(region, param);
        ClassPathXmlApplicationContext jobContext = null;
        if (StringUtils.equals("file", template)) {
            jobContext = Util.initContext(false, "batch/job-context.xml", jobXMLFile);
        } else {
            // JDBC templates additionally need the shared db context definition.
            jobContext = Util.initContext(false, "batch/job-context.xml", "batch/import-db-jdbc.xml", jobXMLFile);
        }
        jobContext.setParent(batchContext);
        jobContext.refresh();
        if (LogUtil.getCoreLog().isDebugEnabled())
            LogUtil.getCoreLog().debug("Job:" + region + " define success.");
        // Cache the fully built job together with its context and definition file.
        JobItem item = new JobItem();
        item.attributes = param;
        item.job = jobContext.getBean(Job.class);
        item.jobContent = jobXMLFile;
        item.jobContext = jobContext;
        jobItems.put(cacheKey, item);
        return true;
    } catch (Exception e) {
        LogUtil.getCoreLog().info("Job define error.", e);
        throw new GemliteException(e);
    }
}
From source file:org.springframework.batch.core.repository.support.JobRepositoryFactoryBean.java
/**
 * Validates the mandatory data source and supplies defaults for every
 * optional collaborator (JDBC operations, incrementer factory, database
 * type, LOB handler, execution-context serializer) before delegating to
 * the superclass initialization.
 *
 * @throws Exception if a dependency is missing or the database type is
 *                   not supported by the incrementer factory
 */
@Override
public void afterPropertiesSet() throws Exception {
    Assert.notNull(dataSource, "DataSource must not be null.");
    if (jdbcOperations == null) {
        jdbcOperations = new JdbcTemplate(dataSource);
    }
    if (incrementerFactory == null) {
        incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource);
    }
    if (databaseType == null) {
        // Auto-detect the database type from the connection metadata.
        databaseType = DatabaseType.fromMetaData(dataSource).name();
        logger.info("No database type set, using meta data indicating: " + databaseType);
    }
    // Only Oracle gets a default LOB handler; other databases manage LOBs directly.
    if (lobHandler == null && databaseType.equalsIgnoreCase(DatabaseType.ORACLE.toString())) {
        lobHandler = new DefaultLobHandler();
    }
    if (serializer == null) {
        Jackson2ExecutionContextStringSerializer defaultSerializer = new Jackson2ExecutionContextStringSerializer();
        serializer = defaultSerializer;
    }
    Assert.isTrue(incrementerFactory.isSupportedIncrementerType(databaseType),
            "'" + databaseType + "' is an unsupported database type. The supported database types are "
                    + StringUtils.arrayToCommaDelimitedString(incrementerFactory.getSupportedIncrementerTypes()));
    if (lobType != null) {
        Assert.isTrue(isValidTypes(lobType), "lobType must be a value from the java.sql.Types class");
    }
    super.afterPropertiesSet();
}
From source file:org.springframework.batch.core.test.football.FootballJobSkipIntegrationTests.java
/**
 * Injects the test data source, recording the database type reported by the
 * connection metadata and creating the JdbcTemplate used by the tests.
 *
 * @param dataSource the data source under test
 * @throws Exception if the database type cannot be determined
 */
@Autowired
public void setDataSource(DataSource dataSource) throws Exception {
    databaseType = DatabaseType.fromMetaData(dataSource);
    this.jdbcTemplate = new JdbcTemplate(dataSource);
}
From source file:org.springframework.cloud.dataflow.server.batch.SimpleJobServiceFactoryBean.java
@Override public void afterPropertiesSet() throws Exception { Assert.notNull(dataSource, "DataSource must not be null."); Assert.notNull(jobRepository, "JobRepository must not be null."); Assert.notNull(jobLocator, "JobLocator must not be null."); Assert.notNull(jobLauncher, "JobLauncher must not be null."); Assert.notNull(jobExplorer, "JobExplorer must not be null."); jdbcTemplate = new JdbcTemplate(dataSource); if (incrementerFactory == null) { incrementerFactory = new DefaultDataFieldMaxValueIncrementerFactory(dataSource); }// w w w . j av a 2s . com if (databaseType == null) { databaseType = DatabaseType.fromMetaData(dataSource).name(); logger.info("No database type set, using meta data indicating: " + databaseType); } if (lobHandler == null) { lobHandler = new DefaultLobHandler(); } if (serializer == null) { this.serializer = new Jackson2ExecutionContextStringSerializer(); } Assert.isTrue(incrementerFactory.isSupportedIncrementerType(databaseType), "'" + databaseType + "' is an unsupported database type. The supported database types are " + StringUtils.arrayToCommaDelimitedString(incrementerFactory.getSupportedIncrementerTypes())); }
From source file:org.springframework.xd.jdbc.NamedColumnJdbcItemReaderFactory.java
@Override public void afterPropertiesSet() throws Exception { if (!StringUtils.hasText(sql)) { Assert.hasText(tableName, "tableName must be set"); Assert.hasText(columnNames, "columns must be set"); String sql;/*from w w w . ja v a 2 s.c o m*/ if (StringUtils.hasText(partitionClause)) { sql = "SELECT " + columnNames + " FROM " + tableName + " " + partitionClause; } else { sql = "SELECT " + columnNames + " FROM " + tableName; } log.info("Setting SQL to: " + sql); setSql(sql); } else if (StringUtils.hasText(columnNames) || StringUtils.hasText(tableName)) { log.warn("You must set either the 'sql' property or 'tableName' and 'columns'."); } DatabaseType type = DatabaseType.fromMetaData(dataSource); switch (type) { case MYSQL: fetchSize = Integer.MIN_VALUE; // MySql doesn't support getRow for a streaming cursor verifyCursorPosition = false; break; case SQLITE: fetchSize = AbstractCursorItemReader.VALUE_NOT_SET; break; default: // keep configured fetchSize } reader = new NamedColumnJdbcItemReader(); reader.setSql(sql); reader.setFetchSize(fetchSize); reader.setDataSource(dataSource); reader.setVerifyCursorPosition(verifyCursorPosition); reader.afterPropertiesSet(); initialized = true; }