List of usage examples for org.hibernate.tool.hbm2ddl SchemaUpdate getExceptions
public List getExceptions()
From source file:ca.mcgill.cs.swevo.qualyzer.model.PersistenceManager.java
License:Open Source License
/** * Update the database of a project.//from w w w .j a v a2 s .co m * @param project */ public void updateDB(IProject project) { String dbPath = getDBPath(project).toOSString(); String connectionString = DB_CONNECTION_STRING.replace(PER_S, dbPath) + DB_INIT_STRING; //$NON-NLS-1$ HibernateDBManager dbManager; dbManager = new HibernateDBManager(connectionString, DB_USERNAME, "", DB_DRIVER, DB_DIALECT); //$NON-NLS-1$ // Init DB SchemaUpdate update = new SchemaUpdate(dbManager.getConfiguration()); update.execute(false, true); dbManager.getSessionFactory().close(); if (!update.getExceptions().isEmpty()) { throw new QualyzerException(Messages.getString("model.PersistenceManager.upgradeError"), //$NON-NLS-1$ (Throwable) update.getExceptions().get(0)); } }
From source file:com.amalto.core.storage.hibernate.HibernateStorage.java
License:Open Source License
/**
 * Prepares this storage for use: validates the user data model, creates the
 * dynamically generated Hibernate classes and their dedicated class loader,
 * computes which database columns should be indexed, generates/updates/validates
 * the database schema (per the datasource's schema generation setting) and
 * finally builds the Hibernate session factory.
 * <p>
 * No-op when the storage is already prepared, unless {@code force} is set (in
 * which case the storage is closed and fully rebuilt). On any failure the
 * storage is closed before the exception is rethrown, to avoid leaking the
 * partially-built class loader / session factory.
 *
 * @param repository the user metadata repository (data model) to prepare for
 * @param optimizedExpressions expressions used to recommend database indexes
 * @param force if {@code true}, re-prepare even when already prepared
 * @param dropExistingData if {@code true}, existing database content is cleaned first
 * @throws IllegalArgumentException if no datasource has been set
 * @throws RuntimeException wrapping any failure during preparation
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public synchronized void prepare(MetadataRepository repository, Set<Expression> optimizedExpressions,
        boolean force, boolean dropExistingData) {
    if (!force && isPrepared) {
        return; // No op operation
    }
    if (isPrepared) {
        // Forced re-prepare: release current resources before rebuilding.
        close();
        internalInit();
    }
    if (dataSource == null) {
        throw new IllegalArgumentException("Datasource is not set."); //$NON-NLS-1$
    }
    // No support for data models including inheritance AND for g* XSD simple types AND fields that start with
    // X_TALEND_
    try {
        repository.accept(METADATA_CHECKER);
        userMetadataRepository = repository;
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred during unsupported features check.", e); //$NON-NLS-1$
    }
    // Loads additional types for staging area.
    if (storageType == StorageType.STAGING) {
        userMetadataRepository = repository.copy(); // See TMDM-6938: prevents staging types to appear in master
                                                    // storage.
        userMetadataRepository
                .load(MetadataRepositoryAdmin.class.getResourceAsStream("stagingInternalTypes.xsd")); //$NON-NLS-1$
    }
    // Create class loader for storage's dynamically created classes.
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    Class<? extends StorageClassLoader> clazz;
    try {
        try {
            // Prefer the alternate class loader when present on classpath; fall back to the default one.
            clazz = (Class<? extends StorageClassLoader>) Class.forName(ALTERNATE_CLASS_LOADER);
        } catch (ClassNotFoundException e) {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(CLASS_LOADER);
        }
        Constructor<? extends StorageClassLoader> constructor = clazz.getConstructor(ClassLoader.class,
                String.class, StorageType.class);
        storageClassLoader = constructor.newInstance(contextClassLoader, storageName, storageType);
        storageClassLoader.setDataSourceConfiguration(dataSource);
        storageClassLoader.generateHibernateConfig(); // Checks if configuration can be generated.
    } catch (Exception e) {
        throw new RuntimeException("Could not create storage class loader", e); //$NON-NLS-1$
    }
    if (dropExistingData) {
        LOGGER.info("Cleaning existing database content."); //$NON-NLS-1$
        StorageCleaner cleaner = new JDBCStorageCleaner(new FullTextIndexCleaner());
        cleaner.clean(this);
    } else {
        LOGGER.info("*NOT* cleaning existing database content."); //$NON-NLS-1$
    }
    if (autoPrepare) {
        LOGGER.info("Preparing database before schema generation."); //$NON-NLS-1$
        StorageInitializer initializer = new JDBCStorageInitializer();
        if (initializer.supportInitialization(this)) {
            if (!initializer.isInitialized(this)) {
                initializer.initialize(this);
            } else {
                LOGGER.info("Database is already prepared."); //$NON-NLS-1$
            }
        } else {
            LOGGER.info("Datasource is not configured for automatic initialization."); //$NON-NLS-1$
        }
    } else {
        LOGGER.info("*NOT* preparing database before schema generation."); //$NON-NLS-1$
    }
    try {
        // Hibernate must see the dynamically created classes via the context class loader
        // for the remainder of the preparation (restored in the finally block below).
        Thread.currentThread().setContextClassLoader(storageClassLoader);
        // Mapping of data model types to RDBMS (i.e. 'flatten' representation of types).
        MetadataRepository internalRepository;
        try {
            InternalRepository typeEnhancer = getTypeEnhancer();
            internalRepository = userMetadataRepository.accept(typeEnhancer);
            mappingRepository = typeEnhancer.getMappings();
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during type mapping creation.", e); //$NON-NLS-1$
        }
        // Set fields to be indexed in database.
        Set<FieldMetadata> databaseIndexedFields = new HashSet<FieldMetadata>();
        switch (storageType) {
        case MASTER:
            // Adds indexes on user defined fields
            for (Expression optimizedExpression : optimizedExpressions) {
                Collection<FieldMetadata> indexedFields = RecommendedIndexes.get(optimizedExpression);
                for (FieldMetadata indexedField : indexedFields) {
                    // TMDM-5896: Don't index Composite Key fields
                    if (indexedField instanceof CompoundFieldMetadata) {
                        continue;
                    }
                    // TMDM-5311: Don't index TEXT fields
                    TypeMetadata indexedFieldType = indexedField.getType();
                    if (!isIndexable(indexedFieldType)) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue;
                    }
                    // Go up the containment tree in case containing type is anonymous.
                    ComplexTypeMetadata containingType = indexedField.getContainingType().getEntity();
                    TypeMapping mapping = mappingRepository.getMappingFromUser(containingType);
                    FieldMetadata databaseField = mapping.getDatabase(indexedField);
                    if (databaseField == null) {
                        LOGGER.error("Could not index field '" + indexedField + "' (" + indexedField.getPath() //$NON-NLS-1$ //$NON-NLS-2$
                                + "), ignoring index."); //$NON-NLS-1$
                        continue;
                    } else if (!isIndexable(databaseField.getType())) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value (in database mapping) is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue; // Don't take into indexed fields long text fields
                    }
                    // Database specific behaviors
                    switch (dataSource.getDialectName()) {
                    case SQL_SERVER:
                        // TMDM-8144: Don't index field name on SQL Server when size > 900
                        String maxLengthStr = indexedField.getType()
                                .<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                        if (maxLengthStr == null) { // go up the type inheritance tree to find max length annotation
                            TypeMetadata type = indexedField.getType();
                            while (!XMLConstants.W3C_XML_SCHEMA_NS_URI.equals(type.getNamespace())
                                    && !type.getSuperTypes().isEmpty()) {
                                type = type.getSuperTypes().iterator().next();
                                maxLengthStr = type.<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                                if (maxLengthStr != null) {
                                    break;
                                }
                            }
                        }
                        if (maxLengthStr != null) {
                            Integer maxLength = Integer.parseInt(maxLengthStr);
                            if (maxLength > 900) {
                                LOGGER.warn("Skip index on field '" + indexedField.getPath() //$NON-NLS-1$
                                        + "' (too long value)."); //$NON-NLS-1$
                                continue;
                            }
                        }
                        break;
                    case H2:
                    case MYSQL:
                    case POSTGRES:
                    case DB2:
                    case ORACLE_10G:
                    default:
                        // Nothing to do for these databases
                        break;
                    }
                    databaseIndexedFields.add(databaseField);
                    if (!databaseField.getContainingType().isInstantiable()) {
                        // Field lives in a reusable (non-instantiable) type: also index the
                        // intermediate path fields from each entity root down to it.
                        Collection<ComplexTypeMetadata> roots = RecommendedIndexes
                                .getRoots(optimizedExpression);
                        for (ComplexTypeMetadata root : roots) {
                            List<FieldMetadata> path = StorageMetadataUtils.path(
                                    mappingRepository.getMappingFromUser(root).getDatabase(), databaseField);
                            if (path.size() > 1) {
                                databaseIndexedFields.addAll(path.subList(0, path.size() - 1));
                            } else {
                                LOGGER.warn("Failed to properly index field '" + databaseField + "'."); //$NON-NLS-1$ //$NON-NLS-2$
                            }
                        }
                    }
                }
            }
            break;
        case STAGING:
            if (!optimizedExpressions.isEmpty()) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Ignoring " + optimizedExpressions.size() //$NON-NLS-1$
                            + " to optimize (disabled on staging area)."); //$NON-NLS-1$
                }
            }
            // Adds "staging status" / "staging block key" / "staging task id" as indexed fields
            for (TypeMapping typeMapping : mappingRepository.getAllTypeMappings()) {
                ComplexTypeMetadata database = typeMapping.getDatabase();
                if (database.hasField(METADATA_STAGING_STATUS)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_STATUS));
                }
                if (database.hasField(METADATA_STAGING_BLOCK_KEY)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_BLOCK_KEY));
                }
                if (database.hasField(METADATA_TASK_ID)) {
                    databaseIndexedFields.add(database.getField(METADATA_TASK_ID));
                }
            }
            break;
        case SYSTEM: // Nothing to index on SYSTEM
            break;
        }
        // Don't add FK in indexes if using H2
        if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.H2) {
            Iterator<FieldMetadata> indexedFields = databaseIndexedFields.iterator();
            while (indexedFields.hasNext()) {
                FieldMetadata field = indexedFields.next();
                if (field instanceof ReferenceFieldMetadata || field.isKey()) {
                    indexedFields.remove(); // H2 doesn't like indexes on PKs or FKs.
                }
            }
        }
        switch (dataSource.getDialectName()) {
        case ORACLE_10G:
            // Oracle gets a dedicated resolver (handles its identifier constraints).
            tableResolver = new OracleStorageTableResolver(databaseIndexedFields,
                    dataSource.getNameMaxLength());
            break;
        default:
            tableResolver = new StorageTableResolver(databaseIndexedFields, dataSource.getNameMaxLength());
        }
        storageClassLoader.setTableResolver(tableResolver);
        // Master, Staging and System share same class creator.
        switch (storageType) {
        case MASTER:
        case STAGING:
        case SYSTEM:
            hibernateClassCreator = new ClassCreator(storageClassLoader);
            break;
        }
        // Create Hibernate classes (after some modifications to the types).
        try {
            internalRepository.accept(hibernateClassCreator);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during dynamic classes creation.", e); //$NON-NLS-1$
        }
        // Last step: configuration of Hibernate
        try {
            // Hibernate needs to have dynamic classes in context class loader during configuration.
            InputStream ehCacheConfig = storageClassLoader
                    .getResourceAsStream(StorageClassLoader.EHCACHE_XML_CONFIG);
            if (ehCacheConfig != null) {
                CacheManager.create(ehCacheConfig);
            }
            configuration.configure(StorageClassLoader.HIBERNATE_CONFIG);
            batchSize = Integer.parseInt(configuration.getProperty(Environment.STATEMENT_BATCH_SIZE));
            // Sets default schema for Oracle
            Properties properties = configuration.getProperties();
            if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.ORACLE_10G) {
                properties.setProperty(Environment.DEFAULT_SCHEMA, dataSource.getUserName());
            }
            // Logs DDL *before* initialization in case initialization fails (useful for debugging).
            if (LOGGER.isTraceEnabled()) {
                traceDDL();
            }
            // Customize schema generation according to datasource content.
            RDBMSDataSource.SchemaGeneration schemaGeneration = dataSource.getSchemaGeneration();
            List exceptions = Collections.emptyList();
            switch (schemaGeneration) {
            case CREATE:
                SchemaExport schemaExport = new SchemaExport(configuration);
                schemaExport.create(false, true);
                // Exception may happen during recreation (hibernate may perform statements on tables that does
                // not exist): these exceptions are supposed to be harmless (but log them to DEBUG just in case).
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Exception(s) occurred during schema creation:"); //$NON-NLS-1$
                    for (Object exceptionObject : schemaExport.getExceptions()) {
                        LOGGER.debug(((Exception) exceptionObject).getMessage());
                    }
                }
                break;
            case VALIDATE:
                SchemaValidator schemaValidator = new SchemaValidator(configuration);
                schemaValidator.validate(); // This is supposed to throw exception on validation issue.
                break;
            case UPDATE:
                SchemaUpdate schemaUpdate = new SchemaUpdate(configuration);
                schemaUpdate.execute(false, true);
                exceptions = schemaUpdate.getExceptions();
                break;
            }
            // Throw an exception if schema update met issue(s).
            if (!exceptions.isEmpty()) {
                // Flatten every collected exception (including chained SQLExceptions)
                // into a single message before failing.
                StringBuilder sb = new StringBuilder();
                sb.append("Could not prepare database schema: "); //$NON-NLS-1$
                Iterator iterator = exceptions.iterator();
                while (iterator.hasNext()) {
                    Exception exception = (Exception) iterator.next();
                    if (exception instanceof SQLException) {
                        SQLException currentSQLException = (SQLException) exception;
                        while (currentSQLException != null) {
                            sb.append(currentSQLException.getMessage());
                            sb.append('\n');
                            currentSQLException = currentSQLException.getNextException();
                        }
                    } else if (exception != null) {
                        sb.append(exception.getMessage());
                    }
                    if (iterator.hasNext()) {
                        sb.append('\n');
                    }
                }
                throw new IllegalStateException(sb.toString());
            }
            // Initialize Hibernate
            Environment.verifyProperties(properties);
            ConfigurationHelper.resolvePlaceHolders(properties);
            ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(properties)
                    .build();
            factory = configuration.buildSessionFactory(serviceRegistry);
            MDMTransactionSessionContext.declareStorage(this, factory);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during Hibernate initialization.", e); //$NON-NLS-1$
        }
        // All set: set prepared flag to true.
        isPrepared = true;
        LOGGER.info("Storage '" + storageName + "' (" + storageType + ") is ready."); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    } catch (Throwable t) {
        try {
            // This prevent PermGen OOME in case of multiple failures to start.
            close();
        } catch (Exception e) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Error occurred during clean up following failed prepare", e); //$NON-NLS-1$
            }
        }
        throw new RuntimeException("Could not prepare '" + storageName + "'.", t); //$NON-NLS-1$ //$NON-NLS-2$
    } finally {
        // Always restore the caller's context class loader.
        Thread.currentThread().setContextClassLoader(contextClassLoader);
    }
}
From source file:com.ironiacorp.persistence.hibernate.GenericHibernateDataSource.java
License:Open Source License
/**
 * Runs Hibernate's incremental schema update against the configured database.
 *
 * @throws RuntimeException if an error is found when running the DDL script;
 *         the first exception reported by the update is attached as the cause
 */
public void updateDB() {
    log.debug("Updating the database");
    log.debug(getUpdateDDLScript());
    SchemaUpdate schemaUpdate = new SchemaUpdate(hibernateConfig);
    // false, true: don't print the script, do apply it to the database.
    schemaUpdate.execute(false, true);
    List problems = handleExceptions(schemaUpdate);
    if (!problems.isEmpty()) {
        throw new RuntimeException("exception.bootstrap.updatedb", (Exception) schemaUpdate.getExceptions().get(0));
    }
}
From source file:com.ironiacorp.persistence.hibernate.HibernateBootstrap.java
License:Open Source License
/** * Update the database./*from w w w . j a va 2 s . c om*/ * * @throws RuntimeException * If an error is found when running the DDL script. */ public void updateDB() { log.debug("Updating the database"); log.debug(getUpdateDDLScript()); SchemaUpdate ddl = new SchemaUpdate(config); List<Exception> exceptions = null; ddl.execute(false, true); exceptions = handleExceptions(ddl); if (!exceptions.isEmpty()) { throw new RuntimeException("exception.bootstrap.updatedb", (Exception) ddl.getExceptions().get(0)); } }
From source file:com.wavemaker.tools.data.ExportDB.java
License:Open Source License
@Override protected void customRun() { init();/*from w w w.ja v a 2s .com*/ final Configuration cfg = new Configuration(); // cfg.addDirectory(this.hbmFilesDir); this.hbmFilesDir.find().files().performOperation(new ResourceOperation<com.wavemaker.tools.io.File>() { @Override public void perform(com.wavemaker.tools.io.File file) { if (file.getName().endsWith(".hbm.xml")) { cfg.addInputStream(file.getContent().asInputStream()); } } }); Properties connectionProperties = getHibernateConnectionProperties(); cfg.addProperties(connectionProperties); SchemaExport export = null; SchemaUpdate update = null; File ddlFile = null; try { if (this.overrideTable) { Callable<SchemaExport> t = new Callable<SchemaExport>() { @Override public SchemaExport call() { return new SchemaExport(cfg); } }; if (this.classesDir == null) { try { export = t.call(); } catch (Exception e) { ReflectionUtils.rethrowRuntimeException(e); } } else { export = ResourceClassLoaderUtils.runInClassLoaderContext(true, t, this.classesDir); } ddlFile = File.createTempFile("ddl", ".sql"); ddlFile.deleteOnExit(); export.setOutputFile(ddlFile.getAbsolutePath()); export.setDelimiter(";"); export.setFormat(true); String extraddl = prepareForExport(this.exportToDatabase); export.create(this.verbose, this.exportToDatabase); this.errors = CastUtils.cast(export.getExceptions()); this.errors = filterError(this.errors, connectionProperties); this.ddl = IOUtils.read(ddlFile); if (!ObjectUtils.isNullOrEmpty(extraddl)) { this.ddl = extraddl + "\n" + this.ddl; } } else { Callable<SchemaUpdate> t = new Callable<SchemaUpdate>() { @Override public SchemaUpdate call() { return new SchemaUpdate(cfg); } }; if (this.classesDir == null) { try { update = t.call(); } catch (Exception e) { ReflectionUtils.rethrowRuntimeException(e); } } else { update = ResourceClassLoaderUtils.runInClassLoaderContext(t, this.classesDir); } prepareForExport(this.exportToDatabase); Connection conn = JDBCUtils.getConnection(this.connectionUrl.toString(), 
this.username, this.password, this.driverClassName); Dialect dialect = Dialect.getDialect(connectionProperties); DatabaseMetadata meta = new DatabaseMetadata(conn, dialect); String[] updateSQL = cfg.generateSchemaUpdateScript(dialect, meta); update.execute(this.verbose, this.exportToDatabase); this.errors = CastUtils.cast(update.getExceptions()); StringBuilder sb = new StringBuilder(); for (String line : updateSQL) { sb = sb.append(line); sb = sb.append("\n"); } this.ddl = sb.toString(); } } catch (IOException ex) { throw new DataServiceRuntimeException(ex); } catch (SQLException qex) { throw new DataServiceRuntimeException(qex); } catch (RuntimeException rex) { if (rex.getCause() != null && rex.getCause().getMessage().contains(NO_SUITABLE_DRIVER) && WMAppContext.getInstance().isCloudFoundry()) { String msg = rex.getMessage() + " - " + UNKNOWN_DATABASE; throw new DataServiceRuntimeException(msg); } else { throw new DataServiceRuntimeException(rex); } } finally { try { ddlFile.delete(); } catch (Exception ignore) { } } }
From source file:com.zutubi.pulse.master.hibernate.SchemaRefactor.java
License:Apache License
/**
 * Synchronizes the database schema with the current mapping configuration.
 * Exceptions collected by Hibernate during the update are stored in the
 * {@code exceptions} field for later inspection rather than thrown.
 */
public void sync() {
    SchemaUpdate updater = new SchemaUpdate(config, connectionProperties);
    // true, true: echo the generated script and execute it against the database.
    updater.execute(true, true);
    exceptions = updater.getExceptions();
}
From source file:edu.ku.brc.specify.tools.SpecifySchemaGenerator.java
License:Open Source License
/** * Creates the Schema.// w w w.j a v a2s. c om * @param driverInfo the driver info to use * @param connectionStr the connection string for creating or opening a database * @param hostname the hostname (localhost) * @param databaseName the database name * @param user the username * @param passwd the password (clear text) * @param doUpdate tells it to update the schema instead of creating it */ protected static void doGenSchema(final DatabaseDriverInfo driverInfo, final String connectionStr, // might be a create or an open connection string final String user, final String passwd, final boolean doUpdate) { // setup the Hibernate configuration Configuration hibCfg = new AnnotationConfiguration(); hibCfg.setProperties(getHibernateProperties(driverInfo, connectionStr, user, passwd, doUpdate)); hibCfg.configure(); if (doUpdate) { SchemaUpdate schemaUpdater = new SchemaUpdate(hibCfg); log.info("Updating schema"); //System.exit(0); boolean doScript = false; log.info("Updating the DB schema"); schemaUpdater.execute(doScript, true); log.info("DB schema Updating completed"); // log the exceptions that occurred List<?> exceptions = schemaUpdater.getExceptions(); for (Object o : exceptions) { Exception e = (Exception) o; log.error(e.getMessage()); } } else { SchemaExport schemaExporter = new SchemaExport(hibCfg); schemaExporter.setDelimiter(";"); log.info("Generating schema"); //System.exit(0); boolean printToScreen = false; boolean exportToDb = true; boolean justDrop = false; boolean justCreate = true; log.info("Creating the DB schema"); schemaExporter.execute(printToScreen, exportToDb, justDrop, justCreate); log.info("DB schema creation completed"); // log the exceptions that occurred List<?> exceptions = schemaExporter.getExceptions(); for (Object o : exceptions) { Exception e = (Exception) o; log.error(e.getMessage()); } } }
From source file:lucee.runtime.orm.hibernate.HibernateSessionFactory.java
License:Open Source License
/**
 * Applies the ORM configuration's dbcreate strategy to the database: no-op for
 * NONE, drop-and-recreate (plus SQL script execution) for DROP_CREATE, or an
 * incremental schema update for UPDATE.
 *
 * @param log log to report collected schema errors to
 * @param configuration the Hibernate configuration to export from
 * @param dc datasource connection used when executing the extra SQL script
 * @param data session factory data holding the ORM configuration
 * @throws PageException on engine errors
 * @throws SQLException on database errors
 * @throws IOException on script read errors
 */
private static void schemaExport(Log log, Configuration configuration, DatasourceConnection dc,
        SessionFactoryData data) throws PageException, SQLException, IOException {
    ORMConfiguration ormConf = data.getORMConfiguration();
    if (ORMConfiguration.DBCREATE_NONE == ormConf.getDbCreate()) {
        return;
    }
    if (ORMConfiguration.DBCREATE_DROP_CREATE == ormConf.getDbCreate()) {
        SchemaExport exporter = new SchemaExport(configuration);
        exporter.setHaltOnError(true);
        // Run both drop and create against the database, without printing the script.
        exporter.execute(false, true, false, false);
        printError(log, data, exporter.getExceptions(), false);
        executeSQLScript(ormConf, dc);
        return;
    }
    if (ORMConfiguration.DBCREATE_UPDATE == ormConf.getDbCreate()) {
        SchemaUpdate updater = new SchemaUpdate(configuration);
        updater.setHaltOnError(true);
        updater.execute(false, true);
        printError(log, data, updater.getExceptions(), false);
    }
}
From source file:name.livitski.tools.persista.StorageBootstrap.java
License:Open Source License
/**
 * Updates the database schema using Hibernate's hbm2ddl "update" mode,
 * configured from the application's settings (falling back from the dedicated
 * updater credentials to the regular ones). Optionally dumps the generated DDL
 * to {@code ddlDumpFile}. If the update reports errors, all but the last are
 * logged and the last one is rethrown wrapped in a SchemaUpdateException.
 *
 * @throws ApplicationBeanException on configuration, database or schema update failure
 */
@SuppressWarnings("unchecked")
private void updateSchema() throws ApplicationBeanException {
    try {
        // Prefer dedicated updater credentials; fall back to the regular ones.
        String user = readSetting(UpdaterUserNameSetting.class);
        String password;
        if (null != user)
            password = readSetting(UpdaterPasswordSetting.class);
        else {
            user = readSetting(UserNameSetting.class);
            password = readSetting(PasswordSetting.class);
        }
        org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
        cfg.setProperty(AvailableSettings.HBM2DDL_AUTO, "update");
        cfg.setProperty(AvailableSettings.DIALECT, readSetting(HibernateSQLDialectSetting.class).getName());
        cfg.setProperty(AvailableSettings.DRIVER, getJDBCDriverClass().getName());
        // Reuse the URL of the already-open connection so the update targets the same database.
        cfg.setProperty(AvailableSettings.URL, db.getMetaData().getURL());
        cfg.setProperty(AvailableSettings.USER, user);
        cfg.setProperty(AvailableSettings.PASS, password);
        for (Class<?> clazz : getEntityClasses())
            cfg.addAnnotatedClass(clazz);
        SchemaUpdate worker = new SchemaUpdate(cfg);
        worker.setDelimiter(";");
        worker.setHaltOnError(true);
        if (null != ddlDumpFile)
            worker.setOutputFile(ddlDumpFile.getAbsolutePath());
        // true, true: print the generated script and execute it against the database.
        worker.execute(true, true);
        List<Throwable> errs = (List<Throwable>) worker.getExceptions();
        if (null != errs && !errs.isEmpty())
            // Unusual loop on purpose: logs every error except the last, then throws the
            // last one. No hasNext() guard in the for-header — the list is known non-empty
            // and the loop always exits via the throw on the final element.
            for (Iterator<Throwable> erri = errs.iterator();;) {
                Throwable err = erri.next();
                if (erri.hasNext())
                    log().error("", err);
                else
                    throw new SchemaUpdateException(this,
                            "Error(s) occured during the schema update, the last error is shown.", err);
            }
    } catch (ConfigurationException badConfig) {
        throw new StorageConfigurationException(this, badConfig);
    } catch (SQLException e) {
        throw new DatabaseException(this, e);
    }
}
From source file:org.bonitasoft.engine.business.data.impl.SchemaManager.java
License:Open Source License
/**
 * Applies an incremental schema update for the given set of managed classes.
 *
 * @param managedClasses names of the classes whose database schema should be updated
 * @return the exceptions Hibernate collected while executing the update
 *         (empty on a clean run)
 */
@SuppressWarnings("unchecked")
public List<Exception> update(final Set<String> managedClasses) {
    final SchemaUpdate updater = new SchemaUpdate(buildConfiguration(managedClasses));
    updater.execute(Target.EXPORT);
    return updater.getExceptions();
}