Example usage for org.hibernate.tool.hbm2ddl SchemaExport getExceptions

List of usage examples for org.hibernate.tool.hbm2ddl SchemaExport getExceptions

Introduction

On this page you can find example usage for org.hibernate.tool.hbm2ddl SchemaExport getExceptions.

Prototype

public List getExceptions() 

Document

Returns a List of all Exceptions which occurred during the export.
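
Below is a minimal sketch of the call pattern (the class name SchemaExportExceptionsDemo and the hibernate.cfg.xml setup are illustrative assumptions, not taken from the examples below). Because getExceptions() returns a raw List, callers cast each element to Exception, just as the usage examples on this page do:

import java.util.List;

import org.hibernate.cfg.Configuration;
import org.hibernate.tool.hbm2ddl.SchemaExport;

public class SchemaExportExceptionsDemo {

    public static void main(String[] args) {
        // Assumes a hibernate.cfg.xml (dialect and mappings) on the classpath.
        Configuration configuration = new Configuration().configure();

        SchemaExport schemaExport = new SchemaExport(configuration);
        schemaExport.setDelimiter(";");

        // script=true prints the DDL; export=false leaves the database untouched.
        schemaExport.create(true, false);

        // getExceptions() returns a raw List; cast each element to Exception.
        List exceptions = schemaExport.getExceptions();
        for (Object o : exceptions) {
            Exception e = (Exception) o;
            System.err.println("Exception during export: " + e.getMessage());
        }
    }
}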

Usage

From source file:be.fedict.eid.applet.maven.sql.ddl.SQLDDLMojo.java

License:Open Source License

@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    getLog().info("SQL DDL script generator");

    File outputFile = new File(this.outputDirectory, this.outputName);
    getLog().info("Output SQL DDL script file: " + outputFile.getAbsolutePath());

    this.outputDirectory.mkdirs();
    try {
        outputFile.createNewFile();
    } catch (IOException e) {
        throw new MojoExecutionException("I/O error.", e);
    }

    for (ArtifactItem artifactItem : this.artifactItems) {
        getLog().info("artifact: " + artifactItem.getGroupId() + ":" + artifactItem.getArtifactId());
        List<Dependency> dependencies = this.project.getDependencies();
        String version = null;
        for (Dependency dependency : dependencies) {
            if (StringUtils.equals(dependency.getArtifactId(), artifactItem.getArtifactId())
                    && StringUtils.equals(dependency.getGroupId(), artifactItem.getGroupId())) {
                version = dependency.getVersion();
                break;
            }
        }
        getLog().info("artifact version: " + version);
        VersionRange versionRange = VersionRange.createFromVersion(version);
        Artifact artifact = this.artifactFactory.createDependencyArtifact(artifactItem.getGroupId(),
                artifactItem.getArtifactId(), versionRange, "jar", null, Artifact.SCOPE_COMPILE);
        try {
            this.resolver.resolve(artifact, this.remoteRepos, this.local);
        } catch (ArtifactResolutionException e) {
            throw new MojoExecutionException("Unable to resolve artifact.", e);
        } catch (ArtifactNotFoundException e) {
            throw new MojoExecutionException("Unable to find artifact.", e);
        }
        getLog().info("artifact file: " + artifact.getFile().getAbsolutePath());
        getLog().info("hibernate dialect: " + this.hibernateDialect);

        URL artifactUrl;
        try {
            artifactUrl = artifact.getFile().toURI().toURL();
        } catch (MalformedURLException e) {
            throw new MojoExecutionException("URL error.", e);
        }

        URLClassLoader classLoader = new URLClassLoader(new URL[] { artifactUrl },
                this.getClass().getClassLoader());
        Thread.currentThread().setContextClassLoader(classLoader);

        AnnotationDB annotationDb = new AnnotationDB();
        try {
            annotationDb.scanArchives(artifactUrl);
        } catch (IOException e) {
            throw new MojoExecutionException("I/O error.", e);
        }
        Set<String> classNames = annotationDb.getAnnotationIndex().get(Entity.class.getName());
        if (classNames == null) {
            // AnnotationDB returns null when no class carries the annotation.
            classNames = Collections.emptySet();
        }
        getLog().info("# JPA entity classes: " + classNames.size());

        AnnotationConfiguration configuration = new AnnotationConfiguration();

        configuration.setProperty("hibernate.dialect", this.hibernateDialect);
        Dialect dialect = Dialect.getDialect(configuration.getProperties());
        getLog().info("dialect: " + dialect.toString());

        for (String className : classNames) {
            getLog().info("JPA entity: " + className);
            Class<?> entityClass;
            try {
                entityClass = classLoader.loadClass(className);
                getLog().info("entity class loader: " + entityClass.getClassLoader());
            } catch (ClassNotFoundException e) {
                throw new MojoExecutionException("class not found.", e);
            }
            configuration.addAnnotatedClass(entityClass);
        }

        SchemaExport schemaExport = new SchemaExport(configuration);
        schemaExport.setFormat(true);
        schemaExport.setHaltOnError(true);
        schemaExport.setOutputFile(outputFile.getAbsolutePath());
        schemaExport.setDelimiter(";");

        // Generate the DDL script (writes to the configured output file).
        schemaExport.execute(true, false, false, true);
        List<Exception> exceptions = schemaExport.getExceptions();
        for (Exception exception : exceptions) {
            getLog().error("exception: " + exception.getMessage());
        }

        // Log the generated script only after the export has written it.
        try {
            getLog().info("SQL DDL script: " + IOUtil.toString(new FileInputStream(outputFile)));
        } catch (FileNotFoundException e) {
            throw new MojoExecutionException("file not found.", e);
        } catch (IOException e) {
            throw new MojoExecutionException("I/O error.", e);
        }
    }
}

From source file:com.amalto.core.storage.hibernate.HibernateStorage.java

License:Open Source License

@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public synchronized void prepare(MetadataRepository repository, Set<Expression> optimizedExpressions,
        boolean force, boolean dropExistingData) {
    if (!force && isPrepared) {
        return; // No-op: already prepared and not forced.
    }
    if (isPrepared) {
        close();
        internalInit();
    }
    if (dataSource == null) {
        throw new IllegalArgumentException("Datasource is not set."); //$NON-NLS-1$
    }
    // No support for data models that use inheritance, g* XSD simple types, or fields that start with
    // X_TALEND_
    try {
        repository.accept(METADATA_CHECKER);
        userMetadataRepository = repository;
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred during unsupported features check.", e); //$NON-NLS-1$
    }
    // Loads additional types for staging area.
    if (storageType == StorageType.STAGING) {
        userMetadataRepository = repository.copy(); // See TMDM-6938: prevents staging types from appearing
                                                    // in master storage.
        userMetadataRepository
                .load(MetadataRepositoryAdmin.class.getResourceAsStream("stagingInternalTypes.xsd")); //$NON-NLS-1$
    }
    // Create class loader for storage's dynamically created classes.
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    Class<? extends StorageClassLoader> clazz;
    try {
        try {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(ALTERNATE_CLASS_LOADER);
        } catch (ClassNotFoundException e) {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(CLASS_LOADER);
        }
        Constructor<? extends StorageClassLoader> constructor = clazz.getConstructor(ClassLoader.class,
                String.class, StorageType.class);
        storageClassLoader = constructor.newInstance(contextClassLoader, storageName, storageType);
        storageClassLoader.setDataSourceConfiguration(dataSource);
        storageClassLoader.generateHibernateConfig(); // Checks if configuration can be generated.
    } catch (Exception e) {
        throw new RuntimeException("Could not create storage class loader", e); //$NON-NLS-1$
    }
    if (dropExistingData) {
        LOGGER.info("Cleaning existing database content."); //$NON-NLS-1$
        StorageCleaner cleaner = new JDBCStorageCleaner(new FullTextIndexCleaner());
        cleaner.clean(this);
    } else {
        LOGGER.info("*NOT* cleaning existing database content."); //$NON-NLS-1$
    }
    if (autoPrepare) {
        LOGGER.info("Preparing database before schema generation."); //$NON-NLS-1$
        StorageInitializer initializer = new JDBCStorageInitializer();
        if (initializer.supportInitialization(this)) {
            if (!initializer.isInitialized(this)) {
                initializer.initialize(this);
            } else {
                LOGGER.info("Database is already prepared."); //$NON-NLS-1$
            }
        } else {
            LOGGER.info("Datasource is not configured for automatic initialization."); //$NON-NLS-1$
        }
    } else {
        LOGGER.info("*NOT* preparing database before schema generation."); //$NON-NLS-1$
    }
    try {
        Thread.currentThread().setContextClassLoader(storageClassLoader);
        // Mapping of data model types to RDBMS (i.e. 'flatten' representation of types).
        MetadataRepository internalRepository;
        try {
            InternalRepository typeEnhancer = getTypeEnhancer();
            internalRepository = userMetadataRepository.accept(typeEnhancer);
            mappingRepository = typeEnhancer.getMappings();
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during type mapping creation.", e); //$NON-NLS-1$
        }
        // Set fields to be indexed in database.
        Set<FieldMetadata> databaseIndexedFields = new HashSet<FieldMetadata>();
        switch (storageType) {
        case MASTER:
            // Adds indexes on user defined fields
            for (Expression optimizedExpression : optimizedExpressions) {
                Collection<FieldMetadata> indexedFields = RecommendedIndexes.get(optimizedExpression);
                for (FieldMetadata indexedField : indexedFields) {
                    // TMDM-5896: Don't index Composite Key fields
                    if (indexedField instanceof CompoundFieldMetadata) {
                        continue;
                    }
                    // TMDM-5311: Don't index TEXT fields
                    TypeMetadata indexedFieldType = indexedField.getType();
                    if (!isIndexable(indexedFieldType)) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue;
                    }
                    // Go up the containment tree in case containing type is anonymous.
                    ComplexTypeMetadata containingType = indexedField.getContainingType().getEntity();
                    TypeMapping mapping = mappingRepository.getMappingFromUser(containingType);
                    FieldMetadata databaseField = mapping.getDatabase(indexedField);
                    if (databaseField == null) {
                        LOGGER.error("Could not index field '" + indexedField + "' (" + indexedField.getPath() //$NON-NLS-1$ //$NON-NLS-2$
                                + "), ignoring index."); //$NON-NLS-1$
                        continue;
                    } else if (!isIndexable(databaseField.getType())) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value (in database mapping) is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue; // Don't include long text fields among the indexed fields
                    }
                    // Database specific behaviors
                    switch (dataSource.getDialectName()) {
                    case SQL_SERVER:
                        // TMDM-8144: Don't index field name on SQL Server when size > 900
                        String maxLengthStr = indexedField.getType()
                                .<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                        if (maxLengthStr == null) { // go up the type inheritance tree to find max length annotation
                            TypeMetadata type = indexedField.getType();
                            while (!XMLConstants.W3C_XML_SCHEMA_NS_URI.equals(type.getNamespace())
                                    && !type.getSuperTypes().isEmpty()) {
                                type = type.getSuperTypes().iterator().next();
                                maxLengthStr = type.<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                                if (maxLengthStr != null) {
                                    break;
                                }
                            }
                        }
                        if (maxLengthStr != null) {
                            Integer maxLength = Integer.parseInt(maxLengthStr);
                            if (maxLength > 900) {
                                LOGGER.warn("Skip index on field '" + indexedField.getPath() //$NON-NLS-1$
                                        + "' (too long value)."); //$NON-NLS-1$
                                continue;
                            }
                        }
                        break;
                    case H2:
                    case MYSQL:
                    case POSTGRES:
                    case DB2:
                    case ORACLE_10G:
                    default:
                        // Nothing to do for these databases
                        break;
                    }
                    databaseIndexedFields.add(databaseField);
                    if (!databaseField.getContainingType().isInstantiable()) {
                        Collection<ComplexTypeMetadata> roots = RecommendedIndexes
                                .getRoots(optimizedExpression);
                        for (ComplexTypeMetadata root : roots) {
                            List<FieldMetadata> path = StorageMetadataUtils.path(
                                    mappingRepository.getMappingFromUser(root).getDatabase(), databaseField);
                            if (path.size() > 1) {
                                databaseIndexedFields.addAll(path.subList(0, path.size() - 1));
                            } else {
                                LOGGER.warn("Failed to properly index field '" + databaseField + "'."); //$NON-NLS-1$ //$NON-NLS-2$
                            }
                        }
                    }
                }
            }
            break;
        case STAGING:
            if (!optimizedExpressions.isEmpty()) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Ignoring " + optimizedExpressions.size() //$NON-NLS-1$
                            + " to optimize (disabled on staging area)."); //$NON-NLS-1$
                }
            }
            // Adds "staging status" / "staging block key" / "staging task id" as indexed fields
            for (TypeMapping typeMapping : mappingRepository.getAllTypeMappings()) {
                ComplexTypeMetadata database = typeMapping.getDatabase();
                if (database.hasField(METADATA_STAGING_STATUS)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_STATUS));
                }
                if (database.hasField(METADATA_STAGING_BLOCK_KEY)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_BLOCK_KEY));
                }
                if (database.hasField(METADATA_TASK_ID)) {
                    databaseIndexedFields.add(database.getField(METADATA_TASK_ID));
                }
            }
            break;
        case SYSTEM: // Nothing to index on SYSTEM
            break;
        }
        // Don't add FK in indexes if using H2
        if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.H2) {
            Iterator<FieldMetadata> indexedFields = databaseIndexedFields.iterator();
            while (indexedFields.hasNext()) {
                FieldMetadata field = indexedFields.next();
                if (field instanceof ReferenceFieldMetadata || field.isKey()) {
                    indexedFields.remove(); // H2 doesn't like indexes on PKs or FKs.
                }
            }
        }
        switch (dataSource.getDialectName()) {
        case ORACLE_10G:
            tableResolver = new OracleStorageTableResolver(databaseIndexedFields,
                    dataSource.getNameMaxLength());
            break;
        default:
            tableResolver = new StorageTableResolver(databaseIndexedFields, dataSource.getNameMaxLength());
        }
        storageClassLoader.setTableResolver(tableResolver);
        // Master, Staging and System share same class creator.
        switch (storageType) {
        case MASTER:
        case STAGING:
        case SYSTEM:
            hibernateClassCreator = new ClassCreator(storageClassLoader);
            break;
        }
        // Create Hibernate classes (after some modifications to the types).
        try {
            internalRepository.accept(hibernateClassCreator);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during dynamic classes creation.", e); //$NON-NLS-1$
        }
        // Last step: configuration of Hibernate
        try {
            // Hibernate needs to have dynamic classes in context class loader during configuration.
            InputStream ehCacheConfig = storageClassLoader
                    .getResourceAsStream(StorageClassLoader.EHCACHE_XML_CONFIG);
            if (ehCacheConfig != null) {
                CacheManager.create(ehCacheConfig);
            }
            configuration.configure(StorageClassLoader.HIBERNATE_CONFIG);
            batchSize = Integer.parseInt(configuration.getProperty(Environment.STATEMENT_BATCH_SIZE));
            // Sets default schema for Oracle
            Properties properties = configuration.getProperties();
            if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.ORACLE_10G) {
                properties.setProperty(Environment.DEFAULT_SCHEMA, dataSource.getUserName());
            }
            // Logs DDL *before* initialization in case initialization fails (useful for debugging).
            if (LOGGER.isTraceEnabled()) {
                traceDDL();
            }
            // Customize schema generation according to datasource content.
            RDBMSDataSource.SchemaGeneration schemaGeneration = dataSource.getSchemaGeneration();
            List exceptions = Collections.emptyList();
            switch (schemaGeneration) {
            case CREATE:
                SchemaExport schemaExport = new SchemaExport(configuration);
                schemaExport.create(false, true);
                // Exceptions may happen during recreation (Hibernate may perform statements on tables that do
                // not exist): these exceptions are supposed to be harmless (but log them to DEBUG just in case).
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Exception(s) occurred during schema creation:"); //$NON-NLS-1$
                    for (Object exceptionObject : schemaExport.getExceptions()) {
                        LOGGER.debug(((Exception) exceptionObject).getMessage());
                    }
                }
                break;
            case VALIDATE:
                SchemaValidator schemaValidator = new SchemaValidator(configuration);
                schemaValidator.validate(); // This is supposed to throw exception on validation issue.
                break;
            case UPDATE:
                SchemaUpdate schemaUpdate = new SchemaUpdate(configuration);
                schemaUpdate.execute(false, true);
                exceptions = schemaUpdate.getExceptions();
                break;
            }
            // Throw an exception if schema update met issue(s).
            if (!exceptions.isEmpty()) {
                StringBuilder sb = new StringBuilder();
                sb.append("Could not prepare database schema: "); //$NON-NLS-1$
                Iterator iterator = exceptions.iterator();
                while (iterator.hasNext()) {
                    Exception exception = (Exception) iterator.next();
                    if (exception instanceof SQLException) {
                        SQLException currentSQLException = (SQLException) exception;
                        while (currentSQLException != null) {
                            sb.append(currentSQLException.getMessage());
                            sb.append('\n');
                            currentSQLException = currentSQLException.getNextException();
                        }
                    } else if (exception != null) {
                        sb.append(exception.getMessage());
                    }
                    if (iterator.hasNext()) {
                        sb.append('\n');
                    }
                }
                throw new IllegalStateException(sb.toString());
            }
            // Initialize Hibernate
            Environment.verifyProperties(properties);
            ConfigurationHelper.resolvePlaceHolders(properties);
            ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(properties)
                    .build();
            factory = configuration.buildSessionFactory(serviceRegistry);
            MDMTransactionSessionContext.declareStorage(this, factory);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during Hibernate initialization.", e); //$NON-NLS-1$
        }
        // All set: set prepared flag to true.
        isPrepared = true;
        LOGGER.info("Storage '" + storageName + "' (" + storageType + ") is ready."); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    } catch (Throwable t) {
        try {
            // This prevents a PermGen OOME in case of multiple failures to start.
            close();
        } catch (Exception e) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Error occurred during clean up following failed prepare", e); //$NON-NLS-1$
            }
        }
        throw new RuntimeException("Could not prepare '" + storageName + "'.", t); //$NON-NLS-1$ //$NON-NLS-2$
    } finally {
        Thread.currentThread().setContextClassLoader(contextClassLoader);
    }
}

From source file:com.amalto.core.storage.hibernate.HibernateStorage.java

License:Open Source License

private void traceDDL() {
    try {
        if (configuration == null) {
            throw new IllegalStateException("Expect a Hibernate configuration to be set."); //$NON-NLS-1$
        }
        String jbossServerTempDir = System.getProperty("java.io.tmpdir"); //$NON-NLS-1$
        RDBMSDataSource.DataSourceDialect dialectType = dataSource.getDialectName();
        SchemaExport export = new SchemaExport(configuration);
        export.setFormat(false);
        String filename = jbossServerTempDir + File.separator + storageName + "_" + storageType + "_" //$NON-NLS-1$//$NON-NLS-2$
                + dialectType + ".ddl"; //$NON-NLS-1$
        export.setOutputFile(filename);
        export.setDelimiter(";"); //$NON-NLS-1$
        export.execute(false, false, false, true);
        if (export.getExceptions().size() > 0) {
            for (int i = 0; i < export.getExceptions().size(); i++) {
                LOGGER.error("Error occurred while producing ddl.", //$NON-NLS-1$
                        (Exception) export.getExceptions().get(i));
            }
        }
        LOGGER.info("DDL exported to file '" + filename + "'."); //$NON-NLS-1$ //$NON-NLS-2$
    } catch (Exception e) {
        LOGGER.error("Error occurred while producing ddl.", e); //$NON-NLS-1$
    }
}

From source file:com.ironiacorp.persistence.hibernate.GenericHibernateDataSource.java

License:Open Source License

/**
 * Check the exceptions thrown while running the update or create
 * scripts for the current Hibernate configuration.
 *
 * @param ddl The SchemaExport or SchemaUpdate used.
 * @throws IllegalArgumentException If the argument is not a SchemaExport or
 * SchemaUpdate instance.
 */
private List handleExceptions(Object ddl) {
    // XOR: the argument must be exactly one of SchemaExport or SchemaUpdate.
    if (!(ddl instanceof SchemaExport ^ ddl instanceof SchemaUpdate)) {
        throw new IllegalArgumentException();
    }

    List exceptions = null;
    List<Exception> errors = new LinkedList<Exception>();

    if (ddl instanceof SchemaExport) {
        SchemaExport schema = (SchemaExport) ddl;
        exceptions = schema.getExceptions();
    } else {
        SchemaUpdate schema = (SchemaUpdate) ddl;
        exceptions = schema.getExceptions();
    }

    if (!exceptions.isEmpty()) {
        for (Object o : exceptions) {
            Exception e = (Exception) o;

            if (e instanceof SQLException) {
                // Filter exceptions.
                String dialect = hibernateConfig.getProperty("dialect");
                if (dialect.equals("org.hibernate.dialect.MySQLDialect")
                        || dialect.equals("org.hibernate.dialect.MySQLInnoDBDialect")
                        || dialect.equals("org.hibernate.dialect.MySQLMyISAMDialect")) {
                    e = handleMySQLError((SQLException) e);
                }
            }

            if (e != null) {
                errors.add(e);
            }
        }
    }
    return errors;
}

From source file:com.ironiacorp.persistence.hibernate.GenericHibernateDataSource.java

License:Open Source License

/**
 * Create the database.
 * 
 * @throws RuntimeException If an error is found when running the DDL script.
 */
public void createDB() {
    log.debug("Creating the database");
    log.debug(getCreateDDLScript());
    SchemaExport ddl = new SchemaExport(hibernateConfig);
    List exceptions = null;

    ddl.create(false, true);
    exceptions = handleExceptions(ddl);
    if (!exceptions.isEmpty()) {
        throw new RuntimeException("exception.bootstrap.createdb", (Exception) ddl.getExceptions().get(0));
    }
}

From source file:com.ironiacorp.persistence.hibernate.GenericHibernateDataSource.java

License:Open Source License

/**
 * Drop the entire database.
 * 
 * @throws RuntimeException If an error is found when running the DDL script.
 */
public void dropDB() {
    log.debug("Dropping the database");
    log.debug(getDropDDLScript());
    SchemaExport ddl = new SchemaExport(hibernateConfig);
    List exceptions = null;

    ddl.drop(false, true);
    exceptions = handleExceptions(ddl);
    if (!exceptions.isEmpty()) {
        throw new RuntimeException("exception.bootstrap.dropdb", (Exception) ddl.getExceptions().get(0));
    }
}

From source file:com.ironiacorp.persistence.hibernate.HibernateBootstrap.java

License:Open Source License

/**
 * Check the exceptions thrown while running the update or create scripts
 * for the current Hibernate configuration.
 *
 * @param ddl
 *            The SchemaExport or SchemaUpdate used.
 * @throws IllegalArgumentException
 *             If the argument is not a SchemaExport or SchemaUpdate
 *             instance.
 */
@SuppressWarnings("unchecked")
private List<Exception> handleExceptions(Object ddl) {
    if (!(ddl instanceof SchemaExport ^ ddl instanceof SchemaUpdate)) {
        throw new IllegalArgumentException();
    }

    List<Exception> exceptions = null;
    List<Exception> errors = new LinkedList<Exception>();

    if (ddl instanceof SchemaExport) {
        SchemaExport schema = (SchemaExport) ddl;
        exceptions = schema.getExceptions();
    } else {
        SchemaUpdate schema = (SchemaUpdate) ddl;
        exceptions = schema.getExceptions();
    }

    if (!exceptions.isEmpty()) {
        for (Object o : exceptions) {
            Exception e = (Exception) o;
            SQLException sqle = null;
            String dialect = null;

            log.debug(e);

            if (e instanceof SQLException) {
                sqle = (SQLException) e;
                log.debug("SQL error code: " + sqle.getErrorCode());
                log.debug("SQL state: " + sqle.getSQLState());

                // Filter exceptions.
                dialect = config.getProperty("dialect");
                if (dialect.equals("org.hibernate.dialect.MySQLDialect")
                        || dialect.equals("org.hibernate.dialect.MySQLInnoDBDialect")
                        || dialect.equals("org.hibernate.dialect.MySQLMyISAMDialect")) {
                    e = handleMySQLError(sqle);
                }
            }

            if (e != null) {
                errors.add(e);
            }
        }
    }
    return errors;
}

From source file:com.ironiacorp.persistence.hibernate.HibernateBootstrap.java

License:Open Source License

/**
 * Create the database.
 * 
 * @throws RuntimeException
 *             If an error is found when running the DDL script.
 */
public void createDB() {
    log.debug("Creating the database");
    log.debug(getCreateDDLScript());
    SchemaExport ddl = new SchemaExport(config);
    List<Exception> exceptions = null;

    ddl.create(false, true);
    exceptions = handleExceptions(ddl);
    if (!exceptions.isEmpty()) {
        throw new RuntimeException("exception.bootstrap.createdb", (Exception) ddl.getExceptions().get(0));
    }
}

From source file:com.ironiacorp.persistence.hibernate.HibernateBootstrap.java

License:Open Source License

/**
 * Drop the entire database.
 * 
 * @throws RuntimeException
 *             If an error is found when running the DDL script.
 */
public void dropDB() {
    log.debug("Dropping the database");
    log.debug(getDropDDLScript());
    SchemaExport ddl = new SchemaExport(config);
    List<Exception> exceptions = null;

    ddl.drop(false, true);
    exceptions = handleExceptions(ddl);
    if (!exceptions.isEmpty()) {
        throw new RuntimeException("exception.bootstrap.dropdb", (Exception) ddl.getExceptions().get(0));
    }
}

From source file:com.wavemaker.tools.data.ExportDB.java

License:Open Source License

@Override
protected void customRun() {

    init();

    final Configuration cfg = new Configuration();

    // cfg.addDirectory(this.hbmFilesDir);

    this.hbmFilesDir.find().files().performOperation(new ResourceOperation<com.wavemaker.tools.io.File>() {

        @Override
        public void perform(com.wavemaker.tools.io.File file) {
            if (file.getName().endsWith(".hbm.xml")) {
                cfg.addInputStream(file.getContent().asInputStream());
            }
        }
    });

    Properties connectionProperties = getHibernateConnectionProperties();

    cfg.addProperties(connectionProperties);

    SchemaExport export = null;
    SchemaUpdate update = null;
    File ddlFile = null;

    try {
        if (this.overrideTable) {
            Callable<SchemaExport> t = new Callable<SchemaExport>() {

                @Override
                public SchemaExport call() {
                    return new SchemaExport(cfg);
                }
            };

            if (this.classesDir == null) {
                try {
                    export = t.call();
                } catch (Exception e) {
                    ReflectionUtils.rethrowRuntimeException(e);
                }
            } else {
                export = ResourceClassLoaderUtils.runInClassLoaderContext(true, t, this.classesDir);
            }

            ddlFile = File.createTempFile("ddl", ".sql");
            ddlFile.deleteOnExit();

            export.setOutputFile(ddlFile.getAbsolutePath());
            export.setDelimiter(";");
            export.setFormat(true);

            String extraddl = prepareForExport(this.exportToDatabase);

            export.create(this.verbose, this.exportToDatabase);

            this.errors = CastUtils.cast(export.getExceptions());
            this.errors = filterError(this.errors, connectionProperties);

            this.ddl = IOUtils.read(ddlFile);

            if (!ObjectUtils.isNullOrEmpty(extraddl)) {
                this.ddl = extraddl + "\n" + this.ddl;
            }
        } else {
            Callable<SchemaUpdate> t = new Callable<SchemaUpdate>() {

                @Override
                public SchemaUpdate call() {
                    return new SchemaUpdate(cfg);
                }
            };

            if (this.classesDir == null) {
                try {
                    update = t.call();
                } catch (Exception e) {
                    ReflectionUtils.rethrowRuntimeException(e);
                }
            } else {
                update = ResourceClassLoaderUtils.runInClassLoaderContext(t, this.classesDir);
            }

            prepareForExport(this.exportToDatabase);

            Connection conn = JDBCUtils.getConnection(this.connectionUrl.toString(), this.username,
                    this.password, this.driverClassName);

            Dialect dialect = Dialect.getDialect(connectionProperties);

            DatabaseMetadata meta = new DatabaseMetadata(conn, dialect);

            String[] updateSQL = cfg.generateSchemaUpdateScript(dialect, meta);

            update.execute(this.verbose, this.exportToDatabase);

            this.errors = CastUtils.cast(update.getExceptions());
            StringBuilder sb = new StringBuilder();
            for (String line : updateSQL) {
                sb = sb.append(line);
                sb = sb.append("\n");
            }
            this.ddl = sb.toString();

        }
    } catch (IOException ex) {
        throw new DataServiceRuntimeException(ex);
    } catch (SQLException qex) {
        throw new DataServiceRuntimeException(qex);
    } catch (RuntimeException rex) {
        if (rex.getCause() != null && rex.getCause().getMessage() != null
                && rex.getCause().getMessage().contains(NO_SUITABLE_DRIVER)
                && WMAppContext.getInstance().isCloudFoundry()) {
            String msg = rex.getMessage() + " - " + UNKNOWN_DATABASE;
            throw new DataServiceRuntimeException(msg);
        } else {
            throw new DataServiceRuntimeException(rex);
        }
    } finally {
        try {
            // ddlFile is only created on the SchemaExport path; guard against null.
            if (ddlFile != null) {
                ddlFile.delete();
            }
        } catch (Exception ignore) {
        }
    }
}