Example usage for org.hibernate.tool.hbm2ddl SchemaExport create

Introduction

This page collects example usages of org.hibernate.tool.hbm2ddl.SchemaExport#create. Most of the examples below call the legacy create(boolean script, boolean export) overload from Hibernate 5.1 and earlier; Hibernate 5.2 removed that overload in favor of the one shown in the prototype, which takes a set of output targets and a Metadata object.

Prototype

public void create(EnumSet<TargetType> targetTypes, Metadata metadata) 
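
For reference, here is a minimal sketch of the Hibernate 5.2+ overload above, assuming a valid hibernate.cfg.xml on the classpath. MyEntity is a stand-in entity defined inline for illustration; in practice you would register your own annotated classes.

import java.util.EnumSet;

import javax.persistence.Entity;
import javax.persistence.Id;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;

public class SchemaExportSketch {

    @Entity
    static class MyEntity { // stand-in entity for illustration
        @Id
        Long id;
    }

    public static void main(String[] args) {
        // Read hibernate.cfg.xml from the classpath and build the service registry.
        StandardServiceRegistry registry = new StandardServiceRegistryBuilder().configure().build();
        try {
            Metadata metadata = new MetadataSources(registry)
                    .addAnnotatedClass(MyEntity.class)
                    .buildMetadata();

            SchemaExport export = new SchemaExport();
            export.setDelimiter(";");
            export.setOutputFile("schema.sql");
            // SCRIPT writes the DDL to schema.sql; DATABASE also executes it against the configured connection.
            export.create(EnumSet.of(TargetType.SCRIPT, TargetType.DATABASE), metadata);
        } finally {
            StandardServiceRegistryBuilder.destroy(registry);
        }
    }
}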

Usage

From source file:br.com.machina.verbum.Main.java

License:Apache License

private static void generateTables() {

    AnnotationConfiguration configuration = new AnnotationConfiguration();
    configuration.configure();

    SchemaExport export = new SchemaExport(configuration);
    export.setDelimiter(";");
    export.setOutputFile("tables.sql");
    export.create(true, true);
    System.out.println("Ok!");

}

From source file:br.com.petslive.dao.DaoManagerHiber.java

public static void main(String[] args) {
    SchemaExport se = new SchemaExport(new AnnotationConfiguration().configure());
    se.create(true, true);
}

From source file:br.edu.ifpe.garanhuns.ppo.pep.models.persistence.management.DaoManagerHiberRever.java

public static void main(String[] args) {
    SchemaExport se = new SchemaExport(new Configuration().configure());
    se.create(true, true);
}

From source file:ca.myewb.build.CreateDb.java

License:Open Source License

private static void createDb(String postfix) {
    try {
        System.out.println("Creating fresh database");

        Configuration config = HibernateUtil.getConfiguration(postfix);

        // Set up the schema exporter utility
        SchemaExport sch = new SchemaExport(config);
        sch = sch.setDelimiter(";");

        // Drop and re-create the database
        sch.drop(false, true);
        sch.create(false, true);
    } catch (Exception e) {
        System.err.print("Exception: " + e);
        e.printStackTrace();
    }
}

From source file:ch.qos.logback.audit.server.TableCreator.java

License:Open Source License

public void createTables(String filename) {
    SchemaExport schemaExport = new SchemaExport(cfg);
    boolean printDDL = false;
    if (filename != null) {
        schemaExport.setOutputFile(filename);
        printDDL = true;
    }

    schemaExport.create(printDDL, true);

}

From source file:com.aan.girsang.server.launcher.GenerateDatabase.java

public static void main(String[] args) throws SQLException {
    AbstractApplicationContext ctx = new ClassPathXmlApplicationContext("applicationContext.xml");

    DataSource dataSource = (DataSource) ctx.getBean("dataSource");

    Configuration cfg = new AnnotationConfiguration().configure("hibernate.cfg.xml")
            .setProperty("hibernate.dialect", "org.hibernate.dialect.H2Dialect");

    try (Connection conn = dataSource.getConnection()) {
        // Print the generated DDL and execute it against the connection.
        // try-with-resources closes the connection; no explicit close() is needed.
        SchemaExport export = new SchemaExport(cfg, conn);
        export.create(true, true);
    }
    ctx.registerShutdownHook();

}

From source file:com.abcanthur.website.codegenhack.JPADatabase.java

License:Apache License

@SuppressWarnings("serial")
@Override
protected DSLContext create0() {
    if (connection == null) {
        String packages = getProperties().getProperty("packages");

        if (isBlank(packages)) {
            packages = "";
            log.warn("No packages defined",
                    "It is highly recommended that you provide explicit packages to scan");
        }

        try {
            connection = DriverManager.getConnection("jdbc:h2:mem:jooq-meta-extensions", "sa", "");

            MetadataSources metadata = new MetadataSources(new StandardServiceRegistryBuilder()
                    .applySetting("hibernate.dialect", "org.hibernate.dialect.H2Dialect")
                    .applySetting("javax.persistence.schema-generation-connection", connection)

                    // [#5607] JPADatabase causes warnings - This prevents them
                    .applySetting(AvailableSettings.CONNECTION_PROVIDER,
                            "com.abcanthur.website.codegenhack.CustomConnectionProvider")
                    .build());

            ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(
                    true);

            scanner.addIncludeFilter(new AnnotationTypeFilter(Entity.class));
            for (String pkg : packages.split(","))
                for (BeanDefinition def : scanner.findCandidateComponents(defaultIfBlank(pkg, "").trim()))
                    metadata.addAnnotatedClass(Class.forName(def.getBeanClassName()));

            // This seems to be the way to do this in idiomatic Hibernate 5.0 API
            // See also: http://stackoverflow.com/q/32178041/521799
            // SchemaExport export = new SchemaExport((MetadataImplementor) metadata.buildMetadata(), connection);
            // export.create(true, true);

            // Hibernate 5.2 broke 5.0 API again. Here's how to do this now:
            SchemaExport export = new SchemaExport();
            export.create(EnumSet.of(TargetType.DATABASE), metadata.buildMetadata());
        } catch (Exception e) {
            throw new DataAccessException("Error while exporting schema", e);
        }
    }

    return DSL.using(connection);
}

From source file:com.ah.util.HibernateUtil.java

public static void main(String[] args) {
    init(false);
    System.out.println("Entered HibernateUtil.main");
    System.out.println("# arguments: " + Arrays.asList(args));
    System.out.println("Getting configuration.");

    if ("create".equals(args[0])) {
        if (args.length >= 2) {
            String newUrl = "jdbc:postgresql://localhost/" + args[1];
            configuration.setProperty("hibernate.connection.url", newUrl);

            if (args.length >= 3) {
                try {
                    int i = Integer.parseInt(args[2]);

                    if ((1 == i) && (BeOperateHMCentOSImpl.isExistHomeDomain())) {
                        System.out.println("have tables! need not recreate");

                        return;
                    }
                } catch (Exception ex) {
                    System.out.println(ex);
                }
            }
        }

        SchemaExport schemaExport = new SchemaExport(configuration);

        System.out.println("Creating schema ...");
        schemaExport.create(true, true);
        //   BeSqlProcedure.insertSqlProcedure();
        DBFunction.createHex2Int();
        DBFunction.createDBRollUp();
        DBFunction.createRepoRollUp();
        System.out.println("Create schema finished.");
    } else if ("export".equals(args[0])) {
        SchemaExport schemaExport = new SchemaExport(configuration);

        schemaExport.setOutputFile("schema.ddl");
        schemaExport.setDelimiter(";");
        System.out.println("Exporting schema ...");
        schemaExport.create(true, false);
        System.out.println("Export finished.");
    } else if ("drop".equals(args[0])) {
        SchemaExport schemaExport = new SchemaExport(configuration);

        System.out.println("Dropping schema ...");
        schemaExport.drop(true, true);
        System.out.println("Drop schema finished.");
    } else if ("reset".equals(args[0])) {
        //java HibernateUtil reset jdbc:postgresql://ip_address/db_name
        if (args.length >= 2) {
            configuration.setProperty("hibernate.connection.url", args[1]);
        }
        SchemaExport schemaExport = new SchemaExport(configuration);
        schemaExport.create(true, true);
        DBFunction.createHex2Int();
        DBFunction.createDBRollUp();
        DBFunction.createRepoRollUp();
        System.out.println("execute reset finished.");
    }

    close();
}

From source file:com.amalto.core.storage.hibernate.HibernateStorage.java

License:Open Source License

@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public synchronized void prepare(MetadataRepository repository, Set<Expression> optimizedExpressions,
        boolean force, boolean dropExistingData) {
    if (!force && isPrepared) {
        return; // No-op: already prepared.
    }
    if (isPrepared) {
        close();
        internalInit();
    }
    if (dataSource == null) {
        throw new IllegalArgumentException("Datasource is not set."); //$NON-NLS-1$
    }
    // No support for data models that include inheritance, g* XSD simple types, or fields that start
    // with X_TALEND_
    try {
        repository.accept(METADATA_CHECKER);
        userMetadataRepository = repository;
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred during unsupported features check.", e); //$NON-NLS-1$
    }
    // Loads additional types for staging area.
    if (storageType == StorageType.STAGING) {
        userMetadataRepository = repository.copy(); // See TMDM-6938: prevents staging types to appear in master
                                                    // storage.
        userMetadataRepository
                .load(MetadataRepositoryAdmin.class.getResourceAsStream("stagingInternalTypes.xsd")); //$NON-NLS-1$
    }
    // Create class loader for storage's dynamically created classes.
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    Class<? extends StorageClassLoader> clazz;
    try {
        try {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(ALTERNATE_CLASS_LOADER);
        } catch (ClassNotFoundException e) {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(CLASS_LOADER);
        }
        Constructor<? extends StorageClassLoader> constructor = clazz.getConstructor(ClassLoader.class,
                String.class, StorageType.class);
        storageClassLoader = constructor.newInstance(contextClassLoader, storageName, storageType);
        storageClassLoader.setDataSourceConfiguration(dataSource);
        storageClassLoader.generateHibernateConfig(); // Checks if configuration can be generated.
    } catch (Exception e) {
        throw new RuntimeException("Could not create storage class loader", e); //$NON-NLS-1$
    }
    if (dropExistingData) {
        LOGGER.info("Cleaning existing database content."); //$NON-NLS-1$
        StorageCleaner cleaner = new JDBCStorageCleaner(new FullTextIndexCleaner());
        cleaner.clean(this);
    } else {
        LOGGER.info("*NOT* cleaning existing database content."); //$NON-NLS-1$
    }
    if (autoPrepare) {
        LOGGER.info("Preparing database before schema generation."); //$NON-NLS-1$
        StorageInitializer initializer = new JDBCStorageInitializer();
        if (initializer.supportInitialization(this)) {
            if (!initializer.isInitialized(this)) {
                initializer.initialize(this);
            } else {
                LOGGER.info("Database is already prepared."); //$NON-NLS-1$
            }
        } else {
            LOGGER.info("Datasource is not configured for automatic initialization."); //$NON-NLS-1$
        }
    } else {
        LOGGER.info("*NOT* preparing database before schema generation."); //$NON-NLS-1$
    }
    try {
        Thread.currentThread().setContextClassLoader(storageClassLoader);
        // Mapping of data model types to RDBMS (i.e. 'flatten' representation of types).
        MetadataRepository internalRepository;
        try {
            InternalRepository typeEnhancer = getTypeEnhancer();
            internalRepository = userMetadataRepository.accept(typeEnhancer);
            mappingRepository = typeEnhancer.getMappings();
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during type mapping creation.", e); //$NON-NLS-1$
        }
        // Set fields to be indexed in database.
        Set<FieldMetadata> databaseIndexedFields = new HashSet<FieldMetadata>();
        switch (storageType) {
        case MASTER:
            // Adds indexes on user defined fields
            for (Expression optimizedExpression : optimizedExpressions) {
                Collection<FieldMetadata> indexedFields = RecommendedIndexes.get(optimizedExpression);
                for (FieldMetadata indexedField : indexedFields) {
                    // TMDM-5896: Don't index Composite Key fields
                    if (indexedField instanceof CompoundFieldMetadata) {
                        continue;
                    }
                    // TMDM-5311: Don't index TEXT fields
                    TypeMetadata indexedFieldType = indexedField.getType();
                    if (!isIndexable(indexedFieldType)) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue;
                    }
                    // Go up the containment tree in case containing type is anonymous.
                    ComplexTypeMetadata containingType = indexedField.getContainingType().getEntity();
                    TypeMapping mapping = mappingRepository.getMappingFromUser(containingType);
                    FieldMetadata databaseField = mapping.getDatabase(indexedField);
                    if (databaseField == null) {
                        LOGGER.error("Could not index field '" + indexedField + "' (" + indexedField.getPath() //$NON-NLS-1$ //$NON-NLS-2$
                                + "), ignoring index."); //$NON-NLS-1$
                        continue;
                    } else if (!isIndexable(databaseField.getType())) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value (in database mapping) is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue; // Don't include long text fields among the indexed fields
                    }
                    // Database specific behaviors
                    switch (dataSource.getDialectName()) {
                    case SQL_SERVER:
                        // TMDM-8144: Don't index field name on SQL Server when size > 900
                        String maxLengthStr = indexedField.getType()
                                .<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                        if (maxLengthStr == null) { // go up the type inheritance tree to find max length annotation
                            TypeMetadata type = indexedField.getType();
                            while (!XMLConstants.W3C_XML_SCHEMA_NS_URI.equals(type.getNamespace())
                                    && !type.getSuperTypes().isEmpty()) {
                                type = type.getSuperTypes().iterator().next();
                                maxLengthStr = type.<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                                if (maxLengthStr != null) {
                                    break;
                                }
                            }
                        }
                        if (maxLengthStr != null) {
                            Integer maxLength = Integer.parseInt(maxLengthStr);
                            if (maxLength > 900) {
                                LOGGER.warn("Skip index on field '" + indexedField.getPath() //$NON-NLS-1$
                                        + "' (too long value)."); //$NON-NLS-1$
                                continue;
                            }
                        }
                        break;
                    case H2:
                    case MYSQL:
                    case POSTGRES:
                    case DB2:
                    case ORACLE_10G:
                    default:
                        // Nothing to do for these databases
                        break;
                    }
                    databaseIndexedFields.add(databaseField);
                    if (!databaseField.getContainingType().isInstantiable()) {
                        Collection<ComplexTypeMetadata> roots = RecommendedIndexes
                                .getRoots(optimizedExpression);
                        for (ComplexTypeMetadata root : roots) {
                            List<FieldMetadata> path = StorageMetadataUtils.path(
                                    mappingRepository.getMappingFromUser(root).getDatabase(), databaseField);
                            if (path.size() > 1) {
                                databaseIndexedFields.addAll(path.subList(0, path.size() - 1));
                            } else {
                                LOGGER.warn("Failed to properly index field '" + databaseField + "'."); //$NON-NLS-1$ //$NON-NLS-2$
                            }
                        }
                    }
                }
            }
            break;
        case STAGING:
            if (!optimizedExpressions.isEmpty()) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Ignoring " + optimizedExpressions.size() //$NON-NLS-1$
                            + " to optimize (disabled on staging area)."); //$NON-NLS-1$
                }
            }
            // Adds "staging status" / "staging block key" / "staging task id" as indexed fields
            for (TypeMapping typeMapping : mappingRepository.getAllTypeMappings()) {
                ComplexTypeMetadata database = typeMapping.getDatabase();
                if (database.hasField(METADATA_STAGING_STATUS)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_STATUS));
                }
                if (database.hasField(METADATA_STAGING_BLOCK_KEY)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_BLOCK_KEY));
                }
                if (database.hasField(METADATA_TASK_ID)) {
                    databaseIndexedFields.add(database.getField(METADATA_TASK_ID));
                }
            }
            break;
        case SYSTEM: // Nothing to index on SYSTEM
            break;
        }
        // Don't add FK in indexes if using H2
        if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.H2) {
            Iterator<FieldMetadata> indexedFields = databaseIndexedFields.iterator();
            while (indexedFields.hasNext()) {
                FieldMetadata field = indexedFields.next();
                if (field instanceof ReferenceFieldMetadata || field.isKey()) {
                    indexedFields.remove(); // H2 doesn't like indexes on PKs or FKs.
                }
            }
        }
        switch (dataSource.getDialectName()) {
        case ORACLE_10G:
            tableResolver = new OracleStorageTableResolver(databaseIndexedFields,
                    dataSource.getNameMaxLength());
            break;
        default:
            tableResolver = new StorageTableResolver(databaseIndexedFields, dataSource.getNameMaxLength());
        }
        storageClassLoader.setTableResolver(tableResolver);
        // Master, Staging and System share same class creator.
        switch (storageType) {
        case MASTER:
        case STAGING:
        case SYSTEM:
            hibernateClassCreator = new ClassCreator(storageClassLoader);
            break;
        }
        // Create Hibernate classes (after some modifications to the types).
        try {
            internalRepository.accept(hibernateClassCreator);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during dynamic classes creation.", e); //$NON-NLS-1$
        }
        // Last step: configuration of Hibernate
        try {
            // Hibernate needs to have dynamic classes in context class loader during configuration.
            InputStream ehCacheConfig = storageClassLoader
                    .getResourceAsStream(StorageClassLoader.EHCACHE_XML_CONFIG);
            if (ehCacheConfig != null) {
                CacheManager.create(ehCacheConfig);
            }
            configuration.configure(StorageClassLoader.HIBERNATE_CONFIG);
            batchSize = Integer.parseInt(configuration.getProperty(Environment.STATEMENT_BATCH_SIZE));
            // Sets default schema for Oracle
            Properties properties = configuration.getProperties();
            if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.ORACLE_10G) {
                properties.setProperty(Environment.DEFAULT_SCHEMA, dataSource.getUserName());
            }
            // Logs DDL *before* initialization in case initialization fails (useful for debugging).
            if (LOGGER.isTraceEnabled()) {
                traceDDL();
            }
            // Customize schema generation according to datasource content.
            RDBMSDataSource.SchemaGeneration schemaGeneration = dataSource.getSchemaGeneration();
            List exceptions = Collections.emptyList();
            switch (schemaGeneration) {
            case CREATE:
                SchemaExport schemaExport = new SchemaExport(configuration);
                schemaExport.create(false, true);
                // Exceptions may happen during recreation (Hibernate may execute statements on tables that do
                // not exist): these exceptions are supposed to be harmless (but log them at DEBUG just in case).
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Exception(s) occurred during schema creation:"); //$NON-NLS-1$
                    for (Object exceptionObject : schemaExport.getExceptions()) {
                        LOGGER.debug(((Exception) exceptionObject).getMessage());
                    }
                }
                break;
            case VALIDATE:
                SchemaValidator schemaValidator = new SchemaValidator(configuration);
                schemaValidator.validate(); // This is supposed to throw exception on validation issue.
                break;
            case UPDATE:
                SchemaUpdate schemaUpdate = new SchemaUpdate(configuration);
                schemaUpdate.execute(false, true);
                exceptions = schemaUpdate.getExceptions();
                break;
            }
            // Throw an exception if schema update met issue(s).
            if (!exceptions.isEmpty()) {
                StringBuilder sb = new StringBuilder();
                sb.append("Could not prepare database schema: "); //$NON-NLS-1$
                Iterator iterator = exceptions.iterator();
                while (iterator.hasNext()) {
                    Exception exception = (Exception) iterator.next();
                    if (exception instanceof SQLException) {
                        SQLException currentSQLException = (SQLException) exception;
                        while (currentSQLException != null) {
                            sb.append(currentSQLException.getMessage());
                            sb.append('\n');
                            currentSQLException = currentSQLException.getNextException();
                        }
                    } else if (exception != null) {
                        sb.append(exception.getMessage());
                    }
                    if (iterator.hasNext()) {
                        sb.append('\n');
                    }
                }
                throw new IllegalStateException(sb.toString());
            }
            // Initialize Hibernate
            Environment.verifyProperties(properties);
            ConfigurationHelper.resolvePlaceHolders(properties);
            ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(properties)
                    .build();
            factory = configuration.buildSessionFactory(serviceRegistry);
            MDMTransactionSessionContext.declareStorage(this, factory);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during Hibernate initialization.", e); //$NON-NLS-1$
        }
        // All set: set prepared flag to true.
        isPrepared = true;
        LOGGER.info("Storage '" + storageName + "' (" + storageType + ") is ready."); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    } catch (Throwable t) {
        try {
            // This prevents PermGen OOME in case of multiple failures to start.
            close();
        } catch (Exception e) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Error occurred during clean up following failed prepare", e); //$NON-NLS-1$
            }
        }
        throw new RuntimeException("Could not prepare '" + storageName + "'.", t); //$NON-NLS-1$ //$NON-NLS-2$
    } finally {
        Thread.currentThread().setContextClassLoader(contextClassLoader);
    }
}