Example usage for org.hibernate.tool.hbm2ddl.SchemaExport SchemaExport()

List of usage examples for the org.hibernate.tool.hbm2ddl.SchemaExport constructor SchemaExport()

Introduction

On this page you can find example usages of the org.hibernate.tool.hbm2ddl.SchemaExport constructor, SchemaExport().

Prototype

SchemaExport()
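
A minimal, self-contained sketch of how the no-argument SchemaExport() constructor is typically combined with MetadataSources and TargetType on the Hibernate 5.2+ API. It is not taken from the examples below; the H2 dialect setting, the commented-out entity registration, and the output file name "create.sql" are assumptions to adapt to your own setup.

import java.util.EnumSet;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;

public class SchemaExportSketch {

    public static void main(String[] args) {
        // The dialect is set explicitly, so no live database connection is
        // needed when only TargetType.SCRIPT is targeted.
        StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
                .applySetting("hibernate.dialect", "org.hibernate.dialect.H2Dialect")
                .build();
        try {
            Metadata metadata = new MetadataSources(registry)
                    // .addAnnotatedClass(MyEntity.class) // register your entities here
                    .buildMetadata();

            SchemaExport export = new SchemaExport(); // the constructor documented on this page
            export.setDelimiter(";");
            export.setFormat(true);
            export.setOutputFile("create.sql");

            // Write the CREATE statements to the script file only.
            export.createOnly(EnumSet.of(TargetType.SCRIPT), metadata);
        } finally {
            StandardServiceRegistryBuilder.destroy(registry);
        }
    }
}

The drop(...), createOnly(...) and execute(...) variants used in the examples below follow the same pattern: build a Metadata instance, then pass it to a SchemaExport created with the no-argument constructor.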

Usage

From source file:com.abcanthur.website.codegenhack.JPADatabase.java

License:Apache License

@SuppressWarnings("serial")
@Override
protected DSLContext create0() {
    if (connection == null) {
        String packages = getProperties().getProperty("packages");

        if (isBlank(packages)) {
            packages = "";
            log.warn("No packages defined",
                    "It is highly recommended that you provide explicit packages to scan");
        }

        try {
            connection = DriverManager.getConnection("jdbc:h2:mem:jooq-meta-extensions", "sa", "");

            MetadataSources metadata = new MetadataSources(new StandardServiceRegistryBuilder()
                    .applySetting("hibernate.dialect", "org.hibernate.dialect.H2Dialect")
                    .applySetting("javax.persistence.schema-generation-connection", connection)

                    // [#5607] JPADatabase causes warnings - This prevents them
                    .applySetting(AvailableSettings.CONNECTION_PROVIDER,
                            "com.abcanthur.website.codegenhack.CustomConnectionProvider")
                    .build());

            ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(
                    true);

            scanner.addIncludeFilter(new AnnotationTypeFilter(Entity.class));
            for (String pkg : packages.split(","))
                for (BeanDefinition def : scanner.findCandidateComponents(defaultIfBlank(pkg, "").trim()))
                    metadata.addAnnotatedClass(Class.forName(def.getBeanClassName()));

            // This seems to be the way to do this in idiomatic Hibernate 5.0 API
            // See also: http://stackoverflow.com/q/32178041/521799
            // SchemaExport export = new SchemaExport((MetadataImplementor) metadata.buildMetadata(), connection);
            // export.create(true, true);

            // Hibernate 5.2 broke 5.0 API again. Here's how to do this now:
            SchemaExport export = new SchemaExport();
            export.create(EnumSet.of(TargetType.DATABASE), metadata.buildMetadata());
        } catch (Exception e) {
            throw new DataAccessException("Error while exporting schema", e);
        }
    }

    return DSL.using(connection);
}

From source file:com.evolveum.midpoint.repo.sql.SchemaTest.java

License:Apache License

private void createSQLSchema(String fileName, String dialect) {
    File file = new File(fileName);
    if (file.exists()) {
        file.delete();
    }

    MetadataSources metadata = new MetadataSources(new StandardServiceRegistryBuilder()
            .applySetting("hibernate.implicit_naming_strategy", new MidPointImplicitNamingStrategy())
            .applySetting("hibernate.physical_naming_strategy", new MidPointPhysicalNamingStrategy())
            .applySetting("hibernate.dialect", dialect).build());

    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.container", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.any", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.embedded", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.enums", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.id", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.other", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.type", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.audit", metadata);

    metadata.addPackage("com.evolveum.midpoint.repo.sql.type");

    SchemaExport export = new SchemaExport();
    export.setOutputFile(fileName);
    export.setDelimiter(";");
    //        export.setFormat(true);
    export.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.CREATE, metadata.buildMetadata());
}

From source file:com.foilen.smalltools.tools.Hibernate51Tools.java

License:Open Source License

/**
 * Generate the SQL file. This is based on the code in {@link LocalSessionFactoryBuilder#scanPackages(String...)}
 *
 * @param dialect
 *            the dialect (e.g. org.hibernate.dialect.MySQL5InnoDBDialect)
 * @param outputSqlFile
 *            where to put the generated SQL file
 * @param useUnderscore
 *            true: to have table names like "employe_manager"; false: to have table names like "employeManager"
 * @param packagesToScan
 *            the packages where your entities are
 */
public static void generateSqlSchema(Class<? extends Dialect> dialect, String outputSqlFile,
        boolean useUnderscore, String... packagesToScan) {

    BootstrapServiceRegistry bootstrapServiceRegistry = new BootstrapServiceRegistryBuilder().build();

    MetadataSources metadataSources = new MetadataSources(bootstrapServiceRegistry);

    ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(
            false);
    scanner.addIncludeFilter(new AnnotationTypeFilter(Entity.class));
    scanner.addIncludeFilter(new AnnotationTypeFilter(Embeddable.class));
    scanner.addIncludeFilter(new AnnotationTypeFilter(MappedSuperclass.class));
    for (String pkg : packagesToScan) {
        for (BeanDefinition beanDefinition : scanner.findCandidateComponents(pkg)) {
            metadataSources.addAnnotatedClassName(beanDefinition.getBeanClassName());
        }
    }

    StandardServiceRegistryBuilder standardServiceRegistryBuilder = new StandardServiceRegistryBuilder(
            bootstrapServiceRegistry);
    standardServiceRegistryBuilder.applySetting(AvailableSettings.DIALECT, dialect.getName());
    StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) standardServiceRegistryBuilder.build();
    MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder(ssr);

    if (useUnderscore) {
        metadataBuilder.applyImplicitNamingStrategy(new SpringImplicitNamingStrategy());
        metadataBuilder.applyPhysicalNamingStrategy(new SpringPhysicalNamingStrategy());
    }

    new SchemaExport() //
            .setHaltOnError(true) //
            .setOutputFile(outputSqlFile) //
            .setFormat(true) //
            .setDelimiter(";") //
            .execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.CREATE, metadataBuilder.build());

}

From source file:com.imos.sample.service.HibernateService.java

/**
 * Hibernate configuration.
 *
 * @throws RepositoryException
 */
public void config() throws RepositoryException {
    try {
        StandardServiceRegistryBuilder registryBuilder = new StandardServiceRegistryBuilder();
        if (filePath == null || filePath.isEmpty()) {
            registryBuilder = registryBuilder.configure();
        } else {
            registryBuilder = registryBuilder.configure(filePath);
        }
        registry = registryBuilder.build();

        MetadataSources metaData = new MetadataSources(registry);
        sessionFactory = metaData.buildMetadata().buildSessionFactory();
        session = sessionFactory.openSession();

        SchemaExport schemaExport = new SchemaExport();
        schemaExport.setDelimiter(";");
        schemaExport.setFormat(true);
        schemaExport.setManageNamespaces(true);
        schemaExport.setOutputFile("./ddl_skilldb.sql");
        schemaExport.execute(EnumSet.of(TargetType.SCRIPT, TargetType.DATABASE, TargetType.STDOUT),
                SchemaExport.Action.CREATE, metaData.buildMetadata(registry), registry);

        log.info("Configuration succeed");
    } catch (HibernateException e) {
        StandardServiceRegistryBuilder.destroy(registry);
        log.error("Configuration failed : {}", e);
    }
}

From source file:com.oneandone.relesia.tools.SQLScriptGenerator.java

License:Apache License

public static void main(String[] args) throws MappingException, IOException {

    String createSQLFile = "dbscripts/createTables.sql";
    String dropSQLFile = "dbscripts/dropTables.sql";
    String hibernateCfgFile = "/db/hibernate.cfg.xml";

    final EnumSet<TargetType> targetTypes = EnumSet.noneOf(TargetType.class);
    targetTypes.add(TargetType.SCRIPT);

    System.out.println("Initialize Hibernate configuration from " + hibernateCfgFile);

    Configuration cfg = new Configuration().configure(hibernateCfgFile);
    Metadata metadata = MetadataHelper.getMetadata(cfg);

    SchemaExport export = new SchemaExport();
    export.setHaltOnError(true);
    export.setFormat(true);
    export.setDelimiter(";");

    System.out.println("Generating create SQL to file " + createSQLFile);
    if (new File(createSQLFile).exists()) {
        Files.delete(Paths.get(createSQLFile));
    }
    export.setOutputFile(createSQLFile);
    export.execute(targetTypes, Action.CREATE, metadata);

    System.out.println("Generating drop SQL to file " + dropSQLFile);
    export.setOutputFile(dropSQLFile);
    if (new File(dropSQLFile).exists()) {
        Files.delete(Paths.get(dropSQLFile));
    }
    export.execute(targetTypes, Action.DROP, metadata);

    System.out.println("Done!");
}

From source file:com.yahoo.elide.datastores.hibernate5.HibernateEntityManagerDataStoreSupplier.java

License:Apache License

@Override
public DataStore get() {
    // Add additional checks to our static check mappings map.
    // NOTE: This is a bit hacky. We need to do a major overhaul on our test architecture
    TestCheckMappings.MAPPINGS.put("filterCheck", Filtered.FilterCheck.class);
    TestCheckMappings.MAPPINGS.put("filterCheck3", Filtered.FilterCheck3.class);

    Map<String, Object> options = new HashMap<>();
    ArrayList<Class> bindClasses = new ArrayList<>();

    try {
        bindClasses.addAll(ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class));
    } catch (MappingException e) {
        throw new IllegalStateException(e);
    }

    options.put("javax.persistence.jdbc.driver", "com.mysql.jdbc.Driver");
    options.put("javax.persistence.jdbc.url",
            JDBC_PREFIX + System.getProperty(MYSQL_PORT_PROPERTY, MYSQL_PORT) + JDBC_SUFFIX);
    options.put("javax.persistence.jdbc.user", ROOT);
    options.put("javax.persistence.jdbc.password", ROOT);
    options.put(AvailableSettings.LOADED_CLASSES, bindClasses);

    EntityManagerFactory emf = Persistence.createEntityManagerFactory("elide-tests", options);
    HibernateEntityManager em = (HibernateEntityManager) emf.createEntityManager();

    // method to force class initialization
    MetadataSources metadataSources = new MetadataSources(new StandardServiceRegistryBuilder()
            .configure("hibernate.cfg.xml").applySetting(Environment.CURRENT_SESSION_CONTEXT_CLASS, "thread")
            .applySetting(Environment.URL,
                    JDBC_PREFIX + System.getProperty(MYSQL_PORT_PROPERTY, MYSQL_PORT) + JDBC_SUFFIX)
            .applySetting(Environment.USER, ROOT).applySetting(Environment.PASS, ROOT).build());

    try {
        ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class)
                .forEach(metadataSources::addAnnotatedClass);
    } catch (MappingException e) {
        throw new IllegalStateException(e);
    }

    MetadataImplementor metadataImplementor = (MetadataImplementor) metadataSources.buildMetadata();

    EnumSet<TargetType> type = EnumSet.of(TargetType.DATABASE);
    // create example tables from beans
    SchemaExport schemaExport = new SchemaExport();
    schemaExport.drop(type, metadataImplementor);
    schemaExport.execute(type, SchemaExport.Action.CREATE, metadataImplementor);

    if (!schemaExport.getExceptions().isEmpty()) {
        throw new IllegalStateException(schemaExport.getExceptions().toString());
    }

    return new AbstractHibernateStore.Builder(em).withScrollEnabled(true)
            .withScrollMode(ScrollMode.FORWARD_ONLY).build();
}

From source file:de.jpdigital.maven.plugins.hibernate5ddl.GenerateDdlMojo.java

License:Open Source License

/**
 * Helper method for generating the DDL file for a specific dialect. This
 * is the place where the real work is done. The method first creates a
 * {@link StandardServiceRegistryBuilder} and applies the appropriate
 * settings to it. It then creates an instance of the {@link SchemaExport}
 * class from the Hibernate API and configures it, for example by setting
 * {@code format} to {@code true} so that the generated SQL files are
 * formatted nicely. After that it calls the
 * {@link SchemaExport#execute(EnumSet, SchemaExport.Action, Metadata)}
 * method, which creates the SQL script file. The method is invoked in a
 * way that requires <em>no</em> database connection.
 *
 * @param dialect       The dialect for which the DDL file is generated.
 * @param entityClasses The entity classes for which the DDL file is
 *                      generated.
 *
 * @throws MojoFailureException if something goes wrong.
 */
private void generateDdl(final Dialect dialect, final Set<Class<?>> entityClasses) throws MojoFailureException {

    final StandardServiceRegistryBuilder registryBuilder = new StandardServiceRegistryBuilder();
    processPersistenceXml(registryBuilder);

    if (createDropStatements) {
        registryBuilder.applySetting("hibernate.hbm2ddl.auto", "create-drop");
    } else {
        registryBuilder.applySetting("hibernate.hbm2ddl.auto", "create");
    }

    registryBuilder.applySetting("hibernate.dialect", dialect.getDialectClass());

    final StandardServiceRegistry standardRegistry = registryBuilder.build();

    final MetadataSources metadataSources = new MetadataSources(standardRegistry);

    for (final Class<?> entityClass : entityClasses) {
        metadataSources.addAnnotatedClass(entityClass);
    }

    final SchemaExport export = new SchemaExport();
    //        final SchemaExport export = new SchemaExport(
    //            (MetadataImplementor) metadata, true);
    export.setDelimiter(";");

    final Path tmpDir;
    try {
        tmpDir = Files.createTempDirectory("maven-hibernate5-ddl-plugin");
    } catch (IOException ex) {
        throw new MojoFailureException("Failed to create work dir.", ex);
    }

    final Metadata metadata = metadataSources.buildMetadata();

    export.setOutputFile(
            String.format("%s/%s.sql", tmpDir.toString(), dialect.name().toLowerCase(Locale.ENGLISH)));
    export.setFormat(true);
    if (createDropStatements) {
        export.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.BOTH, metadata);
    } else {
        export.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.CREATE, metadata);
    }

    writeOutputFile(dialect, tmpDir);
}

From source file:org.geosdi.geoplatform.persistence.configuration.hibernate.export.GPHibernateSchemaExport.java

License:Open Source License

@Override
protected void createSchema() {
    if ((this.generateSchema != null) && (this.generateSchema.equalsIgnoreCase("generate"))) {
        Set<Class<?>> annotatedClasses = reflectionsSchemaExport.getAnnotatedClasses();

        if (annotatedClasses.isEmpty()) {
            throw new IllegalStateException("There are no Classes Annotated with" + " @Entity Annotations.");
        }

        ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder()
                .applySettings(hibernateProperties).build();
        MetadataSources metadataSource = new MetadataSources(serviceRegistry);
        for (Class<?> classe : annotatedClasses) {
            metadataSource.addAnnotatedClass(classe);
        }
        MetadataImplementor metadata = (MetadataImplementor) metadataSource.buildMetadata();
        schema = new SchemaExport();
        super.exportSchema(metadata);
    }
}

From source file:org.glite.security.voms.admin.persistence.deployer.SchemaDeployer.java

License:Apache License

private void doUndeploy() {

    checkVoExistence();

    log.info("Undeploying voms database...");

    int existingDB = checkDatabaseExistence();

    if (existingDB == 1) {
        log.error(
                "This tool cannot undeploy voms-admin 1.2.x database! Please upgrade to voms-admin 2 or use voms-admin-configure 1.2.x tools to undeploy this database.");
        System.exit(-1);
    }

    if (existingDB == 2) {

        log.error(
                "This tool cannot undeploy voms-admin 2.0.x databases! Please either upgrade the database to voms-admin 2.5 (using this tool) or use voms-admin-configure 2.0.x"
                        + " tools to undeploy this database");

        System.exit(-1);
    }
    if (existingDB < 0) {

        log.error("No voms-admin database found!");
        System.exit(-1);
    }

    checkDatabaseWritable();

    SchemaExport export = new SchemaExport();

    EnumSet<TargetType> targetTypes = EnumSet.of(TargetType.DATABASE);

    Metadata md = getHibernateMetadataSources().getMetadataBuilder().build();
    export.drop(targetTypes, md);

    @SuppressWarnings("rawtypes")
    List l = export.getExceptions();

    if (!l.isEmpty()) {
        log.error("Error undeploying voms database!");
        printExceptions(l);
        System.exit(2);
    }

    log.info("Database undeployed correctly!");

}

From source file:org.glite.security.voms.admin.persistence.deployer.SchemaDeployer.java

License:Apache License

private void doDeploy() {

    checkVoExistence();

    int existingDb = checkDatabaseExistence();

    if (existingDb > 0) {
        final String adminDbVersion = VOMSVersionDAO.instance().getVersion().getAdminVersion().trim();

        log.warn("Existing voms database found. Will not overwrite " + "the database! (admin db version: {})",
                adminDbVersion);
        System.exit(0);
    }

    checkDatabaseWritable();

    SchemaExport exporter = new SchemaExport();

    EnumSet<TargetType> targetTypes = EnumSet.of(TargetType.DATABASE);
    exporter.createOnly(targetTypes, HibernateFactory.getMetadata());

    log.info("Deploying voms database...");

    @SuppressWarnings("rawtypes")
    List l = exporter.getExceptions();

    if (!l.isEmpty()) {
        log.error("Error deploying voms database!");
        printExceptions(l);
        System.exit(2);

    }

    // This is needed as the version of hibernate we are using
    // does not support defining indexes on join table columns
    // See: https://hibernate.atlassian.net/browse/HHH-4263
    CreateAuditEventDataIndexes createIndexTask = new CreateAuditEventDataIndexes(
            HibernateFactory.getSession());

    HibernateFactory.beginTransaction();

    createIndexTask.run();

    CreateAttributeValueIndex avIndexTask = new CreateAttributeValueIndex(HibernateFactory.getSession());

    avIndexTask.run();

    UpdateCATask caTask = new UpdateCATask();
    caTask.run();

    DatabaseSetupTask task = DatabaseSetupTask.instance();
    task.run();

    HibernateFactory.commitTransaction();
    log.info("Database deployed correctly!");

}