Example usage for org.hibernate.tool.hbm2ddl SchemaExport execute

List of usage examples for org.hibernate.tool.hbm2ddl SchemaExport execute

Introduction

On this page you can find example usage for org.hibernate.tool.hbm2ddl SchemaExport execute.

Prototype

public void execute(EnumSet<TargetType> targetTypes, Action action, Metadata metadata) 

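Before the full examples below, here is a minimal sketch of a typical call. It assumes a hypothetical annotated entity class com.example.MyEntity and the H2 dialect; TargetType comes from org.hibernate.tool.schema, and Action is the nested SchemaExport.Action enum.

import java.util.EnumSet;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.tool.hbm2ddl.SchemaExport;
import org.hibernate.tool.schema.TargetType;

public class SchemaExportSketch {
    public static void main(String[] args) {
        // Build the metadata from a hypothetical annotated entity class.
        Metadata metadata = new MetadataSources(new StandardServiceRegistryBuilder()
                .applySetting("hibernate.dialect", "org.hibernate.dialect.H2Dialect").build())
                .addAnnotatedClass(com.example.MyEntity.class)
                .buildMetadata();

        SchemaExport export = new SchemaExport();
        export.setDelimiter(";");
        export.setOutputFile("create.sql");
        // TargetType.SCRIPT writes the DDL to create.sql only; no database connection is opened.
        export.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.CREATE, metadata);
    }
}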

Usage

From source file:com.evolveum.midpoint.repo.sql.SchemaTest.java

License:Apache License

private void createSQLSchema(String fileName, String dialect) {
    File file = new File(fileName);
    if (file.exists()) {
        file.delete();
    }

    MetadataSources metadata = new MetadataSources(new StandardServiceRegistryBuilder()
            .applySetting("hibernate.implicit_naming_strategy", new MidPointImplicitNamingStrategy())
            .applySetting("hibernate.physical_naming_strategy", new MidPointPhysicalNamingStrategy())
            .applySetting("hibernate.dialect", dialect).build());

    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.container", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.any", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.embedded", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.enums", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.id", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.other", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.common.type", metadata);
    addAnnotatedClasses("com.evolveum.midpoint.repo.sql.data.audit", metadata);

    metadata.addPackage("com.evolveum.midpoint.repo.sql.type");

    SchemaExport export = new SchemaExport();
    export.setOutputFile(fileName);
    export.setDelimiter(";");
    //        export.setFormat(true);
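    // TargetType.SCRIPT writes the generated DDL to the output file only; no database connection is opened.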
    export.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.CREATE, metadata.buildMetadata());
}

From source file:com.oneandone.relesia.tools.SQLScriptGenerator.java

License:Apache License

public static void main(String[] args) throws MappingException, IOException {

    String createSQLFile = "dbscripts/createTables.sql";
    String dropSQLFile = "dbscripts/dropTables.sql";
    String hibernateCfgFile = "/db/hibernate.cfg.xml";

    final EnumSet<TargetType> targetTypes = EnumSet.noneOf(TargetType.class);
    targetTypes.add(TargetType.SCRIPT);

    System.out.println("Initialize Hibernate configuration from " + hibernateCfgFile);

    Configuration cfg = new Configuration().configure(hibernateCfgFile);
    Metadata metadata = MetadataHelper.getMetadata(cfg);

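    // Configure one SchemaExport instance and reuse it for both the create and the drop script.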
    SchemaExport export = new SchemaExport();
    export.setHaltOnError(true);
    export.setFormat(true);
    export.setDelimiter(";");

    System.out.println("Generating create SQL to file " + createSQLFile);
    if (new File(createSQLFile).exists()) {
        Files.delete(Paths.get(createSQLFile));
    }
    export.setOutputFile(createSQLFile);
    export.execute(targetTypes, Action.CREATE, metadata);

    System.out.println("Generating drop SQL to file " + dropSQLFile);
    export.setOutputFile(dropSQLFile);
    if (new File(dropSQLFile).exists()) {
        Files.delete(Paths.get(dropSQLFile));
    }
    export.execute(targetTypes, Action.DROP, metadata);

    System.out.println("Done!");
}

From source file:com.yahoo.elide.datastores.hibernate5.HibernateEntityManagerDataStoreSupplier.java

License:Apache License

@Override
public DataStore get() {
    // Add additional checks to our static check mappings map.
    // NOTE: This is a bit hacky. We need to do a major overhaul on our test architecture
    TestCheckMappings.MAPPINGS.put("filterCheck", Filtered.FilterCheck.class);
    TestCheckMappings.MAPPINGS.put("filterCheck3", Filtered.FilterCheck3.class);

    Map<String, Object> options = new HashMap<>();
    ArrayList<Class> bindClasses = new ArrayList<>();

    try {
        bindClasses.addAll(ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class));
    } catch (MappingException e) {
        throw new IllegalStateException(e);
    }

    options.put("javax.persistence.jdbc.driver", "com.mysql.jdbc.Driver");
    options.put("javax.persistence.jdbc.url",
            JDBC_PREFIX + System.getProperty(MYSQL_PORT_PROPERTY, MYSQL_PORT) + JDBC_SUFFIX);
    options.put("javax.persistence.jdbc.user", ROOT);
    options.put("javax.persistence.jdbc.password", ROOT);
    options.put(AvailableSettings.LOADED_CLASSES, bindClasses);

    EntityManagerFactory emf = Persistence.createEntityManagerFactory("elide-tests", options);
    HibernateEntityManager em = (HibernateEntityManager) emf.createEntityManager();

    // method to force class initialization
    MetadataSources metadataSources = new MetadataSources(new StandardServiceRegistryBuilder()
            .configure("hibernate.cfg.xml").applySetting(Environment.CURRENT_SESSION_CONTEXT_CLASS, "thread")
            .applySetting(Environment.URL,
                    JDBC_PREFIX + System.getProperty(MYSQL_PORT_PROPERTY, MYSQL_PORT) + JDBC_SUFFIX)
            .applySetting(Environment.USER, ROOT).applySetting(Environment.PASS, ROOT).build());

    try {
        ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class)
                .forEach(metadataSources::addAnnotatedClass);
    } catch (MappingException e) {
        throw new IllegalStateException(e);
    }

    MetadataImplementor metadataImplementor = (MetadataImplementor) metadataSources.buildMetadata();

    EnumSet<TargetType> type = EnumSet.of(TargetType.DATABASE);
    // create example tables from beans
    SchemaExport schemaExport = new SchemaExport();
    schemaExport.drop(type, metadataImplementor);
    schemaExport.execute(type, SchemaExport.Action.CREATE, metadataImplementor);

    if (!schemaExport.getExceptions().isEmpty()) {
        throw new IllegalStateException(schemaExport.getExceptions().toString());
    }

    return new AbstractHibernateStore.Builder(em).withScrollEnabled(true)
            .withScrollMode(ScrollMode.FORWARD_ONLY).build();
}

From source file:de.jpdigital.maven.plugins.hibernate5ddl.GenerateDdlMojo.java

License:Open Source License

/**
 * Helper method for generating the DDL file for a specific dialect. This
 * is where the real work is done. The method first creates a Hibernate
 * {@link StandardServiceRegistryBuilder} and puts the appropriate
 * settings into it, then registers the entity classes with a
 * {@link MetadataSources} instance. It then creates an instance of the
 * {@link SchemaExport} class from the Hibernate API and configures it,
 * for example by setting {@code format} to {@code true} so that the
 * generated SQL files are formatted nicely. After that it calls the
 * {@link SchemaExport#execute(EnumSet, Action, Metadata)} method,
 * which creates the SQL script file. The method is called in a way
 * which requires <em>no</em> database connection.
 *
 * @param dialect       The dialect for which the DDL file is generated.
 * @param entityClasses The entity classes for which the DDL file is
 *                      generated.
 *
 * @throws MojoFailureException if something goes wrong.
 */
private void generateDdl(final Dialect dialect, final Set<Class<?>> entityClasses) throws MojoFailureException {

    final StandardServiceRegistryBuilder registryBuilder = new StandardServiceRegistryBuilder();
    processPersistenceXml(registryBuilder);

    if (createDropStatements) {
        registryBuilder.applySetting("hibernate.hbm2ddl.auto", "create-drop");
    } else {
        registryBuilder.applySetting("hibernate.hbm2ddl.auto", "create");
    }

    registryBuilder.applySetting("hibernate.dialect", dialect.getDialectClass());

    final StandardServiceRegistry standardRegistry = registryBuilder.build();

    final MetadataSources metadataSources = new MetadataSources(standardRegistry);

    for (final Class<?> entityClass : entityClasses) {
        metadataSources.addAnnotatedClass(entityClass);
    }

    final SchemaExport export = new SchemaExport();
    //        final SchemaExport export = new SchemaExport(
    //            (MetadataImplementor) metadata, true);
    export.setDelimiter(";");

    final Path tmpDir;
    try {
        tmpDir = Files.createTempDirectory("maven-hibernate5-ddl-plugin");
    } catch (IOException ex) {
        throw new MojoFailureException("Failed to create work dir.", ex);
    }

    final Metadata metadata = metadataSources.buildMetadata();

    export.setOutputFile(
            String.format("%s/%s.sql", tmpDir.toString(), dialect.name().toLowerCase(Locale.ENGLISH)));
    export.setFormat(true);
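    // Action.BOTH writes drop statements followed by create statements into the same script file.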
    if (createDropStatements) {
        export.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.BOTH, metadata);
    } else {
        export.execute(EnumSet.of(TargetType.SCRIPT), SchemaExport.Action.CREATE, metadata);
    }

    writeOutputFile(dialect, tmpDir);
}

From source file:org.springframework.cloud.dataflow.server.repository.SchemaGenerationTests.java

License:Apache License

private void generateDdlFiles(String dialect, File tempDir, PersistenceUnitInfo persistenceUnitInfo) {
    logger.info("Generating DDL script for " + dialect);

    final MetadataSources metadata = new MetadataSources(new StandardServiceRegistryBuilder()
            .applySetting("hibernate.dialect", "org.hibernate.dialect." + dialect + "Dialect")
            .applySetting("hibernate.physical_naming_strategy", SpringPhysicalNamingStrategy.class.getName())
            .applySetting("hibernate.implicit_naming_strategy", SpringImplicitNamingStrategy.class.getName())
            .build());

    for (String clazz : persistenceUnitInfo.getManagedClassNames()) {
        logger.info(clazz);
        metadata.addAnnotatedClassName(clazz);
    }

    final SchemaExport export;
    try {
        export = new SchemaExport();
        export.setDelimiter(";");
        export.setFormat(true);
        export.setOutputFile(new File(tempDir, "schema-" + dialect.toLowerCase() + ".sql").getAbsolutePath());
    } catch (HibernateException e) {
        throw new IllegalStateException(e);
    }
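    // Emit both drop and create statements to the script file only; no live database connection is used.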
    EnumSet<TargetType> targetTypes = EnumSet.of(TargetType.SCRIPT);
    export.execute(targetTypes, SchemaExport.Action.BOTH, metadata.buildMetadata());
}