Example usage for org.hibernate.tool.hbm2ddl SchemaExport setDelimiter

List of usage examples for org.hibernate.tool.hbm2ddl SchemaExport setDelimiter

Introduction

On this page you can find example usage for org.hibernate.tool.hbm2ddl SchemaExport setDelimiter.

Prototype

public SchemaExport setDelimiter(String delimiter) 

Document

Set the end of statement delimiter
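
Before the collected examples, here is a minimal sketch (not taken from the sources below) of where setDelimiter fits in a typical export. It assumes the Hibernate 3.x/4.x-style API and a hibernate.cfg.xml with dialect and mappings on the classpath; the class and file names are illustrative:

import org.hibernate.cfg.Configuration;
import org.hibernate.tool.hbm2ddl.SchemaExport;

public class SchemaExportDelimiterSketch {
    public static void main(String[] args) {
        // Assumes hibernate.cfg.xml supplies the dialect and mapped classes
        Configuration configuration = new Configuration().configure();

        SchemaExport schemaExport = new SchemaExport(configuration);
        schemaExport.setOutputFile("schema.sql"); // where the generated DDL script is written
        schemaExport.setDelimiter(";");           // terminate each generated statement with ";"
        schemaExport.setFormat(true);             // pretty-print the SQL
        // script=true: echo the DDL; export=false: do not run it against a database
        schemaExport.create(true, false);
    }
}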

Usage

From source file:com.foilen.smalltools.tools.Hibernate4Tools.java

License:Open Source License

/**
 * Generate the SQL file. This is based on the code in {@link LocalSessionFactoryBuilder#scanPackages(String...)}
 *
 * @param dialect
 *            the dialect (e.g. org.hibernate.dialect.MySQL5InnoDBDialect)
 * @param outputSqlFile
 *            where to put the generated SQL file
 * @param useUnderscore
 *            true to have table names like "employe_manager"; false to have table names like "employeManager"
 * @param packagesToScan
 *            the packages where your entities are
 */
@SuppressWarnings("deprecation")
public static void generateSqlSchema(Class<? extends Dialect> dialect, String outputSqlFile,
        boolean useUnderscore, String... packagesToScan) {

    // Configuration
    Configuration configuration = new Configuration();
    if (useUnderscore) {
        configuration.setNamingStrategy(new ImprovedNamingStrategy());
    }

    Properties properties = new Properties();
    properties.setProperty(AvailableSettings.DIALECT, dialect.getName());

    // Scan packages
    Set<String> classNames = new TreeSet<String>();
    Set<String> packageNames = new TreeSet<String>();
    try {
        for (String pkg : packagesToScan) {
            String pattern = ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX
                    + ClassUtils.convertClassNameToResourcePath(pkg) + RESOURCE_PATTERN;
            Resource[] resources = resourcePatternResolver.getResources(pattern);
            MetadataReaderFactory readerFactory = new CachingMetadataReaderFactory(resourcePatternResolver);
            for (Resource resource : resources) {
                if (resource.isReadable()) {
                    MetadataReader reader = readerFactory.getMetadataReader(resource);
                    String className = reader.getClassMetadata().getClassName();
                    if (matchesEntityTypeFilter(reader, readerFactory)) {
                        classNames.add(className);
                    } else if (className.endsWith(PACKAGE_INFO_SUFFIX)) {
                        packageNames
                                .add(className.substring(0, className.length() - PACKAGE_INFO_SUFFIX.length()));
                    }
                }
            }
        }
    } catch (IOException ex) {
        throw new MappingException("Failed to scan classpath for unlisted classes", ex);
    }
    try {
        for (String className : classNames) {
            configuration.addAnnotatedClass(resourcePatternResolver.getClassLoader().loadClass(className));
        }
        for (String packageName : packageNames) {
            configuration.addPackage(packageName);
        }
    } catch (ClassNotFoundException ex) {
        throw new MappingException("Failed to load annotated classes from classpath", ex);
    }

    // Exportation
    SchemaExport schemaExport = new SchemaExport(configuration, properties);
    schemaExport.setOutputFile(outputSqlFile);
    schemaExport.setDelimiter(";");
    schemaExport.setFormat(true);
    schemaExport.execute(true, false, false, true);
}

From source file:com.foilen.smalltools.tools.Hibernate50Tools.java

License:Open Source License

/**
 * Generate the SQL file. This is based on the code in {@link LocalSessionFactoryBuilder#scanPackages(String...)}
 *
 * @param dialect
 *            the dialect (e.g. org.hibernate.dialect.MySQL5InnoDBDialect)
 * @param outputSqlFile
 *            where to put the generated SQL file
 * @param useUnderscore
 *            true to have table names like "employe_manager"; false to have table names like "employeManager"
 * @param packagesToScan
 *            the packages where your entities are
 */
public static void generateSqlSchema(Class<? extends Dialect> dialect, String outputSqlFile,
        boolean useUnderscore, String... packagesToScan) {

    BootstrapServiceRegistry bootstrapServiceRegistry = new BootstrapServiceRegistryBuilder().build();

    MetadataSources metadataSources = new MetadataSources(bootstrapServiceRegistry);

    ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider(
            false);
    scanner.addIncludeFilter(new AnnotationTypeFilter(Entity.class));
    scanner.addIncludeFilter(new AnnotationTypeFilter(Embeddable.class));
    scanner.addIncludeFilter(new AnnotationTypeFilter(MappedSuperclass.class));
    for (String pkg : packagesToScan) {
        for (BeanDefinition beanDefinition : scanner.findCandidateComponents(pkg)) {
            metadataSources.addAnnotatedClassName(beanDefinition.getBeanClassName());
        }
    }

    StandardServiceRegistryBuilder standardServiceRegistryBuilder = new StandardServiceRegistryBuilder(
            bootstrapServiceRegistry);
    standardServiceRegistryBuilder.applySetting(AvailableSettings.DIALECT, dialect.getName());
    StandardServiceRegistryImpl ssr = (StandardServiceRegistryImpl) standardServiceRegistryBuilder.build();
    MetadataBuilder metadataBuilder = metadataSources.getMetadataBuilder(ssr);

    if (useUnderscore) {
        metadataBuilder.applyImplicitNamingStrategy(new SpringImplicitNamingStrategy());
        metadataBuilder.applyPhysicalNamingStrategy(new SpringPhysicalNamingStrategy());
    }

    MetadataImpl metadata = (MetadataImpl) metadataBuilder.build();

    // Exportation
    SchemaExport schemaExport = new SchemaExport(metadata);
    schemaExport.setOutputFile(outputSqlFile);
    schemaExport.setDelimiter(";");
    schemaExport.setFormat(true);
    schemaExport.execute(true, false, false, true);
}

From source file:com.github.antennaesdk.messageserver.db.H2.H2Database.java

License:Apache License

public void generateSchemaAndCreateTables(SimpleDriverDataSource dataSource) {

    // Get the tables that are already in the DATABASE
    List<String> tables = new ArrayList<>();
    try {
        Connection connection = dataSource.getConnection();
        DatabaseMetaData databaseMetadata = connection.getMetaData();
        ResultSet resultSet = databaseMetadata.getTables(null, null, null, new String[] { "TABLE" });
        while (resultSet.next()) {
            String table = resultSet.getString(3);
            logger.info("Table : " + table + " ... exists");
            tables.add(table);
        }
    } catch (SQLException e) {
        e.printStackTrace();
    }

    // Get the tables that are needed from Entity Classes
    List<Class> tablesToCreate = new ArrayList<>();
    for (Class<?> c : entityClasses) {
        // get the table names
        Table table = c.getAnnotation(Table.class);

        logger.info("Entity: " + c.getName() + " , Table: " + table.name());
        boolean isExisting = false;
        for (String dbTable : tables) {
            if (dbTable.equals(table.name())) {
                isExisting = true;
                break;
            }
        }

        if (!isExisting) {
            // these tables must be created
            tablesToCreate.add(c);
        }
    }

    // Check whether the tables need to be created...
    if (tablesToCreate.size() == 0) {
        logger.info("Tables already exist... ");
        return;
    } else {
        logger.info("Creating tables...");
    }

    //create a minimal configuration
    org.hibernate.cfg.Configuration cfg = new org.hibernate.cfg.Configuration();
    cfg.setProperty("hibernate.dialect", "org.hibernate.dialect.H2Dialect");
    cfg.setProperty("hibernate.hbm2ddl.auto", "create");

    // create a temporary file to write the DDL
    File ddlFile = null;
    try {
        File dir = getDirectoryFromClasspath();
        ddlFile = File.createTempFile("H2_", ".SQL", dir);
        ddlFile.deleteOnExit();
    } catch (IOException e) {
        e.printStackTrace();
    }

    // add the tables to be created
    for (Class c : tablesToCreate) {
        cfg.addAnnotatedClass(c);
    }

    //build all the mappings, before calling the AuditConfiguration
    cfg.buildMappings();
    cfg.getProperties().setProperty(AvailableSettings.HBM2DDL_IMPORT_FILES, ddlFile.getName());

    cfg.getProperties().setProperty("hibernate.connection.driver_class", "org.h2.Driver");
    cfg.getProperties().setProperty("hibernate.connection.url", dataSource.getUrl());
    cfg.getProperties().setProperty("hibernate.connection.username", dataSource.getUsername());
    cfg.getProperties().setProperty("hibernate.connection.password", dataSource.getPassword());

    //execute the export
    SchemaExport export = new SchemaExport(cfg);

    export.setDelimiter(";");
    export.setFormat(true);
    // create the tables in the DB and show the DDL in console
    export.create(true, true);
}

From source file:com.github.gekoh.yagen.ddl.DDLGenerator.java

License:Apache License

public void writeDDL(Profile profile) {
    SchemaExport export = new SchemaExportFactory().createSchemaExport(profile);
    export.setDelimiter(";");
    export.setFormat(true);
    export.setOutputFile(profile.getOutputFile());
    export.execute(true, false, false, true);

    LOG.info("schema script written to file {}", profile.getOutputFile());
}

From source file:com.ikon.dao.HibernateUtil.java

License:Open Source License

/**
 * Generate database schema and initial data for a defined dialect
 */
public static void generateDatabase(String dialect) throws IOException {
    // Configure Hibernate
    log.info("Exporting Database Schema...");
    String dbSchema = EnvironmentDetector.getUserHome() + "/schema.sql";
    Configuration cfg = getConfiguration().configure();
    cfg.setProperty("hibernate.dialect", dialect);
    SchemaExport se = new SchemaExport(cfg);
    se.setOutputFile(dbSchema);
    se.setDelimiter(";");
    se.setFormat(false);
    se.create(false, false);
    log.info("Database Schema exported to {}", dbSchema);

    String initialData = new File("").getAbsolutePath() + "/src/main/resources/default.sql";
    log.info("Exporting Initial Data from '{}'...", initialData);
    String initData = EnvironmentDetector.getUserHome() + "/data.sql";
    FileInputStream fis = new FileInputStream(initialData);
    String ret = DatabaseDialectAdapter.dialectAdapter(fis, dialect);
    FileWriter fw = new FileWriter(initData);
    IOUtils.write(ret, fw);
    fw.flush();
    fw.close();
    log.info("Initial Data exported to {}", initData);
}

From source file:com.imos.sample.service.HibernateService.java

/**
 * Hibernate configuration.
 *
 * @throws RepositoryException
 */
public void config() throws RepositoryException {
    try {
        StandardServiceRegistryBuilder registryBuilder = new StandardServiceRegistryBuilder();
        if (filePath == null || filePath.isEmpty()) {
            registryBuilder = registryBuilder.configure();
        } else {
            registryBuilder = registryBuilder.configure(filePath);
        }
        registry = registryBuilder.build();

        MetadataSources metaData = new MetadataSources(registry);
        sessionFactory = metaData.buildMetadata().buildSessionFactory();
        session = sessionFactory.openSession();

        SchemaExport schemaExport = new SchemaExport();
        schemaExport.setDelimiter(";");
        schemaExport.setFormat(true);
        schemaExport.setManageNamespaces(true);
        schemaExport.setOutputFile("./ddl_skilldb.sql");
        schemaExport.execute(EnumSet.of(TargetType.SCRIPT, TargetType.DATABASE, TargetType.STDOUT),
                SchemaExport.Action.CREATE, metaData.buildMetadata(registry), registry);

        log.info("Configuration succeed");
    } catch (HibernateException e) {
        StandardServiceRegistryBuilder.destroy(registry);
        log.error("Configuration failed : {}", e);
    }
}

From source file:com.jada.jpa.util.JpaSchemaExport.java

License:Open Source License

public void export() throws Exception {
    AnnotationConfiguration configuration = new AnnotationConfiguration();
    configuration.setProperty("hibernate.hbm2ddl.auto", "create");
    PersistenceLoader persistenceLoader = PersistenceLoader.getInstance();
    Persistence persistence = persistenceLoader.getPersistence();
    for (String className : persistence.getPersistenceUnit().getClassNames()) {
        Class<?> c = Class.forName(className);
        configuration.addAnnotatedClass(c);
    }

    configuration.setProperty("hibernate.dialect", dialectName);
    SchemaExport exporter = new SchemaExport(configuration);
    exporter.setDelimiter(";");
    exporter.setOutputFile(fileName);

    boolean script = true;
    boolean export = false;
    boolean justDrop = false;
    boolean justCreate = false;
    exporter.execute(script, export, justDrop, justCreate);
}

From source file:com.klistret.cmdb.utility.hibernate.CMDBDatabaseHelper.java

License:Open Source License

public void generateDatabaseSchema(String path, boolean display, boolean execute) {
    SchemaExport sSchemaExport = new SchemaExport(CMDBDatabaseHelper.sConfiguration);
    sSchemaExport.setOutputFile(path);
    sSchemaExport.setDelimiter(delimiter);
    sSchemaExport.create(display, execute);
}

From source file:com.medigy.tool.persist.hibernate.ddl.GenerateDDLTask.java

License:Open Source License

public void execute() throws BuildException {
    if (hibernateConfigClass == null)
        throw new BuildException("hibernateConfigClass was not provided.");

    if (destDir == null)
        throw new BuildException("destDir was not provided.");

    try {
        final SqlDataDefinitionFilter createFilter = (SqlDataDefinitionFilter) createSqlDataDefinitionFilterClass
                .newInstance();
        log("Using create DDL filter " + createFilter.getClass().getName());

        final SqlDataDefinitionFilter cleanFilter = (SqlDataDefinitionFilter) cleanSqlDataDefinitionFilterClass
                .newInstance();
        log("Using clean DDL filter " + createFilter.getClass().getName());

        final Configuration configuration = (Configuration) hibernateConfigClass.newInstance();
        log("Using configuration " + configuration.getClass().getName());

        if (hibernateConfigFile != null) {
            configuration.configure(hibernateConfigFile);
            log("Using configuration file " + hibernateConfigFile);
        }

        final Class[] dialects = HibernateDialectsCatalog.getDialects();
        for (int i = 0; i < dialects.length; i++) {
            final Class dialectClass = dialects[i];
            final Dialect dialect = (Dialect) dialectClass.newInstance();
            final String dialectClassName = dialectClass.getName();
            final String dialectShortName = dialectClass.getName()
                    .substring(dialectClassName.lastIndexOf('.') + 1);
            final File dialectFile = new File(dialectShortName + destFileExtension);

            final Properties properties = new Properties();
            properties.put(Environment.DIALECT, dialectClass.getName());

            final File createFileFiltered = new File(destDir, createPrefix + dialectFile);
            final File createFileTmp = File.createTempFile(getClass().getName() + "-",
                    "-" + createPrefix + dialectFile);
            createFileTmp.deleteOnExit();

            final File cleanFileFiltered = new File(destDir, cleanPrefix + dialectFile);
            final File cleanFileTmp = File.createTempFile(getClass().getName() + "-",
                    "-" + cleanPrefix + dialectFile);
            cleanFileTmp.deleteOnExit();

            final SchemaExport exporter;
            try {
                // Generates CREATE statements including, quite stupidly, DROP statements which we'll filter later
                exporter = new SchemaExport(configuration, properties);
                exporter.setDelimiter(sqlStmtDelimiter);
                exporter.setOutputFile(createFileTmp.getAbsolutePath());
                exporter.create(false, false);

                // Generates DROP statements only
                exporter.setOutputFile(cleanFileTmp.getAbsolutePath());
                exporter.drop(false, false);
            } catch (HibernateException e) {
                log("Error generating DDL for " + dialectClassName + ": " + e.getMessage());
                continue;
            }

            final SqlDataDefinitionFilterProcessor createFilterProcessor = new SqlDataDefinitionFilterProcessor(
                    createFilter, configuration, dialect, createFileTmp, createFileFiltered, sqlStmtDelimiter);
            createFilterProcessor.execute();

            final SqlDataDefinitionFilterProcessor cleanFilterProcessor = new SqlDataDefinitionFilterProcessor(
                    cleanFilter, configuration, dialect, cleanFileTmp, cleanFileFiltered, sqlStmtDelimiter);
            cleanFilterProcessor.execute();

            log("Generated create " + dialectShortName + " DDL in " + createFileFiltered.getAbsolutePath()
                    + " (" + createFilterProcessor.getRemovedLines() + " lines removed, "
                    + createFilterProcessor.getReplacedLines() + " lines replaced)");
            log("Generated clean " + dialectShortName + " DDL in " + cleanFileFiltered.getAbsolutePath() + " ("
                    + cleanFilterProcessor.getRemovedLines() + " lines removed, "
                    + cleanFilterProcessor.getReplacedLines() + " lines replaced)");
        }
    } catch (Exception e) {
        throw new BuildException(e);
    }
}

From source file:com.mobileman.projecth.InitDbTest.java

License:Apache License

protected void export() throws Exception {

    SchemaExport export = new SchemaExport(sessionFactory.getConfiguration());
    export.setOutputFile("sql.ddl");
    export.setDelimiter(";");
    //export.drop(false, true);
    //export.create(false, true);
}