Example usage for org.hibernate.tool.hbm2ddl SchemaExport drop

List of usage examples for org.hibernate.tool.hbm2ddl SchemaExport drop

Introduction

On this page you can find example usages of org.hibernate.tool.hbm2ddl.SchemaExport.drop.

Prototype

public void drop(EnumSet<TargetType> targetTypes, Metadata metadata) 

Source Link

Usage

From source file:ca.myewb.build.CreateDb.java

License:Open Source License

/**
 * Drops and re-creates a fresh database schema for the Hibernate
 * configuration selected by the given postfix.
 *
 * @param postfix suffix used to look up the Hibernate configuration
 */
private static void createDb(String postfix) {
    try {
        System.out.println("Creating fresh database");

        Configuration config = HibernateUtil.getConfiguration(postfix);

        // Set up the schema exporter utility. setDelimiter is fluent, so the
        // original's self-reassignment (sch = sch.setDelimiter(";")) was redundant.
        SchemaExport sch = new SchemaExport(config);
        sch.setDelimiter(";");

        // Drop and re-create the database.
        // (script=false, export=true): run the DDL without echoing it to stdout.
        sch.drop(false, true);
        sch.create(false, true);
    } catch (Exception e) {
        // Fix: use println so the message isn't glued to the stack trace output.
        System.err.println("Exception: " + e);
        e.printStackTrace();
    }
}

From source file:com.ah.util.HibernateUtil.java

/**
 * Command-line schema utility. The first argument selects the action:
 * {@code create}, {@code export}, {@code drop}, or {@code reset}; further
 * arguments optionally override the JDBC connection URL.
 *
 * @param args command-line arguments; {@code args[0]} is the action
 */
public static void main(String[] args) {
    init(false);
    System.out.println("Entered HibernateUtil.main");
    System.out.println("# arguments: " + Arrays.asList(args));
    System.out.println("Getting configuration.");

    // Fix: the original indexed args[0] unconditionally, so running with no
    // arguments threw ArrayIndexOutOfBoundsException before doing anything.
    if (args.length == 0) {
        System.out.println("Usage: HibernateUtil <create|export|drop|reset> [dbName|url] [checkFlag]");
        close();
        return;
    }

    if ("create".equals(args[0])) {
        if (args.length >= 2) {
            // Optional second argument: database name appended to the local URL.
            String newUrl = "jdbc:postgresql://localhost/" + args[1];
            configuration.setProperty("hibernate.connection.url", newUrl);

            if (args.length >= 3) {
                try {
                    int i = Integer.parseInt(args[2]);

                    // Flag value 1 means: skip creation if the schema already exists.
                    // NOTE(review): this early return skips close() — preserved from
                    // the original; confirm whether cleanup is required here.
                    if ((1 == i) && (BeOperateHMCentOSImpl.isExistHomeDomain())) {
                        System.out.println("have tables! need not recreate");

                        return;
                    }
                } catch (Exception ex) {
                    System.out.println(ex);
                }
            }
        }

        SchemaExport schemaExport = new SchemaExport(configuration);

        System.out.println("Creating schema ...");
        // (script=true, export=true): echo DDL and execute it against the DB.
        schemaExport.create(true, true);
        DBFunction.createHex2Int();
        DBFunction.createDBRollUp();
        DBFunction.createRepoRollUp();
        System.out.println("Create schema finished.");
    } else if ("export".equals(args[0])) {
        SchemaExport schemaExport = new SchemaExport(configuration);

        // Write DDL to a file only; do not touch the database.
        schemaExport.setOutputFile("schema.ddl");
        schemaExport.setDelimiter(";");
        System.out.println("Exporting schema ...");
        schemaExport.create(true, false);
        System.out.println("Export finished.");
    } else if ("drop".equals(args[0])) {
        SchemaExport schemaExport = new SchemaExport(configuration);

        System.out.println("Dropping schema ...");
        schemaExport.drop(true, true);
        System.out.println("Drop schema finished.");
    } else if ("reset".equals(args[0])) {
        // java HibernateUtil reset jdbc:postgresql://ip_address/db_name
        if (args.length >= 2) {
            configuration.setProperty("hibernate.connection.url", args[1]);
        }
        SchemaExport schemaExport = new SchemaExport(configuration);
        schemaExport.create(true, true);
        DBFunction.createHex2Int();
        DBFunction.createDBRollUp();
        DBFunction.createRepoRollUp();
        System.out.println("execute reset finished.");
    }

    close();
}

From source file:com.fiveamsolutions.nci.commons.test.AbstractHibernateTestCase.java

License:Open Source License

/**
 * Rebuilds the database schema and clears Hibernate's second-level cache
 * before every test. In JUnit 4 an overriding setup method only needs the
 * {@code @Before} annotation; it runs after the parent class's setUp().
 */
@SuppressWarnings("unchecked")
@Before
public final void setUp() {

    // Recreate the schema inside its own transaction.
    Transaction schemaTx = HibernateUtil.getHibernateHelper().beginTransaction();
    SchemaExport exporter = new SchemaExport(HibernateUtil.getHibernateHelper().getConfiguration());
    exporter.drop(false, true);
    exporter.create(false, true);
    schemaTx.commit();

    // Evict every cached entity region so no state leaks between test runs.
    SessionFactory factory = getCurrentSession().getSessionFactory();
    Map<?, EntityPersister> entityMetadata = factory.getAllClassMetadata();
    for (EntityPersister persister : entityMetadata.values()) {
        if (persister.hasCache()) {
            factory.evictEntity(persister.getCacheAccessStrategy().getRegion().getName());
        }
    }

    // Likewise for cached collection regions.
    Map<?, AbstractCollectionPersister> collectionMetadata = factory.getAllCollectionMetadata();
    for (AbstractCollectionPersister persister : collectionMetadata.values()) {
        if (persister.hasCache()) {
            factory.evictCollection(persister.getCacheAccessStrategy().getRegion().getName());
        }
    }

    // Open the transaction the test body itself will run in.
    transaction = HibernateUtil.getHibernateHelper().beginTransaction();
}

From source file:com.fiveamsolutions.nci.commons.util.HibernateHelperTest.java

License:Open Source License

/**
 * Re-initializes the database before each test: binds a session, drops and
 * re-creates the schema inside a single transaction, then unbinds and
 * cleans up the session.
 *
 * @throws HibernateException if schema recreation fails
 */
@Before
final public void initDbIfNeeded() throws HibernateException {
    initHelper();

    hh.openAndBindSession();

    Transaction schemaTx = hh.beginTransaction();
    SchemaExport exporter = new SchemaExport(hh.getConfiguration());
    exporter.drop(false, true);
    exporter.create(false, true);
    schemaTx.commit();

    hh.unbindAndCleanupSession();
}

From source file:com.ironiacorp.persistence.hibernate.GenericHibernateDataSource.java

License:Open Source License

/**
 * Drop the entire database./*  www .  j  a  v  a  2 s . c o m*/
 * 
 * @throws RuntimeException If an error is found when running the DDL script.
 */
public void dropDB() {
    log.debug("Dropping the database");
    log.debug(getDropDDLScript());
    SchemaExport ddl = new SchemaExport(hibernateConfig);
    List exceptions = null;

    ddl.drop(false, true);
    exceptions = handleExceptions(ddl);
    if (!exceptions.isEmpty()) {
        throw new RuntimeException("exception.bootstrap.dropdb", (Exception) ddl.getExceptions().get(0));
    }
}

From source file:com.ironiacorp.persistence.hibernate.HibernateBootstrap.java

License:Open Source License

/**
 * Drop the entire database./*from   w ww . ja v  a  2 s  . c  o  m*/
 * 
 * @throws RuntimeException
 *             If an error is found when running the DDL script.
 */
public void dropDB() {
    log.debug("Dropping the database");
    log.debug(getDropDDLScript());
    SchemaExport ddl = new SchemaExport(config);
    List<Exception> exceptions = null;

    ddl.drop(false, true);
    exceptions = handleExceptions(ddl);
    if (!exceptions.isEmpty()) {
        throw new RuntimeException("exception.bootstrap.dropdb", (Exception) ddl.getExceptions().get(0));
    }
}

From source file:com.medigy.tool.persist.hibernate.ddl.GenerateDDLTask.java

License:Open Source License

/**
 * Ant task entry point: generates create and clean (drop) DDL scripts for
 * every dialect in {@code HibernateDialectsCatalog}, passing each raw script
 * through the configured {@code SqlDataDefinitionFilter} implementations
 * before writing the filtered result into {@code destDir}.
 *
 * @throws BuildException if a required task attribute is missing or any
 *         unexpected error occurs during generation
 */
public void execute() throws BuildException {
    if (hibernateConfigClass == null)
        throw new BuildException("hibernateConfigClass was not provided.");

    if (destDir == null)
        throw new BuildException("destDir was not provided.");

    try {
        // Instantiate the DDL filters reflectively from the configured classes.
        final SqlDataDefinitionFilter createFilter = (SqlDataDefinitionFilter) createSqlDataDefinitionFilterClass
                .newInstance();
        log("Using create DDL filter " + createFilter.getClass().getName());

        final SqlDataDefinitionFilter cleanFilter = (SqlDataDefinitionFilter) cleanSqlDataDefinitionFilterClass
                .newInstance();
        // Fix: the original logged createFilter's class name here (copy-paste bug).
        log("Using clean DDL filter " + cleanFilter.getClass().getName());

        final Configuration configuration = (Configuration) hibernateConfigClass.newInstance();
        log("Using configuration " + configuration.getClass().getName());

        if (hibernateConfigFile != null) {
            configuration.configure(hibernateConfigFile);
            log("Using configuration file " + hibernateConfigFile);
        }

        final Class[] dialects = HibernateDialectsCatalog.getDialects();
        for (int i = 0; i < dialects.length; i++) {
            final Class dialectClass = dialects[i];
            final Dialect dialect = (Dialect) dialectClass.newInstance();
            final String dialectClassName = dialectClass.getName();
            // Short name (package stripped) builds the per-dialect file names.
            final String dialectShortName = dialectClass.getName()
                    .substring(dialectClassName.lastIndexOf('.') + 1);
            final File dialectFile = new File(dialectShortName + destFileExtension);

            final Properties properties = new Properties();
            properties.put(Environment.DIALECT, dialectClass.getName());

            // Raw exporter output goes to temp files; the filter processors
            // write the final filtered files into destDir.
            final File createFileFiltered = new File(destDir, createPrefix + dialectFile);
            final File createFileTmp = File.createTempFile(getClass().getName() + "-",
                    "-" + createPrefix + dialectFile);
            createFileTmp.deleteOnExit();

            final File cleanFileFiltered = new File(destDir, cleanPrefix + dialectFile);
            final File cleanFileTmp = File.createTempFile(getClass().getName() + "-",
                    "-" + cleanPrefix + dialectFile);
            cleanFileTmp.deleteOnExit();

            final SchemaExport exporter;
            try {
                // Generates CREATE statements including, quite stupidly, DROP
                // statements which we'll filter later.
                exporter = new SchemaExport(configuration, properties);
                exporter.setDelimiter(sqlStmtDelimiter);
                exporter.setOutputFile(createFileTmp.getAbsolutePath());
                exporter.create(false, false);

                // Generates DROP statements only.
                exporter.setOutputFile(cleanFileTmp.getAbsolutePath());
                exporter.drop(false, false);
            } catch (HibernateException e) {
                // Skip dialects the current mappings cannot be rendered for.
                log("Error generating DDL for " + dialectClassName + ": " + e.getMessage());
                continue;
            }

            final SqlDataDefinitionFilterProcessor createFilterProcessor = new SqlDataDefinitionFilterProcessor(
                    createFilter, configuration, dialect, createFileTmp, createFileFiltered, sqlStmtDelimiter);
            createFilterProcessor.execute();

            final SqlDataDefinitionFilterProcessor cleanFilterProcessor = new SqlDataDefinitionFilterProcessor(
                    cleanFilter, configuration, dialect, cleanFileTmp, cleanFileFiltered, sqlStmtDelimiter);
            cleanFilterProcessor.execute();

            log("Generated create " + dialectShortName + " DDL in " + createFileFiltered.getAbsolutePath()
                    + " (" + createFilterProcessor.getRemovedLines() + " lines removed, "
                    + createFilterProcessor.getReplacedLines() + " lines replaced)");
            log("Generated clean " + dialectShortName + " DDL in " + cleanFileFiltered.getAbsolutePath() + " ("
                    + cleanFilterProcessor.getRemovedLines() + " lines removed, "
                    + cleanFilterProcessor.getReplacedLines() + " lines replaced)");
        }
    } catch (Exception e) {
        // Wrap anything unexpected in a BuildException, per Ant task convention.
        throw new BuildException(e);
    }
}

From source file:com.persinity.ndt.datamutator.hibernate.HibernateEntityFactory.java

License:Apache License

/**
 * Drops the schema for the current configuration. SchemaExport writes its
 * progress to System.out, so that output is captured and routed to the
 * logger instead of the console.
 */
@Override
public void dropSchema() {
    assertState(configuration != null, "You should call init() before use");

    // hack to not dump the schema init at System.out
    final String captured = executeAndCaptureSysOut(new Function<Void, Void>() {
        @Override
        public Void apply(final Void unused) {
            new SchemaExport(configuration).drop(true, true);
            return null;
        }
    });
    log.info("{}", captured);
}

From source file:com.socialsite.scripts.SchemaCreator.java

License:Open Source License

/**
 * Writes a formatted drop-and-create DDL script to
 * src/main/scripts/schema.sql and executes it against the configured
 * database.
 */
public static void create() {
    final Configuration configuration = new Configuration().configure();
    final SchemaExport exporter = new SchemaExport(configuration);
    exporter.setDelimiter(";");
    exporter.setFormat(true);

    // Make sure the output directory exists before exporting.
    final File scriptDir = new File("src/main/scripts");
    scriptDir.mkdirs();
    exporter.setOutputFile("src/main/scripts/schema.sql");

    // (script=true, export=true): echo the DDL and run it against the DB.
    exporter.drop(true, true);
    exporter.create(true, true);
}

From source file:com.yahoo.elide.datastores.hibernate3.HibernateDataStoreSupplier.java

License:Apache License

/**
 * Builds a Hibernate-3-backed DataStore for tests: registers the test check
 * mappings, scans the test entity package, connects to a local MySQL
 * instance, and (re)creates the example tables from the annotated beans.
 */
@Override
public DataStore get() {
    // Add additional checks to our static check mappings map.
    // NOTE: This is a bit hacky. We need to do a major overhaul on our test architecture
    TestCheckMappings.MAPPINGS.put("filterCheck", Filtered.FilterCheck.class);
    TestCheckMappings.MAPPINGS.put("filterCheck3", Filtered.FilterCheck3.class);

    // Register every @Entity in the test package (also forces class initialization).
    Configuration cfg = new Configuration();
    try {
        ClassScanner.getAnnotatedClasses(Parent.class.getPackage(), Entity.class)
                .forEach(cfg::addAnnotatedClass);
    } catch (MappingException e) {
        throw new RuntimeException(e);
    }

    String jdbcUrl = "jdbc:mysql://localhost:" + System.getProperty("mysql.port", "3306")
            + "/root?serverTimezone=UTC";
    SessionFactory factory = cfg.configure("hibernate.cfg.xml")
            .setProperty(Environment.CURRENT_SESSION_CONTEXT_CLASS, "thread")
            .setProperty(Environment.URL, jdbcUrl)
            .setProperty(Environment.USER, "root")
            .setProperty(Environment.PASS, "root")
            .buildSessionFactory();

    // create example tables from beans
    SchemaExport export = new SchemaExport(cfg).setHaltOnError(true);
    export.drop(false, true);
    export.execute(false, true, false, true);

    if (!export.getExceptions().isEmpty()) {
        throw new RuntimeException(export.getExceptions().toString());
    }

    return new HibernateStore(factory, true, ScrollMode.FORWARD_ONLY);
}