List of usage examples for org.hibernate.dialect Dialect getDialect
@Deprecated public static Dialect getDialect(Properties props) throws HibernateException
From source file:be.fedict.eid.applet.maven.sql.ddl.SQLDDLMojo.java
License:Open Source License
@Override public void execute() throws MojoExecutionException, MojoFailureException { getLog().info("SQL DDL script generator"); File outputFile = new File(this.outputDirectory, this.outputName); getLog().info("Output SQL DDL script file: " + outputFile.getAbsolutePath()); this.outputDirectory.mkdirs(); try {//from w w w . j a va 2 s .co m outputFile.createNewFile(); } catch (IOException e) { throw new MojoExecutionException("I/O error.", e); } for (ArtifactItem artifactItem : this.artifactItems) { getLog().info("artifact: " + artifactItem.getGroupId() + ":" + artifactItem.getArtifactId()); List<Dependency> dependencies = this.project.getDependencies(); String version = null; for (Dependency dependency : dependencies) { if (StringUtils.equals(dependency.getArtifactId(), artifactItem.getArtifactId()) && StringUtils.equals(dependency.getGroupId(), artifactItem.getGroupId())) { version = dependency.getVersion(); break; } } getLog().info("artifact version: " + version); VersionRange versionRange = VersionRange.createFromVersion(version); Artifact artifact = this.artifactFactory.createDependencyArtifact(artifactItem.getGroupId(), artifactItem.getArtifactId(), versionRange, "jar", null, Artifact.SCOPE_COMPILE); try { this.resolver.resolve(artifact, this.remoteRepos, this.local); } catch (ArtifactResolutionException e) { throw new MojoExecutionException("Unable to resolve artifact.", e); } catch (ArtifactNotFoundException e) { throw new MojoExecutionException("Unable to find artifact.", e); } getLog().info("artifact file: " + artifact.getFile().getAbsolutePath()); getLog().info("hibernate dialect: " + this.hibernateDialect); URL artifactUrl; try { artifactUrl = artifact.getFile().toURI().toURL(); } catch (MalformedURLException e) { throw new MojoExecutionException("URL error.", e); } URLClassLoader classLoader = new URLClassLoader(new URL[] { artifactUrl }, this.getClass().getClassLoader()); Thread.currentThread().setContextClassLoader(classLoader); AnnotationDB 
annotationDb = new AnnotationDB(); try { annotationDb.scanArchives(artifactUrl); } catch (IOException e) { throw new MojoExecutionException("I/O error.", e); } Set<String> classNames = annotationDb.getAnnotationIndex().get(Entity.class.getName()); getLog().info("# JPA entity classes: " + classNames.size()); AnnotationConfiguration configuration = new AnnotationConfiguration(); configuration.setProperty("hibernate.dialect", this.hibernateDialect); Dialect dialect = Dialect.getDialect(configuration.getProperties()); getLog().info("dialect: " + dialect.toString()); for (String className : classNames) { getLog().info("JPA entity: " + className); Class<?> entityClass; try { entityClass = classLoader.loadClass(className); getLog().info("entity class loader: " + entityClass.getClassLoader()); } catch (ClassNotFoundException e) { throw new MojoExecutionException("class not found.", e); } configuration.addAnnotatedClass(entityClass); } SchemaExport schemaExport = new SchemaExport(configuration); schemaExport.setFormat(true); schemaExport.setHaltOnError(true); schemaExport.setOutputFile(outputFile.getAbsolutePath()); schemaExport.setDelimiter(";"); try { getLog().info("SQL DDL script: " + IOUtil.toString(new FileInputStream(outputFile))); } catch (FileNotFoundException e) { throw new MojoExecutionException("file not found.", e); } catch (IOException e) { throw new MojoExecutionException("I/O error.", e); } // operate schemaExport.execute(true, false, false, true); List<Exception> exceptions = schemaExport.getExceptions(); for (Exception exception : exceptions) { getLog().error("exception: " + exception.getMessage()); } } }
From source file:br.gov.jfrj.siga.dp.dao.CpDaoTest.java
License:Open Source License
/** * @param args/* w ww .j av a 2 s . c om*/ * @throws Exception * @throws NoSuchMethodException * @throws InvocationTargetException * @throws IllegalAccessException * @throws SecurityException */ // public static void main(String[] args) throws SecurityException, // IllegalAccessException, InvocationTargetException, // NoSuchMethodException, Exception { // // CpAmbienteEnumBL ambiente = CpAmbienteEnumBL.DESENVOLVIMENTO; // Cp.getInstance().getProp().setPrefixo(ambiente.getSigla()); // // Configuration cfg = CpDao.criarHibernateCfg(ambiente); // HibernateUtil.configurarHibernate(cfg, ""); // // CpDao dao = CpDao.getInstance(); // // System.out.println("Data e hora da ultima atualizao - " // + dao.consultarDataUltimaAtualizacao()); // // dao.iniciarTransacao(); // // dao.importarAcessoTomcat(); // dao.commitTransacao(); // // if (true) // return; // // CpServico ser = dao.consultar(3L, CpServico.class, false); // System.out.println(ser.getSiglaServico() + " - " + ser.getDscServico()); // // DpPessoa pesSigla = new DpPessoa(); // pesSigla.setSesbPessoa("RJ"); // pesSigla.setMatricula(13635L); // DpPessoa pes = dao.consultarPorSigla(pesSigla); // // System.out.println(pes.getSigla() + " - " + pes.getDescricao()); // System.out.println(pes.getCargo().getDescricao()); // System.out.println(pes.getFuncaoConfianca().getDescricao()); // System.out.println(pes.getLotacao().getSigla() + " - " // + pes.getLotacao().getDescricao()); // // DpPessoaDaoFiltro flt = new DpPessoaDaoFiltro(); // flt.setSigla(LOGIN); // System.out.print("consultarQuantidade: "); // System.out.println(dao.consultarQuantidade((DaoFiltro) flt)); // // CpDao.freeInstance(); // } public static void printSchema(SessionFactory fact, Configuration cfg) { Dialect dialect = Dialect.getDialect(cfg.getProperties()); // printDropSchemaScript(cfg, dialect); // printSchemaCreationScript(cfg, dialect); printSchemaUpdateScript(fact, cfg, dialect); }
From source file:br.rj.jfrj.siga.dp.CpDaoTest.java
License:Open Source License
public static void printSchema(SessionFactory fact, Configuration cfg) { Dialect dialect = Dialect.getDialect(cfg.getProperties()); // printDropSchemaScript(cfg, dialect); // printSchemaCreationScript(cfg, dialect); printSchemaUpdateScript(fact, cfg, dialect); }
From source file:com.aan.girsang.server.launcher.GenerateDatabase.java
public static void main(String[] args) throws SQLException { AbstractApplicationContext ctx = new ClassPathXmlApplicationContext("applicationContext.xml"); DataSource dataSource = (DataSource) ctx.getBean("dataSource"); Configuration cfg = new AnnotationConfiguration().configure("hibernate.cfg.xml") .setProperty("hibernate.dialect", "org.hibernate.dialect.H2Dialect"); try (Connection conn = dataSource.getConnection()) { new SchemaExport(cfg, conn).create(true, true); cfg.generateSchemaCreationScript(Dialect.getDialect(cfg.getProperties())); SchemaExport export = new SchemaExport(cfg, conn); export.create(true, true);/*w w w . j a v a 2 s. c om*/ conn.close(); } ctx.registerShutdownHook(); }
From source file:com.aangirsang.girsang.toko.GenerateDatabase.java
public static void main(String[] args) throws SQLException { AbstractApplicationContext ctx = new ClassPathXmlApplicationContext("classpath:applicationContext.xml"); DataSource dataSource = (DataSource) ctx.getBean("dataSource"); Configuration cfg = new AnnotationConfiguration().configure("hibernate.cfg.xml") .setProperty("hibernate.dialect", "org.hibernate.dialect.H2Dialect"); try (Connection conn = dataSource.getConnection()) { new SchemaExport(cfg, conn).create(true, true); cfg.generateSchemaCreationScript(Dialect.getDialect(cfg.getProperties())); SchemaExport export = new SchemaExport(cfg, conn); export.create(true, true);//from w w w . j a va2s . c om conn.close(); } ctx.registerShutdownHook(); }
From source file:com.atolcd.alfresco.repo.patch.SchemaUpgradeScriptPatch.java
License:Open Source License
private void executeScriptUrl(Configuration cfg, Connection connection, String scriptUrl) throws Exception { Dialect dialect = Dialect.getDialect(cfg.getProperties()); String dialectStr = dialect.getClass().getSimpleName(); InputStream scriptInputStream = getScriptInputStream(dialect.getClass(), scriptUrl); // check that it exists if (scriptInputStream == null) { throw AlfrescoRuntimeException.create(ERR_SCRIPT_NOT_FOUND, scriptUrl); }/*www .j a v a2 s. c o m*/ // write the script to a temp location for future and failure reference File tempFile = null; try { tempFile = TempFileProvider.createTempFile("AlfrescoSchema-" + dialectStr + "-Update-", ".sql"); ContentWriter writer = new FileContentWriter(tempFile); writer.putContent(scriptInputStream); } finally { try { scriptInputStream.close(); } catch (Throwable e) { } // usually a duplicate close } // now execute it String dialectScriptUrl = scriptUrl.replaceAll(PLACEHOLDER_SCRIPT_DIALECT, dialect.getClass().getName()); // Replace the script placeholders executeScriptFile(cfg, connection, tempFile, dialectScriptUrl); }
From source file:com.atolcd.alfresco.repo.patch.SchemaUpgradeScriptPatch.java
License:Open Source License
/**
 * Reads the given SQL script line by line and executes its statements against
 * the given connection, handling the script's control directives
 * (--ASSIGN:var=column, --FOREACH table.column batch.size.property,
 * --BEGIN TXN / --END TXN, and trailing "(optional)" markers) as well as
 * ${var} placeholder substitution.
 *
 * @param cfg
 *            the Hibernate configuration
 * @param connection
 *            the DB connection to use
 * @param scriptFile
 *            the file containing the statements
 * @param scriptUrl
 *            the URL of the script to report. If this is null, the script
 *            is assumed to have been auto-generated.
 */
@SuppressWarnings("resource")
private void executeScriptFile(Configuration cfg, Connection connection, File scriptFile, String scriptUrl)
        throws Exception {
    final Dialect dialect = Dialect.getDialect(cfg.getProperties());
    StringBuilder executedStatements = executedStatementsThreadLocal.get();
    if (executedStatements == null) {
        // Dump the normalized, pre-upgrade Alfresco schema. We keep the
        // file for later reporting.
        /*
         * xmlPreSchemaOutputFile = dumpSchema(this.dialect, TempFileProvider
         * .createTempFile( "AlfrescoSchema-" +
         * this.dialect.getClass().getSimpleName() + "-",
         * "-Startup.xml").getPath(),
         * "Failed to dump normalized, pre-upgrade schema to file.");
         */
        // There is no lock at this stage. This process can fall out if the
        // lock can't be applied.
        // setBootstrapStarted(connection);
        executedStatements = new StringBuilder(8094);
        executedStatementsThreadLocal.set(executedStatements);
    }
    if (scriptUrl == null) {
        LogUtil.info(logger, MSG_EXECUTING_GENERATED_SCRIPT, scriptFile);
    } else {
        LogUtil.info(logger, MSG_EXECUTING_COPIED_SCRIPT, scriptFile, scriptUrl);
    }
    InputStream scriptInputStream = new FileInputStream(scriptFile);
    BufferedReader reader = new BufferedReader(new InputStreamReader(scriptInputStream, "UTF-8"));
    try {
        int line = 0;
        // loop through all statements
        StringBuilder sb = new StringBuilder(1024);
        String fetchVarName = null;
        String fetchColumnName = null;
        boolean doBatch = false;
        int batchUpperLimit = 0;
        int batchSize = 1;
        Map<String, Object> varAssignments = new HashMap<String, Object>(13);
        // Special variable assignments:
        if (dialect instanceof PostgreSQLDialect) {
            // PostgreSQL understands the boolean literals directly
            varAssignments.put("true", "true");
            varAssignments.put("false", "false");
            varAssignments.put("TRUE", "TRUE");
            varAssignments.put("FALSE", "FALSE");
        } else {
            // other dialects take 1/0 in place of true/false
            varAssignments.put("true", "1");
            varAssignments.put("false", "0");
            varAssignments.put("TRUE", "1");
            varAssignments.put("FALSE", "0");
        }
        while (true) {
            String sqlOriginal = reader.readLine();
            line++;
            if (sqlOriginal == null) {
                // nothing left in the file
                break;
            }
            // trim it
            String sql = sqlOriginal.trim();
            // Check for variable assignment
            if (sql.startsWith("--ASSIGN:")) {
                if (sb.length() > 0) {
                    // This can only be set before a new SQL statement
                    throw AlfrescoRuntimeException.create(ERR_STATEMENT_VAR_ASSIGNMENT_BEFORE_SQL,
                            (line - 1), scriptUrl);
                }
                String assignStr = sql.substring(9, sql.length());
                String[] assigns = assignStr.split("=");
                if (assigns.length != 2 || assigns[0].length() == 0 || assigns[1].length() == 0) {
                    throw AlfrescoRuntimeException.create(ERR_STATEMENT_VAR_ASSIGNMENT_FORMAT, (line - 1),
                            scriptUrl);
                }
                fetchVarName = assigns[0];
                fetchColumnName = assigns[1];
                continue;
            }
            // Handle looping control
            else if (sql.startsWith("--FOREACH")) {
                // --FOREACH table.column batch.size.property
                String[] args = sql.split("[ \\t]+");
                int sepIndex;
                if (args.length == 3 && (sepIndex = args[1].indexOf('.')) != -1) {
                    doBatch = true;
                    // Select the upper bound of the table column
                    String stmt = "SELECT MAX(" + args[1].substring(sepIndex + 1) + ") AS upper_limit FROM "
                            + args[1].substring(0, sepIndex);
                    Object fetchedVal = executeStatement(connection, stmt, "upper_limit", false, line,
                            scriptFile);
                    if (fetchedVal instanceof Number) {
                        batchUpperLimit = ((Number) fetchedVal).intValue();
                        // Read the batch size from the named property
                        String batchSizeString = globalProperties.getProperty(args[2]);
                        // Fall back to the default property
                        if (batchSizeString == null) {
                            batchSizeString = globalProperties.getProperty(PROPERTY_DEFAULT_BATCH_SIZE);
                        }
                        batchSize = batchSizeString == null ? 10000 : Integer.parseInt(batchSizeString);
                    }
                }
                continue;
            }
            // Allow transaction delineation
            else if (sql.startsWith("--BEGIN TXN")) {
                connection.setAutoCommit(false);
                continue;
            } else if (sql.startsWith("--END TXN")) {
                connection.commit();
                connection.setAutoCommit(true);
                continue;
            }
            // Check for comments
            if (sql.length() == 0 || sql.startsWith("--") || sql.startsWith("//") || sql.startsWith("/*")) {
                if (sb.length() > 0) {
                    // we have an unterminated statement
                    throw AlfrescoRuntimeException.create(ERR_STATEMENT_TERMINATOR, (line - 1), scriptUrl);
                }
                // there has not been anything to execute - it's just a
                // comment line
                continue;
            }
            // have we reached the end of a statement?
            boolean execute = false;
            boolean optional = false;
            if (sql.endsWith(";")) {
                sql = sql.substring(0, sql.length() - 1);
                execute = true;
                optional = false;
            } else if (sql.endsWith("(optional)") || sql.endsWith("(OPTIONAL)")) {
                // Get the end of statement
                int endIndex = sql.lastIndexOf(';');
                if (endIndex > -1) {
                    sql = sql.substring(0, endIndex);
                    execute = true;
                    optional = true;
                } else {
                    // Ends with "(optional)" but there is no semi-colon.
                    // Just take it at face value and probably fail.
                }
            }
            // Add newline
            if (sb.length() > 0) {
                sb.append("\n");
            }
            // Add leading whitespace for formatting
            int whitespaceCount = sqlOriginal.indexOf(sql);
            for (int i = 0; i < whitespaceCount; i++) {
                sb.append(" ");
            }
            // append to the statement being built up
            sb.append(sql);
            // execute, if required
            if (execute) {
                // Now substitute and execute the statement the appropriate
                // number of times
                String unsubstituted = sb.toString();
                for (int lowerBound = 0; lowerBound <= batchUpperLimit; lowerBound += batchSize) {
                    sql = unsubstituted;
                    // Substitute in the next pair of range parameters
                    if (doBatch) {
                        varAssignments.put("LOWERBOUND", String.valueOf(lowerBound));
                        varAssignments.put("UPPERBOUND", String.valueOf(lowerBound + batchSize - 1));
                    }
                    // Perform variable replacement using the ${var} format
                    for (Map.Entry<String, Object> entry : varAssignments.entrySet()) {
                        String var = entry.getKey();
                        Object val = entry.getValue();
                        sql = sql.replaceAll("\\$\\{" + var + "\\}", val.toString());
                    }
                    // Handle the 0/1 values that PostgreSQL doesn't
                    // translate to TRUE
                    if (this.dialect != null && this.dialect instanceof PostgreSQLDialect) {
                        sql = sql.replaceAll("\\$\\{TRUE\\}", "TRUE");
                    } else {
                        sql = sql.replaceAll("\\$\\{TRUE\\}", "1");
                    }
                    if (this.dialect != null && this.dialect instanceof MySQLInnoDBDialect) {
                        // note: enable bootstrap on MySQL 5.5 (eg. for
                        // auto-generated SQL, such as JBPM)
                        sql = sql.replaceAll("(?i)TYPE=InnoDB", "ENGINE=InnoDB");
                    }
                    Object fetchedVal = executeStatement(connection, sql, fetchColumnName, optional, line,
                            scriptFile);
                    if (fetchVarName != null && fetchColumnName != null) {
                        varAssignments.put(fetchVarName, fetchedVal);
                    }
                }
                sb.setLength(0);
                fetchVarName = null;
                fetchColumnName = null;
                doBatch = false;
                batchUpperLimit = 0;
                batchSize = 1;
            }
        }
    } finally {
        try {
            reader.close();
        } catch (Throwable e) {
        }
        try {
            scriptInputStream.close();
        } catch (Throwable e) {
        }
    }
}
From source file:com.cyclopsgroup.tornado.hibernate.taglib.CreateTablesTag.java
License: CDDL
/** * Override method getSqls in class CreateTablesTag * * @see com.cyclopsgroup.tornado.hibernate.taglib.ExecuteSqlsTagBase#getSqls(com.cyclopsgroup.tornado.hibernate.taglib.HibernateTag) */// w w w.ja va 2 s. co m protected String[] getSqls(HibernateTag hibernate) throws Exception { Dialect dialect = Dialect.getDialect(hibernate.getHibernateConfiguration().getProperties()); return hibernate.getHibernateConfiguration().generateSchemaCreationScript(dialect); }
From source file:com.cyclopsgroup.tornado.hibernate.taglib.DropTablesTag.java
License: CDDL
/** * Override method getSqls in class DropTablesTag * * @see com.cyclopsgroup.tornado.hibernate.taglib.ExecuteSqlsTagBase#getSqls(com.cyclopsgroup.tornado.hibernate.taglib.HibernateTag) *//*from w w w . j a va 2 s. c o m*/ protected String[] getSqls(HibernateTag hibernate) throws Exception { Dialect dialect = Dialect.getDialect(hibernate.getHibernateConfiguration().getProperties()); return hibernate.getHibernateConfiguration().generateDropSchemaScript(dialect); }
From source file:com.cyclopsgroup.tornado.hibernate.taglib.UpdateSchemaTag.java
License: CDDL
/** * Override method getSqls in class UpdateSchemaTag * * @see com.cyclopsgroup.tornado.hibernate.taglib.ExecuteSqlsTagBase#getSqls(com.cyclopsgroup.tornado.hibernate.taglib.HibernateTag) */// ww w.java2 s . c o m protected String[] getSqls(HibernateTag hibernate) throws Exception { Dialect dialect = Dialect.getDialect(hibernate.getHibernateConfiguration().getProperties()); Connection dbcon = hibernate.getConnection(); DatabaseMetadata dm = new DatabaseMetadata(dbcon, dialect); return hibernate.getHibernateConfiguration().generateSchemaUpdateScript(dialect, dm); }