List of usage examples for org.hibernate.boot.registry StandardServiceRegistryBuilder StandardServiceRegistryBuilder
public StandardServiceRegistryBuilder()
From source file:com.abcanthur.website.codegenhack.JPADatabase.java
License:Apache License
@SuppressWarnings("serial") @Override//w w w. j a va 2s. c om protected DSLContext create0() { if (connection == null) { String packages = getProperties().getProperty("packages"); if (isBlank(packages)) { packages = ""; log.warn("No packages defined", "It is highly recommended that you provide explicit packages to scan"); } try { connection = DriverManager.getConnection("jdbc:h2:mem:jooq-meta-extensions", "sa", ""); MetadataSources metadata = new MetadataSources(new StandardServiceRegistryBuilder() .applySetting("hibernate.dialect", "org.hibernate.dialect.H2Dialect") .applySetting("javax.persistence.schema-generation-connection", connection) // [#5607] JPADatabase causes warnings - This prevents them .applySetting(AvailableSettings.CONNECTION_PROVIDER, "com.abcanthur.website.codegenhack.CustomConnectionProvider") .build()); ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider( true); scanner.addIncludeFilter(new AnnotationTypeFilter(Entity.class)); for (String pkg : packages.split(",")) for (BeanDefinition def : scanner.findCandidateComponents(defaultIfBlank(pkg, "").trim())) metadata.addAnnotatedClass(Class.forName(def.getBeanClassName())); // This seems to be the way to do this in idiomatic Hibernate 5.0 API // See also: http://stackoverflow.com/q/32178041/521799 // SchemaExport export = new SchemaExport((MetadataImplementor) metadata.buildMetadata(), connection); // export.create(true, true); // Hibernate 5.2 broke 5.0 API again. Here's how to do this now: SchemaExport export = new SchemaExport(); export.create(EnumSet.of(TargetType.DATABASE), metadata.buildMetadata()); } catch (Exception e) { throw new DataAccessException("Error while exporting schema", e); } } return DSL.using(connection); }
From source file:com.aegeus.db.DbSessionFactory.java
License:Apache License
protected void build(DbIdentity identity, List<Class> pojoGroup) { Configuration cfg = new Configuration(); cfg.setProperty("hibernate.connection.driver", identity.getDriver()) .setProperty("hibernate.dialect", identity.getDialect()) .setProperty("hibernate.connection.url", identity.getUrl()) .setProperty("hibernate.connection.username", identity.getUsername()) .setProperty("hibernate.connection.password", identity.getPassword()) .setProperty("hibernate.connection.CharSet", "utf-8") .setProperty("hibernate.connection.characterEncoding", "utf-8") .setProperty("hibernate.connection.useUnicode", "true") .setProperty("current_session_context_class", "thread").setProperty("connection.pool_size", "4") .setProperty("hibernate.show_sql", "true"); for (Class pojo : pojoGroup) { cfg.addAnnotatedClass(pojo);//from ww w .j a va 2s . co m } StandardServiceRegistryBuilder builder = new StandardServiceRegistryBuilder(); builder.applySettings(cfg.getProperties()); factory = cfg.buildSessionFactory(builder.build()); }
From source file:com.alfredmuponda.lostandfound.persistence.HibernateUtil.java
private static SessionFactory buildSessionFactory() { try {/*from w w w. jav a 2 s .c o m*/ // Create the SessionFactory from hibernate.cfg.xml Configuration configuration = new Configuration(); configuration.configure("hibernate.cfg.xml"); System.out.println("Hibernate Configuration loaded"); ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder() .applySettings(configuration.getProperties()).build(); System.out.println("Hibernate serviceRegistry created"); SessionFactory sessionFactory = configuration.buildSessionFactory(serviceRegistry); return sessionFactory; } catch (Throwable ex) { // Make sure you log the exception, as it might be swallowed System.err.println("Initial SessionFactory creation failed." + ex); throw new ExceptionInInitializerError(ex); } }
From source file:com.alfredmuponda.lostandfound.persistence.HibernateUtil.java
private static SessionFactory buildSessionAnnotationFactory() { try {/* w w w. j a v a 2s . c o m*/ // Create the SessionFactory from hibernate.cfg.xml Configuration configuration = new Configuration(); configuration.configure("hibernate-annotation.cfg.xml"); System.out.println("Hibernate Annotation Configuration loaded"); ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder() .applySettings(configuration.getProperties()).build(); System.out.println("Hibernate Annotation serviceRegistry created"); SessionFactory sessionFactory = configuration.buildSessionFactory(serviceRegistry); return sessionFactory; } catch (Throwable ex) { // Make sure you log the exception, as it might be swallowed System.err.println("Initial SessionFactory creation failed." + ex); throw new ExceptionInInitializerError(ex); } }
From source file:com.alfredmuponda.lostandfound.persistence.HibernateUtil.java
private static SessionFactory buildSessionJavaConfigFactory() { try {/*www . j a v a2 s. c om*/ Configuration configuration = new Configuration(); //Create Properties, can be read from property files too Properties props = new Properties(); props.put("hibernate.connection.driver_class", "com.mysql.jdbc.Driver"); props.put("hibernate.connection.url", "jdbc:mysql://localhost/LostAndFound"); props.put("hibernate.connection.username", "hitrac"); props.put("hibernate.connection.password", "hitrac"); props.put("hibernate.current_session_context_class", "thread"); configuration.setProperties(props); //we can set mapping file or class with annotation //addClass(Employee1.class) will look for resource // com/journaldev/hibernate/model/Employee1.hbm.xml (not good) //configuration.addAnnotatedClass(Employee1.class); ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder() .applySettings(configuration.getProperties()).build(); System.out.println("Hibernate Java Config serviceRegistry created"); SessionFactory sessionFactory = configuration.buildSessionFactory(serviceRegistry); return sessionFactory; } catch (Throwable ex) { System.err.println("Initial SessionFactory creation failed." + ex); throw new ExceptionInInitializerError(ex); } }
From source file:com.almuradev.backpack.backend.DatabaseManager.java
License:MIT License
public static void init(Path databaseRootPath, String name) { final Configuration configuration = new Configuration(); configuration.setProperty("hibernate.connection.provider_class", CONNECTION_PROVIDER); configuration.setProperty("hibernate.dialect", DIALECT); configuration.setProperty("hibernate.hikari.dataSourceClassName", DRIVER_CLASSPATH); configuration.setProperty("hibernate.hikari.dataSource.url", DATA_SOURCE_PREFIX + databaseRootPath.toString() + File.separator + name + DATA_SOURCE_SUFFIX); configuration.setProperty("hibernate.hbm2ddl.auto", AUTO_SCHEMA_MODE); registerTables(configuration);// w w w . j a va 2 s . c o m sessionFactory = configuration.buildSessionFactory( new StandardServiceRegistryBuilder().applySettings(configuration.getProperties()).build()); }
From source file:com.amalto.core.storage.hibernate.HibernateStorage.java
License:Open Source License
/**
 * Prepares this storage for use. In order:
 * 1. validates the data model against unsupported features;
 * 2. (staging only) loads additional internal staging types;
 * 3. builds the dynamic storage class loader and its Hibernate configuration;
 * 4. optionally drops existing data and/or pre-initializes the database;
 * 5. computes the set of database-indexed fields per storage type and SQL dialect;
 * 6. generates the dynamic Hibernate entity classes;
 * 7. runs schema generation (create/validate/update) and boots the SessionFactory.
 *
 * The thread's context class loader is swapped to the storage class loader for steps 5-7
 * and always restored in the finally block.
 *
 * @param repository           the user-facing metadata repository to map to the RDBMS
 * @param optimizedExpressions expressions whose recommended indexes should be created (MASTER only)
 * @param force                when true, re-prepares even if already prepared (closes and re-inits first)
 * @param dropExistingData     when true, wipes existing database content before preparation
 * @throws IllegalArgumentException if no datasource has been set
 * @throws RuntimeException         wrapping any failure during preparation (storage is closed first)
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public synchronized void prepare(MetadataRepository repository, Set<Expression> optimizedExpressions,
        boolean force, boolean dropExistingData) {
    if (!force && isPrepared) {
        return; // No op operation
    }
    if (isPrepared) {
        close();
        internalInit();
    }
    if (dataSource == null) {
        throw new IllegalArgumentException("Datasource is not set."); //$NON-NLS-1$
    }
    // No support for data models including inheritance AND for g* XSD simple types AND fields that start with
    // X_TALEND_
    try {
        repository.accept(METADATA_CHECKER);
        userMetadataRepository = repository;
    } catch (Exception e) {
        throw new RuntimeException("Exception occurred during unsupported features check.", e); //$NON-NLS-1$
    }
    // Loads additional types for staging area.
    if (storageType == StorageType.STAGING) {
        userMetadataRepository = repository.copy(); // See TMDM-6938: prevents staging types to appear in master
                                                    // storage.
        userMetadataRepository
                .load(MetadataRepositoryAdmin.class.getResourceAsStream("stagingInternalTypes.xsd")); //$NON-NLS-1$
    }
    // Create class loader for storage's dynamically created classes.
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    Class<? extends StorageClassLoader> clazz;
    try {
        // Prefer the alternate class loader implementation if present on the classpath.
        try {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(ALTERNATE_CLASS_LOADER);
        } catch (ClassNotFoundException e) {
            clazz = (Class<? extends StorageClassLoader>) Class.forName(CLASS_LOADER);
        }
        Constructor<? extends StorageClassLoader> constructor = clazz.getConstructor(ClassLoader.class,
                String.class, StorageType.class);
        storageClassLoader = constructor.newInstance(contextClassLoader, storageName, storageType);
        storageClassLoader.setDataSourceConfiguration(dataSource);
        storageClassLoader.generateHibernateConfig(); // Checks if configuration can be generated.
    } catch (Exception e) {
        throw new RuntimeException("Could not create storage class loader", e); //$NON-NLS-1$
    }
    if (dropExistingData) {
        LOGGER.info("Cleaning existing database content."); //$NON-NLS-1$
        StorageCleaner cleaner = new JDBCStorageCleaner(new FullTextIndexCleaner());
        cleaner.clean(this);
    } else {
        LOGGER.info("*NOT* cleaning existing database content."); //$NON-NLS-1$
    }
    if (autoPrepare) {
        LOGGER.info("Preparing database before schema generation."); //$NON-NLS-1$
        StorageInitializer initializer = new JDBCStorageInitializer();
        if (initializer.supportInitialization(this)) {
            if (!initializer.isInitialized(this)) {
                initializer.initialize(this);
            } else {
                LOGGER.info("Database is already prepared."); //$NON-NLS-1$
            }
        } else {
            LOGGER.info("Datasource is not configured for automatic initialization."); //$NON-NLS-1$
        }
    } else {
        LOGGER.info("*NOT* preparing database before schema generation."); //$NON-NLS-1$
    }
    try {
        // Dynamic classes must be visible to Hibernate: swap in the storage class loader.
        Thread.currentThread().setContextClassLoader(storageClassLoader);
        // Mapping of data model types to RDBMS (i.e. 'flatten' representation of types).
        MetadataRepository internalRepository;
        try {
            InternalRepository typeEnhancer = getTypeEnhancer();
            internalRepository = userMetadataRepository.accept(typeEnhancer);
            mappingRepository = typeEnhancer.getMappings();
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during type mapping creation.", e); //$NON-NLS-1$
        }
        // Set fields to be indexed in database.
        Set<FieldMetadata> databaseIndexedFields = new HashSet<FieldMetadata>();
        switch (storageType) {
        case MASTER:
            // Adds indexes on user defined fields
            for (Expression optimizedExpression : optimizedExpressions) {
                Collection<FieldMetadata> indexedFields = RecommendedIndexes.get(optimizedExpression);
                for (FieldMetadata indexedField : indexedFields) {
                    // TMDM-5896: Don't index Composite Key fields
                    if (indexedField instanceof CompoundFieldMetadata) {
                        continue;
                    }
                    // TMDM-5311: Don't index TEXT fields
                    TypeMetadata indexedFieldType = indexedField.getType();
                    if (!isIndexable(indexedFieldType)) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue;
                    }
                    // Go up the containment tree in case containing type is anonymous.
                    ComplexTypeMetadata containingType = indexedField.getContainingType().getEntity();
                    TypeMapping mapping = mappingRepository.getMappingFromUser(containingType);
                    FieldMetadata databaseField = mapping.getDatabase(indexedField);
                    if (databaseField == null) {
                        LOGGER.error("Could not index field '" + indexedField + "' (" + indexedField.getPath() //$NON-NLS-1$ //$NON-NLS-2$
                                + "), ignoring index."); //$NON-NLS-1$
                        continue;
                    } else if (!isIndexable(databaseField.getType())) {
                        if (LOGGER.isDebugEnabled()) {
                            LOGGER.debug("Ignore index on field '" + indexedField.getName() //$NON-NLS-1$
                                    + "' because value (in database mapping) is stored in TEXT."); //$NON-NLS-1$
                        }
                        continue; // Don't take into indexed fields long text fields
                    }
                    // Database specific behaviors
                    switch (dataSource.getDialectName()) {
                    case SQL_SERVER:
                        // TMDM-8144: Don't index field name on SQL Server when size > 900
                        String maxLengthStr = indexedField.getType()
                                .<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                        if (maxLengthStr == null) { // go up the type inheritance tree to find max length annotation
                            TypeMetadata type = indexedField.getType();
                            while (!XMLConstants.W3C_XML_SCHEMA_NS_URI.equals(type.getNamespace())
                                    && !type.getSuperTypes().isEmpty()) {
                                type = type.getSuperTypes().iterator().next();
                                maxLengthStr = type.<String>getData(MetadataRepository.DATA_MAX_LENGTH);
                                if (maxLengthStr != null) {
                                    break;
                                }
                            }
                        }
                        if (maxLengthStr != null) {
                            Integer maxLength = Integer.parseInt(maxLengthStr);
                            if (maxLength > 900) {
                                LOGGER.warn("Skip index on field '" + indexedField.getPath() //$NON-NLS-1$
                                        + "' (too long value)."); //$NON-NLS-1$
                                continue;
                            }
                        }
                        break;
                    case H2:
                    case MYSQL:
                    case POSTGRES:
                    case DB2:
                    case ORACLE_10G:
                    default:
                        // Nothing to do for these databases
                        break;
                    }
                    databaseIndexedFields.add(databaseField);
                    // For non-instantiable containing types, also index the path leading to the field.
                    if (!databaseField.getContainingType().isInstantiable()) {
                        Collection<ComplexTypeMetadata> roots = RecommendedIndexes
                                .getRoots(optimizedExpression);
                        for (ComplexTypeMetadata root : roots) {
                            List<FieldMetadata> path = StorageMetadataUtils.path(
                                    mappingRepository.getMappingFromUser(root).getDatabase(), databaseField);
                            if (path.size() > 1) {
                                databaseIndexedFields.addAll(path.subList(0, path.size() - 1));
                            } else {
                                LOGGER.warn("Failed to properly index field '" + databaseField + "'."); //$NON-NLS-1$ //$NON-NLS-2$
                            }
                        }
                    }
                }
            }
            break;
        case STAGING:
            if (!optimizedExpressions.isEmpty()) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Ignoring " + optimizedExpressions.size() //$NON-NLS-1$
                            + " to optimize (disabled on staging area)."); //$NON-NLS-1$
                }
            }
            // Adds "staging status" / "staging block key" / "staging task id" as indexed fields
            for (TypeMapping typeMapping : mappingRepository.getAllTypeMappings()) {
                ComplexTypeMetadata database = typeMapping.getDatabase();
                if (database.hasField(METADATA_STAGING_STATUS)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_STATUS));
                }
                if (database.hasField(METADATA_STAGING_BLOCK_KEY)) {
                    databaseIndexedFields.add(database.getField(METADATA_STAGING_BLOCK_KEY));
                }
                if (database.hasField(METADATA_TASK_ID)) {
                    databaseIndexedFields.add(database.getField(METADATA_TASK_ID));
                }
            }
            break;
        case SYSTEM:
            // Nothing to index on SYSTEM
            break;
        }
        // Don't add FK in indexes if using H2
        if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.H2) {
            Iterator<FieldMetadata> indexedFields = databaseIndexedFields.iterator();
            while (indexedFields.hasNext()) {
                FieldMetadata field = indexedFields.next();
                if (field instanceof ReferenceFieldMetadata || field.isKey()) {
                    indexedFields.remove(); // H2 doesn't like indexes on PKs or FKs.
                }
            }
        }
        // Oracle needs a dedicated resolver (e.g. for its identifier length rules).
        switch (dataSource.getDialectName()) {
        case ORACLE_10G:
            tableResolver = new OracleStorageTableResolver(databaseIndexedFields,
                    dataSource.getNameMaxLength());
            break;
        default:
            tableResolver = new StorageTableResolver(databaseIndexedFields, dataSource.getNameMaxLength());
        }
        storageClassLoader.setTableResolver(tableResolver);
        // Master, Staging and System share same class creator.
        switch (storageType) {
        case MASTER:
        case STAGING:
        case SYSTEM:
            hibernateClassCreator = new ClassCreator(storageClassLoader);
            break;
        }
        // Create Hibernate classes (after some modifications to the types).
        try {
            internalRepository.accept(hibernateClassCreator);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during dynamic classes creation.", e); //$NON-NLS-1$
        }
        // Last step: configuration of Hibernate
        try {
            // Hibernate needs to have dynamic classes in context class loader during configuration.
            InputStream ehCacheConfig = storageClassLoader
                    .getResourceAsStream(StorageClassLoader.EHCACHE_XML_CONFIG);
            if (ehCacheConfig != null) {
                CacheManager.create(ehCacheConfig);
            }
            configuration.configure(StorageClassLoader.HIBERNATE_CONFIG);
            batchSize = Integer.parseInt(configuration.getProperty(Environment.STATEMENT_BATCH_SIZE));
            // Sets default schema for Oracle
            Properties properties = configuration.getProperties();
            if (dataSource.getDialectName() == RDBMSDataSource.DataSourceDialect.ORACLE_10G) {
                properties.setProperty(Environment.DEFAULT_SCHEMA, dataSource.getUserName());
            }
            // Logs DDL *before* initialization in case initialization fails (useful for debugging).
            if (LOGGER.isTraceEnabled()) {
                traceDDL();
            }
            // Customize schema generation according to datasource content.
            RDBMSDataSource.SchemaGeneration schemaGeneration = dataSource.getSchemaGeneration();
            List exceptions = Collections.emptyList();
            switch (schemaGeneration) {
            case CREATE:
                SchemaExport schemaExport = new SchemaExport(configuration);
                schemaExport.create(false, true);
                // Exception may happen during recreation (hibernate may perform statements on tables that does
                // not exist): these exceptions are supposed to be harmless (but log them to DEBUG just in case).
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Exception(s) occurred during schema creation:"); //$NON-NLS-1$
                    for (Object exceptionObject : schemaExport.getExceptions()) {
                        LOGGER.debug(((Exception) exceptionObject).getMessage());
                    }
                }
                break;
            case VALIDATE:
                SchemaValidator schemaValidator = new SchemaValidator(configuration);
                schemaValidator.validate(); // This is supposed to throw exception on validation issue.
                break;
            case UPDATE:
                SchemaUpdate schemaUpdate = new SchemaUpdate(configuration);
                schemaUpdate.execute(false, true);
                exceptions = schemaUpdate.getExceptions();
                break;
            }
            // Throw an exception if schema update met issue(s).
            if (!exceptions.isEmpty()) {
                StringBuilder sb = new StringBuilder();
                sb.append("Could not prepare database schema: "); //$NON-NLS-1$
                Iterator iterator = exceptions.iterator();
                while (iterator.hasNext()) {
                    Exception exception = (Exception) iterator.next();
                    if (exception instanceof SQLException) {
                        // Unroll the chained SQLExceptions so every cause appears in the message.
                        SQLException currentSQLException = (SQLException) exception;
                        while (currentSQLException != null) {
                            sb.append(currentSQLException.getMessage());
                            sb.append('\n');
                            currentSQLException = currentSQLException.getNextException();
                        }
                    } else if (exception != null) {
                        sb.append(exception.getMessage());
                    }
                    if (iterator.hasNext()) {
                        sb.append('\n');
                    }
                }
                throw new IllegalStateException(sb.toString());
            }
            // Initialize Hibernate
            Environment.verifyProperties(properties);
            ConfigurationHelper.resolvePlaceHolders(properties);
            ServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().applySettings(properties)
                    .build();
            factory = configuration.buildSessionFactory(serviceRegistry);
            MDMTransactionSessionContext.declareStorage(this, factory);
        } catch (Exception e) {
            throw new RuntimeException("Exception occurred during Hibernate initialization.", e); //$NON-NLS-1$
        }
        // All set: set prepared flag to true.
        isPrepared = true;
        LOGGER.info("Storage '" + storageName + "' (" + storageType + ") is ready."); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
    } catch (Throwable t) {
        try {
            // This prevent PermGen OOME in case of multiple failures to start.
            close();
        } catch (Exception e) {
            if (LOGGER.isDebugEnabled()) {
                LOGGER.debug("Error occurred during clean up following failed prepare", e); //$NON-NLS-1$
            }
        }
        throw new RuntimeException("Could not prepare '" + storageName + "'.", t); //$NON-NLS-1$ //$NON-NLS-2$
    } finally {
        // Always restore the caller's context class loader.
        Thread.currentThread().setContextClassLoader(contextClassLoader);
    }
}
From source file:com.archolding.util.ConnectionHelper.java
/**
 * Bootstraps the shared {@code sessionFactory} from the default Hibernate configuration
 * (hibernate.cfg.xml on the classpath).
 *
 * @throws ExceptionInInitializerError if the SessionFactory cannot be built; the
 *         partially-built service registry is destroyed first so its resources are released
 */
public static void setUp() {
    final StandardServiceRegistry serviceRegistry = new StandardServiceRegistryBuilder().configure().build();
    try {
        sessionFactory = new MetadataSources(serviceRegistry).buildMetadata().buildSessionFactory();
    } catch (Exception e) {
        // Release the registry so its services are not leaked on failure.
        StandardServiceRegistryBuilder.destroy(serviceRegistry);
        // BUG FIX: the original swallowed the exception, leaving sessionFactory null and
        // deferring the failure to a confusing NullPointerException at first use.
        throw new ExceptionInInitializerError(e);
    }
}
From source file:com.astonish.dropwizard.routing.hibernate.RoutingSessionFactoryFactory.java
License:Apache License
/** * Builds a {@link SessionFactory}/*from w w w . j ava 2 s. c o m*/ * @param bundle * the bundle * @param dbConfig * the dbconfig * @param connectionProvider * the connection provider * @param properties * the hibernate properties * @param entities * the persistent entities * @return {@link SessionFactory} */ private SessionFactory buildSessionFactory(RoutingHibernateBundle<?> bundle, DataSourceFactory dbConfig, ConnectionProvider connectionProvider, Map<String, String> properties, List<Class<?>> entities) { final Configuration configuration = new Configuration(); configuration.setProperty(AvailableSettings.CURRENT_SESSION_CONTEXT_CLASS, "managed"); configuration.setProperty(AvailableSettings.USE_SQL_COMMENTS, Boolean.toString(dbConfig.isAutoCommentsEnabled())); configuration.setProperty(AvailableSettings.USE_GET_GENERATED_KEYS, "true"); configuration.setProperty(AvailableSettings.GENERATE_STATISTICS, "true"); configuration.setProperty(AvailableSettings.USE_REFLECTION_OPTIMIZER, "true"); configuration.setProperty(AvailableSettings.ORDER_UPDATES, "true"); configuration.setProperty(AvailableSettings.ORDER_INSERTS, "true"); configuration.setProperty(AvailableSettings.USE_NEW_ID_GENERATOR_MAPPINGS, "true"); configuration.setProperty("jadira.usertype.autoRegisterUserTypes", "true"); for (Map.Entry<String, String> property : properties.entrySet()) { configuration.setProperty(property.getKey(), property.getValue()); } addAnnotatedClasses(configuration, entities); bundle.configure(configuration); final ServiceRegistry registry = new StandardServiceRegistryBuilder() .addService(ConnectionProvider.class, connectionProvider).applySettings(properties).build(); return configuration.buildSessionFactory(registry); }
From source file:com.baymet.dolu.util.HibernateUtil.java
License:Apache License
/**
 * Creates a {@link SessionFactory} from the default hibernate.cfg.xml and caches both
 * the service registry and the factory in their static fields.
 *
 * @return the newly built SessionFactory
 */
public static SessionFactory createSessionFactory() {
    // Load the default hibernate.cfg.xml from the classpath.
    final Configuration configuration = new Configuration().configure();
    serviceRegistry = new StandardServiceRegistryBuilder()
            .applySettings(configuration.getProperties())
            .build();
    sessionFactory = configuration.buildSessionFactory(serviceRegistry);
    return sessionFactory;
}