List of usage examples for org.hibernate.stat Statistics getEntityLoadCount
long getEntityLoadCount();
From source file: com.francetelecom.clara.cloud.commons.HibernateStatsHelper.java
License: Apache License
/** * Ensure no regression higher than 5%/*w w w . j a va 2 s .co m*/ * @param assertDuration if true durations are verified, if false durations are not verified */ private static void checkStats(Map<HibernateStatsReferenceType, Long> refs, long duration, Statistics stats, boolean assertDuration) throws ObjectNotFoundException, MalformedURLException { List<AssertionError> failedAsserts = new ArrayList<AssertionError>(); // Reference values: these must be updated when you optimize your code // or if new values are explained and normal. final long DURATION = refs.get(HibernateStatsReferenceType.DURATION); final int QUERY_COUNT = refs.get(HibernateStatsReferenceType.QUERY_COUNT).intValue(); final int QUERY_MAX_TIME_MS = refs.get(HibernateStatsReferenceType.QUERY_MAX_TIME_MS).intValue(); final int ENTITY_FETCH_COUNT = refs.get(HibernateStatsReferenceType.ENTITY_FETCH_COUNT).intValue(); final int ENTITY_LOAD_COUNT = refs.get(HibernateStatsReferenceType.ENTITY_LOAD_COUNT).intValue(); final int ENTITY_INSERT_COUNT = refs.get(HibernateStatsReferenceType.ENTITY_INSERT_COUNT).intValue(); final int ENTITY_DELETE_COUNT = refs.get(HibernateStatsReferenceType.ENTITY_DELETE_COUNT).intValue(); final int ENTITY_UPDATE_COUNT = refs.get(HibernateStatsReferenceType.ENTITY_UPDATE_COUNT).intValue(); final int COLLECTION_FETCH_COUNT = refs.get(HibernateStatsReferenceType.COLLECTION_FETCH_COUNT).intValue(); final int COLLECTION_LOAD_COUNT = refs.get(HibernateStatsReferenceType.COLLECTION_LOAD_COUNT).intValue(); final int COLLECTION_RECREATE_COUNT = refs.get(HibernateStatsReferenceType.COLLECTION_RECREATE_COUNT) .intValue(); final int COLLECTION_REMOVE_COUNT = refs.get(HibernateStatsReferenceType.COLLECTION_REMOVE_COUNT) .intValue(); final int COLLECTION_UPDATE_COUNT = refs.get(HibernateStatsReferenceType.COLLECTION_UPDATE_COUNT) .intValue(); // The number of completed transactions (failed and successful) must // match number of transactions completed without failure preAssertEquals("There 
are transaction failures", stats.getTransactionCount(), stats.getSuccessfulTransactionCount(), failedAsserts); // Total number of queries executed. preAssertTrue( "Total number of queries executed increased more than 5% (ref=" + QUERY_COUNT + "): " + stats.getQueryExecutionCount(), stats.getQueryExecutionCount() <= (QUERY_COUNT * 1.05), failedAsserts); if (stats.getQueryExecutionCount() < (QUERY_COUNT * 0.95)) logger.warn("/!\\ You should update reference value QUERY_COUNT (ref=" + QUERY_COUNT + ") to " + stats.getQueryExecutionCount()); preAssertTrue( "ENTITY_DELETE_COUNT increased more than 5% (ref=" + ENTITY_DELETE_COUNT + "): " + stats.getEntityDeleteCount(), stats.getEntityDeleteCount() <= (ENTITY_DELETE_COUNT * 1.05), failedAsserts); if (stats.getEntityDeleteCount() < (ENTITY_DELETE_COUNT * 0.95)) logger.warn("/!\\ You should update reference value ENTITY_DELETE_COUNT (ref=" + ENTITY_DELETE_COUNT + ") to " + stats.getEntityDeleteCount()); preAssertTrue( "ENTITY_UPDATE_COUNT increased more than 5% (ref=" + ENTITY_UPDATE_COUNT + "): " + stats.getEntityUpdateCount(), stats.getEntityUpdateCount() <= (ENTITY_UPDATE_COUNT * 1.05), failedAsserts); if (stats.getEntityUpdateCount() < (ENTITY_UPDATE_COUNT * 0.95)) logger.warn("/!\\ You should update reference value ENTITY_UPDATE_COUNT (ref=" + ENTITY_UPDATE_COUNT + ") to " + stats.getEntityUpdateCount()); if (stats.getCollectionRecreateCount() < (COLLECTION_RECREATE_COUNT * 0.95)) logger.warn("/!\\ You should update reference value COLLECTION_RECREATE_COUNT (ref=" + COLLECTION_RECREATE_COUNT + ") to " + stats.getCollectionRecreateCount()); preAssertTrue( "COLLECTION_REMOVE_COUNT increased more than 5% (ref=" + COLLECTION_REMOVE_COUNT + "): " + stats.getCollectionRemoveCount(), stats.getCollectionRemoveCount() <= (COLLECTION_REMOVE_COUNT * 1.05), failedAsserts); if (stats.getCollectionRemoveCount() < (COLLECTION_REMOVE_COUNT * 0.95)) logger.warn("/!\\ You should update reference value COLLECTION_REMOVE_COUNT (ref=" + 
COLLECTION_REMOVE_COUNT + ") to " + stats.getCollectionRemoveCount()); preAssertTrue( "COLLECTION_UPDATE_COUNT increased more than 5% (ref=" + COLLECTION_UPDATE_COUNT + "): " + stats.getCollectionUpdateCount(), stats.getCollectionUpdateCount() <= (COLLECTION_UPDATE_COUNT * 1.05), failedAsserts); if (stats.getCollectionUpdateCount() < (COLLECTION_UPDATE_COUNT * 0.95)) logger.warn("/!\\ You should update reference value COLLECTION_UPDATE_COUNT (ref=" + COLLECTION_UPDATE_COUNT + ") to " + stats.getCollectionUpdateCount()); // Entities statistics preAssertTrue( "ENTITY_FETCH_COUNT increased more than 5% (ref=" + ENTITY_FETCH_COUNT + "): " + stats.getEntityFetchCount(), stats.getEntityFetchCount() < (ENTITY_FETCH_COUNT * 1.05), failedAsserts); if (stats.getEntityFetchCount() < (ENTITY_FETCH_COUNT * 0.95)) logger.warn("/!\\ You should update reference value ENTITY_FETCH_COUNT (ref=" + ENTITY_FETCH_COUNT + ") to " + stats.getEntityFetchCount()); preAssertTrue( "ENTITY_LOAD_COUNT increased more than 5% (ref=" + ENTITY_LOAD_COUNT + "): " + stats.getEntityLoadCount(), stats.getEntityLoadCount() <= (ENTITY_LOAD_COUNT * 1.05), failedAsserts); if (stats.getEntityLoadCount() < (ENTITY_LOAD_COUNT * 0.95)) logger.warn("/!\\ You should update reference value ENTITY_LOAD_COUNT (ref=" + ENTITY_LOAD_COUNT + ") to " + stats.getEntityLoadCount()); preAssertTrue( "ENTITY_INSERT_COUNT increased more than 5% (ref=" + ENTITY_INSERT_COUNT + "): " + stats.getEntityInsertCount(), stats.getEntityInsertCount() <= (ENTITY_INSERT_COUNT * 1.05), failedAsserts); if (stats.getEntityInsertCount() < (ENTITY_INSERT_COUNT * 0.95)) logger.warn("/!\\ You should update reference value ENTITY_INSERT_COUNT (ref=" + ENTITY_INSERT_COUNT + ") to " + stats.getEntityInsertCount()); // Collections statistics preAssertTrue( "COLLECTION_FETCH_COUNT increased more than 5% (ref=" + COLLECTION_FETCH_COUNT + "): " + stats.getCollectionFetchCount(), stats.getCollectionFetchCount() <= (COLLECTION_FETCH_COUNT * 1.05), 
failedAsserts); if (stats.getCollectionFetchCount() < (COLLECTION_FETCH_COUNT * 0.95)) logger.warn("/!\\ You should update reference value COLLECTION_FETCH_COUNT (ref=" + COLLECTION_FETCH_COUNT + ") to " + stats.getCollectionFetchCount()); preAssertTrue( "COLLECTION_LOAD_COUNT increased more than 5% (ref=" + COLLECTION_LOAD_COUNT + "): " + stats.getCollectionLoadCount(), stats.getCollectionLoadCount() <= (COLLECTION_LOAD_COUNT * 1.05), failedAsserts); if (stats.getCollectionLoadCount() < (COLLECTION_LOAD_COUNT * 0.95)) logger.warn("/!\\ You should update reference value COLLECTION_LOAD_COUNT (ref=" + COLLECTION_LOAD_COUNT + ") to " + stats.getCollectionLoadCount()); preAssertTrue( "COLLECTION_RECREATE_COUNT increased more than 5% (ref=" + COLLECTION_RECREATE_COUNT + "): " + stats.getCollectionRecreateCount(), stats.getCollectionRecreateCount() <= (COLLECTION_RECREATE_COUNT * 1.05), failedAsserts); if (assertDuration) { // Time of the slowest query executed. preAssertTrue( "Time of the slowest query executed increased more than 50% (ref=" + QUERY_MAX_TIME_MS + "): " + stats.getQueryExecutionMaxTime(), stats.getQueryExecutionMaxTime() <= (QUERY_MAX_TIME_MS * 1.50), failedAsserts); if (stats.getQueryExecutionMaxTime() < (QUERY_MAX_TIME_MS * 0.50)) logger.warn("/!\\ You should update reference value QUERY_MAX_TIME_MS (ref=" + QUERY_MAX_TIME_MS + ") to " + stats.getQueryExecutionMaxTime()); // Check test duration preAssertTrue("Total duration of the test increased more than 5% (ref=" + DURATION + "): " + duration, duration < (DURATION * 1.05), failedAsserts); if (duration <= (DURATION * 0.85)) logger.warn( "/!\\ You should update reference value DURATION (ref=" + DURATION + ") to " + duration); } StringBuffer formattedFailedAsserts = new StringBuffer(); for (AssertionError failedAssert : failedAsserts) { formattedFailedAsserts.append(failedAssert.getMessage()); formattedFailedAsserts.append("\n"); } String advice = "Analyse the code with your favorite profiler, then see 
where performances decrease and optimize the code. If you consider this new value as 'normal' then set a new reference value."; assertTrue(failedAsserts.size() + " Hibernate stats violations: \n" + formattedFailedAsserts.toString() + advice, failedAsserts.isEmpty()); }
From source file:com.francetelecom.clara.cloud.scalability.helper.StatisticsHelper.java
License:Apache License
/** * Log the current statistics// w ww . jav a 2s . c o m * * @param stats hibernate statistics */ public static void logStats(Statistics stats) { logger.info("Database statistics"); logger.info(" Number of connection requests : " + stats.getConnectCount()); logger.info(" Session flushes : " + stats.getFlushCount()); logger.info(" Transactions : " + stats.getTransactionCount()); logger.info(" Successful transactions : " + stats.getSuccessfulTransactionCount()); logger.info(" Sessions opened : " + stats.getSessionOpenCount()); logger.info(" Sessions closed : " + stats.getSessionCloseCount()); logger.info(" Queries executed : " + stats.getQueryExecutionCount()); logger.info(" Max query time : " + stats.getQueryExecutionMaxTime()); logger.info(" Max time query : " + stats.getQueryExecutionMaxTimeQueryString()); logger.info("Collection statistics"); logger.info(" Collections fetched : " + stats.getCollectionFetchCount()); logger.info(" Collections loaded : " + stats.getCollectionLoadCount()); logger.info(" Collections rebuilt : " + stats.getCollectionRecreateCount()); logger.info(" Collections batch deleted : " + stats.getCollectionRemoveCount()); logger.info(" Collections batch updated : " + stats.getCollectionUpdateCount()); logger.info("Object statistics"); logger.info(" Objects fetched : " + stats.getEntityFetchCount()); logger.info(" Objects loaded : " + stats.getEntityLoadCount()); logger.info(" Objects inserted : " + stats.getEntityInsertCount()); logger.info(" Objects deleted : " + stats.getEntityDeleteCount()); logger.info(" Objects updated : " + stats.getEntityUpdateCount()); logger.info("Cache statistics"); double chit = stats.getQueryCacheHitCount(); double cmiss = stats.getQueryCacheMissCount(); logger.info(" Cache hit count : " + chit); logger.info(" Cache miss count : " + cmiss); logger.info(" Cache hit ratio : " + (chit / (chit + cmiss))); String[] entityNames = stats.getEntityNames(); Arrays.sort(entityNames); for (String entityName : entityNames) { 
Class<?> entityClass = null; try { entityClass = Class.forName(entityName); } catch (ClassNotFoundException e) { logger.error("Unable to load class for " + entityName, e); } entityStats(stats, entityClass); } //Uncomment these lines to trace every query (can generate a lot of logs) String[] qs = stats.getQueries(); for (String q : qs) { queryStats(stats, q); } String[] slcrn = stats.getSecondLevelCacheRegionNames(); for (String s : slcrn) { secondLevelStats(stats, s); } }
From source file:com.hazelcast.hibernate.LocalRegionFactoryDefaultTest.java
License:Open Source License
@Test public void testEntity() { final HazelcastInstance hz = getHazelcastInstance(sf); assertNotNull(hz);/*ww w. j av a2 s .co m*/ final int count = 100; final int childCount = 3; insertDummyEntities(count, childCount); List<DummyEntity> list = new ArrayList<DummyEntity>(count); Session session = sf.openSession(); try { for (int i = 0; i < count; i++) { DummyEntity e = (DummyEntity) session.get(DummyEntity.class, (long) i); session.evict(e); list.add(e); } } finally { session.close(); } session = sf.openSession(); Transaction tx = session.beginTransaction(); try { for (DummyEntity dummy : list) { dummy.setDate(new Date()); session.update(dummy); } tx.commit(); } catch (Exception e) { tx.rollback(); e.printStackTrace(); } finally { session.close(); } Statistics stats = sf.getStatistics(); assertEquals((childCount + 1) * count, stats.getEntityInsertCount()); // twice put of entity and properties (on load and update) and once put of collection assertEquals((childCount + 1) * count * 2 + count, stats.getSecondLevelCachePutCount()); assertEquals(childCount * count, stats.getEntityLoadCount()); assertEquals(count, stats.getSecondLevelCacheHitCount()); // collection cache miss assertEquals(count, stats.getSecondLevelCacheMissCount()); stats.logSummary(); }
From source file:com.hazelcast.hibernate.RegionFactoryDefaultTest.java
License:Open Source License
@Test public void testEntity() { final HazelcastInstance hz = getHazelcastInstance(sf); assertNotNull(hz);/*from ww w. j ava 2 s.co m*/ final int count = 100; final int childCount = 3; insertDummyEntities(count, childCount); List<DummyEntity> list = new ArrayList<DummyEntity>(count); Session session = sf.openSession(); try { for (int i = 0; i < count; i++) { DummyEntity e = (DummyEntity) session.get(DummyEntity.class, (long) i); session.evict(e); list.add(e); } } finally { session.close(); } session = sf.openSession(); Transaction tx = session.beginTransaction(); try { for (DummyEntity dummy : list) { dummy.setDate(new Date()); session.update(dummy); } tx.commit(); } catch (Exception e) { tx.rollback(); e.printStackTrace(); } finally { session.close(); } Statistics stats = sf.getStatistics(); Map<?, ?> cache = hz.getMap(DummyEntity.class.getName()); Map<?, ?> propCache = hz.getMap(DummyProperty.class.getName()); Map<?, ?> propCollCache = hz.getMap(DummyEntity.class.getName() + ".properties"); assertEquals((childCount + 1) * count, stats.getEntityInsertCount()); // twice put of entity and properties (on load and update) and once put of collection // TODO: fix next assertion -> // assertEquals((childCount + 1) * count * 2, stats.getSecondLevelCachePutCount()); assertEquals(childCount * count, stats.getEntityLoadCount()); assertEquals(count, stats.getSecondLevelCacheHitCount()); // collection cache miss assertEquals(count, stats.getSecondLevelCacheMissCount()); assertEquals(count, cache.size()); assertEquals(count * childCount, propCache.size()); assertEquals(count, propCollCache.size()); sf.getCache().evictEntityRegion(DummyEntity.class); sf.getCache().evictEntityRegion(DummyProperty.class); assertEquals(0, cache.size()); assertEquals(0, propCache.size()); stats.logSummary(); }
From source file:com.hazelcast.hibernate.RegionFactoryDefaultTest.java
License:Open Source License
@Test public void testQuery() { final int entityCount = 10; final int queryCount = 3; insertDummyEntities(entityCount);//from ww w. j a v a 2s . c o m List<DummyEntity> list = null; for (int i = 0; i < queryCount; i++) { list = executeQuery(sf); assertEquals(entityCount, list.size()); } assertNotNull(list); Session session = sf.openSession(); Transaction tx = session.beginTransaction(); try { for (DummyEntity dummy : list) { session.delete(dummy); } tx.commit(); } catch (Exception e) { tx.rollback(); e.printStackTrace(); } finally { session.close(); } Statistics stats = sf.getStatistics(); assertEquals(1, stats.getQueryCachePutCount()); assertEquals(1, stats.getQueryCacheMissCount()); assertEquals(queryCount - 1, stats.getQueryCacheHitCount()); assertEquals(1, stats.getQueryExecutionCount()); assertEquals(entityCount, stats.getEntityInsertCount()); // FIXME // HazelcastRegionFactory puts into L2 cache 2 times; 1 on insert, 1 on query execution // assertEquals(entityCount, stats.getSecondLevelCachePutCount()); assertEquals(entityCount, stats.getEntityLoadCount()); assertEquals(entityCount, stats.getEntityDeleteCount()); assertEquals(entityCount * (queryCount - 1) * 2, stats.getSecondLevelCacheHitCount()); // collection cache miss assertEquals(entityCount, stats.getSecondLevelCacheMissCount()); stats.logSummary(); }
From source file:com.javaetmoi.core.persistence.hibernate.TestLazyLoadingUtil.java
License:Apache License
/**
 * Tests the method {@link LazyLoadingUtil#deepHydrate(org.hibernate.Session, Object)}:
 * loads an employee inside a single transaction, deeply hydrates its object
 * graph, then asserts — outside the transaction — that the whole graph is
 * initialized and equal to the in-memory fixture, and that the generated SQL
 * statement counts match expectations.
 */
@Test
public void deepResolveEmployee() {
    // Loading an entity and hydrating its graph is done in a single transaction
    Employee dbJames = transactionTemplate.execute(new TransactionCallback<Employee>() {
        public Employee doInTransaction(TransactionStatus status) {
            Employee employee = hibernateTemplate.get(Employee.class, 1);
            return LazyLoadingUtil.deepHydrate(hibernateTemplate.getSessionFactory().getCurrentSession(),
                    employee);
        }
    });
    // Assertions
    // - LazyInitializationException not thrown: the graph must already be
    //   fully initialized, even though the session is closed by now.
    assertNotNull("No LazyInitializationException should be thrown", dbJames.getAddresses().get("home"));
    // - Addresses
    assertEquals("Same addresses size", james.getAddresses().size(), dbJames.getAddresses().size());
    Address dbJamesParis = dbJames.getAddresses().get(paris.getType());
    LOGGER.debug("James Paris address toString(): {}", dbJamesParis.toString());
    ReflectionAssert.assertReflectionEquals("Comparing James Paris address with ReflectionAssert", paris,
            dbJamesParis, ReflectionComparatorMode.LENIENT_ORDER);
    assertEquals("Compare James Paris address", paris, dbJamesParis);
    Address dbJamesLaDefense = dbJames.getAddresses().get(ladefense.getType());
    LOGGER.debug("James La Defense address toString(): {}", dbJamesLaDefense.toString());
    ReflectionAssert.assertReflectionEquals("Comparing James La Defense address with ReflectionAssert",
            ladefense, dbJamesLaDefense, ReflectionComparatorMode.LENIENT_ORDER);
    assertEquals("Compare James La Defense address", dbJamesLaDefense, ladefense);
    // - Projects
    assertTrue(dbJames.getProjects().contains(android));
    ReflectionAssert.assertReflectionEquals(
            "Compare in-memory and database loaded projects with RelectionUtils", james.getProjects(),
            dbJames.getProjects(), ReflectionComparatorMode.LENIENT_ORDER);
    assertEquals(james.getProjects(), dbJames.getProjects());
    // - Full employee
    LOGGER.debug("James toString(): {}", dbJames.toString());
    ReflectionAssert.assertReflectionEquals(
            "Compare in-memory and database loaded employees with RelectionUtils", dbJames, james,
            ReflectionComparatorMode.LENIENT_ORDER);
    assertEquals("Compare in-memory and database loaded employees with the equals method", james, dbJames);
    // - Generated SQL statements number
    Statistics statistics = hibernateTemplate.getSessionFactory().getStatistics();
    assertEquals("All 8 entities are loaded: france, james, tom, android, iphone, paris, la dfense and lyon",
            8, statistics.getEntityLoadCount());
    assertEquals(
            "6 collections should be fetched: james' adresses, james' projects, iPhone members, tom's adresses, tom's projects, android members",
            6, statistics.getCollectionFetchCount());
}
From source file:com.thoughtworks.go.server.service.support.HibernateInformationProvider.java
License:Apache License
@Override public Map<String, Object> asJson() { LinkedHashMap<String, Object> json = new LinkedHashMap<>(); Statistics statistics = sessionFactory.getStatistics(); if (!statistics.isStatisticsEnabled()) { return json; }//from ww w. jav a2 s . c om json.put("EntityDeleteCount", statistics.getEntityDeleteCount()); json.put("EntityInsertCount", statistics.getEntityInsertCount()); json.put("EntityLoadCount", statistics.getEntityLoadCount()); json.put("EntityFetchCount", statistics.getEntityFetchCount()); json.put("EntityUpdateCount", statistics.getEntityUpdateCount()); json.put("QueryExecutionCount", statistics.getQueryExecutionCount()); json.put("QueryExecutionMaxTime", statistics.getQueryExecutionMaxTime()); json.put("QueryExecutionMaxTimeQueryString", statistics.getQueryExecutionMaxTimeQueryString()); json.put("QueryCacheHitCount", statistics.getQueryCacheHitCount()); json.put("QueryCacheMissCount", statistics.getQueryCacheMissCount()); json.put("QueryCachePutCount", statistics.getQueryCachePutCount()); json.put("FlushCount", statistics.getFlushCount()); json.put("ConnectCount", statistics.getConnectCount()); json.put("SecondLevelCacheHitCount", statistics.getSecondLevelCacheHitCount()); json.put("SecondLevelCacheMissCount", statistics.getSecondLevelCacheMissCount()); json.put("SecondLevelCachePutCount", statistics.getSecondLevelCachePutCount()); json.put("SessionCloseCount", statistics.getSessionCloseCount()); json.put("SessionOpenCount", statistics.getSessionOpenCount()); json.put("CollectionLoadCount", statistics.getCollectionLoadCount()); json.put("CollectionFetchCount", statistics.getCollectionFetchCount()); json.put("CollectionUpdateCount", statistics.getCollectionUpdateCount()); json.put("CollectionRemoveCount", statistics.getCollectionRemoveCount()); json.put("CollectionRecreateCount", statistics.getCollectionRecreateCount()); json.put("StartTime", statistics.getStartTime()); json.put("SecondLevelCacheRegionNames", 
statistics.getSecondLevelCacheRegionNames()); json.put("SuccessfulTransactionCount", statistics.getSuccessfulTransactionCount()); json.put("TransactionCount", statistics.getTransactionCount()); json.put("PrepareStatementCount", statistics.getPrepareStatementCount()); json.put("CloseStatementCount", statistics.getCloseStatementCount()); json.put("OptimisticFailureCount", statistics.getOptimisticFailureCount()); LinkedHashMap<String, Object> queryStats = new LinkedHashMap<>(); json.put("Queries", queryStats); String[] queries = statistics.getQueries(); for (String query : queries) { queryStats.put(query, statistics.getQueryStatistics(query)); } LinkedHashMap<String, Object> entityStatistics = new LinkedHashMap<>(); json.put("EntityStatistics", entityStatistics); String[] entityNames = statistics.getEntityNames(); for (String entityName : entityNames) { entityStatistics.put(entityName, statistics.getEntityStatistics(entityName)); } LinkedHashMap<String, Object> roleStatistics = new LinkedHashMap<>(); json.put("RoleStatistics", roleStatistics); String[] roleNames = statistics.getCollectionRoleNames(); for (String roleName : roleNames) { roleStatistics.put(roleName, statistics.getCollectionStatistics(roleName)); } return json; }
From source file:de.iew.framework.hibernate.statistics.StatisticsLogger.java
License:Apache License
/**
 * Enables statistics collection on the session factory and, when debug logging
 * is active, logs every available Hibernate statistic as one multi-line block.
 */
public void logStatistics() {
    Statistics statistics = this.sessionFactory.getStatistics();
    // Turn collection on so subsequent calls report real numbers.
    statistics.setStatisticsEnabled(true);
    StringBuilder sb = new StringBuilder("\nStatistics");
    sb.append("\nCloseStatementCount: ").append(statistics.getCloseStatementCount());
    sb.append("\nEntityDeleteCount: ").append(statistics.getEntityDeleteCount());
    sb.append("\nEntityInsertCount: ").append(statistics.getEntityInsertCount());
    sb.append("\nEntityLoadCount: ").append(statistics.getEntityLoadCount());
    sb.append("\nEntityFetchCount: ").append(statistics.getEntityFetchCount());
    sb.append("\nEntityUpdateCount: ").append(statistics.getEntityUpdateCount());
    sb.append("\nQueryExecutionCount: ").append(statistics.getQueryExecutionCount());
    sb.append("\nQueryExecutionMaxTime: ").append(statistics.getQueryExecutionMaxTime());
    sb.append("\nQueryExecutionMaxTimeQueryString: ").append(statistics.getQueryExecutionMaxTimeQueryString());
    sb.append("\nQueryCacheHitCount: ").append(statistics.getQueryCacheHitCount());
    sb.append("\nQueryCacheMissCount: ").append(statistics.getQueryCacheMissCount());
    sb.append("\nQueryCachePutCount: ").append(statistics.getQueryCachePutCount());
    sb.append("\nNaturalIdQueryExecutionCount: ").append(statistics.getNaturalIdQueryExecutionCount());
    sb.append("\nNaturalIdQueryExecutionMaxTime: ").append(statistics.getNaturalIdQueryExecutionMaxTime());
    sb.append("\nNaturalIdQueryExecutionMaxTimeRegion: ")
            .append(statistics.getNaturalIdQueryExecutionMaxTimeRegion());
    sb.append("\nNaturalIdCacheHitCount: ").append(statistics.getNaturalIdCacheHitCount());
    sb.append("\nNaturalIdCacheMissCount: ").append(statistics.getNaturalIdCacheMissCount());
    sb.append("\nNaturalIdCachePutCount: ").append(statistics.getNaturalIdCachePutCount());
    sb.append("\nUpdateTimestampsCacheHitCount: ").append(statistics.getUpdateTimestampsCacheHitCount());
    sb.append("\nUpdateTimestampsCacheMissCount: ").append(statistics.getUpdateTimestampsCacheMissCount());
    sb.append("\nUpdateTimestampsCachePutCount: ").append(statistics.getUpdateTimestampsCachePutCount());
    sb.append("\nFlushCount: ").append(statistics.getFlushCount());
    sb.append("\nConnectCount: ").append(statistics.getConnectCount());
    sb.append("\nSecondLevelCacheHitCount: ").append(statistics.getSecondLevelCacheHitCount());
    sb.append("\nSecondLevelCacheMissCount: ").append(statistics.getSecondLevelCacheMissCount());
    sb.append("\nSecondLevelCachePutCount: ").append(statistics.getSecondLevelCachePutCount());
    sb.append("\nSessionCloseCount: ").append(statistics.getSessionCloseCount());
    sb.append("\nSessionOpenCount: ").append(statistics.getSessionOpenCount());
    sb.append("\nCollectionLoadCount: ").append(statistics.getCollectionLoadCount());
    sb.append("\nCollectionFetchCount: ").append(statistics.getCollectionFetchCount());
    sb.append("\nCollectionUpdateCount: ").append(statistics.getCollectionUpdateCount());
    sb.append("\nCollectionRemoveCount: ").append(statistics.getCollectionRemoveCount());
    sb.append("\nCollectionRecreateCount: ").append(statistics.getCollectionRecreateCount());
    sb.append("\nStartTime: ").append(statistics.getStartTime());
    // Use Arrays.toString: appending the raw String[] would log only the
    // array's identity hash ("[Ljava.lang.String;@...") instead of its contents.
    sb.append("\nQueries: ").append(java.util.Arrays.toString(statistics.getQueries()));
    sb.append("\nEntityNames: ").append(java.util.Arrays.toString(statistics.getEntityNames()));
    sb.append("\nCollectionRoleNames: ").append(java.util.Arrays.toString(statistics.getCollectionRoleNames()));
    sb.append("\nSecondLevelCacheRegionNames: ")
            .append(java.util.Arrays.toString(statistics.getSecondLevelCacheRegionNames()));
    sb.append("\nSuccessfulTransactionCount: ").append(statistics.getSuccessfulTransactionCount());
    sb.append("\nTransactionCount: ").append(statistics.getTransactionCount());
    sb.append("\nPrepareStatementCount: ").append(statistics.getPrepareStatementCount());
    // CloseStatementCount was appended twice in the original; the duplicate
    // trailing entry has been removed.
    sb.append("\nOptimisticFailureCount: ").append(statistics.getOptimisticFailureCount());
    if (log.isDebugEnabled()) {
        log.debug(sb);
    }
}
From source file:gr.interamerican.bo2.impl.open.hibernate.HibernateBo2Utils.java
License:Open Source License
/** * Logs {@link SessionFactory} statistics. * /*from w w w .j a va 2 s . c om*/ * @param sessionFactory */ @SuppressWarnings("nls") public static void logSessionFactoryStatistics(SessionFactory sessionFactory) { Statistics statistics = sessionFactory.getStatistics(); Object[] stats = new Object[] { statistics.getCollectionFetchCount(), statistics.getCollectionLoadCount(), statistics.getEntityFetchCount(), statistics.getEntityLoadCount(), statistics.getFlushCount(), statistics.getOptimisticFailureCount(), statistics.getQueryExecutionMaxTime(), statistics.getQueryExecutionMaxTimeQueryString(), statistics.getSessionOpenCount(), statistics.getSecondLevelCacheHitCount(), statistics.getSecondLevelCacheMissCount() }; StringBuilder sb = new StringBuilder(); for (Object o : stats) { String s = (o == null) ? "null" : o.toString(); sb.append(s + StringConstants.COMMA + StringConstants.SPACE); } sb.setLength(sb.length() - 2); Debug.debug(logger, "Factory statistics: [ " + sb.toString() + " ]"); }
From source file:models.papmon.HibernateStat.java
License:Open Source License
public HibernateStat(Date refDate) { created = refDate;/* w ww. j ava2s. c o m*/ Session session = (Session) JPA.em().getDelegate(); Statistics stats = session.getSessionFactory().getStatistics(); queryExecutionCount = stats.getQueryExecutionCount(); queryExecutionMaxTime = stats.getQueryExecutionMaxTime(); sessionOpenCount = stats.getSessionOpenCount(); sessionCloseCount = stats.getSessionCloseCount(); entityLoadCount = stats.getEntityLoadCount(); entityInsertCount = stats.getEntityInsertCount(); entityUpdateCount = stats.getEntityUpdateCount(); entityDeleteCount = stats.getEntityDeleteCount(); entityFetchCount = stats.getEntityFetchCount(); queryCacheHitCount = stats.getQueryCacheHitCount(); queryCacheMissCount = stats.getQueryCacheMissCount(); queryCachePutCount = stats.getQueryCachePutCount(); secondLevelCacheHitCount = stats.getSecondLevelCacheHitCount(); secondLevelCacheMissCount = stats.getSecondLevelCacheMissCount(); secondLevelCachePutCount = stats.getSecondLevelCachePutCount(); stats.clear(); }