List of usage examples for org.hibernate.stat.Statistics#logSummary()
void logSummary();
From source file:com.evolveum.midpoint.repo.sql.AddGetObjectTest.java
License:Apache License
@Test(enabled = false) public <T extends ObjectType> void perfTest() throws Exception { Statistics stats = getFactory().getStatistics(); stats.setStatisticsEnabled(true);//from w ww. j a v a 2s . c om final File OBJECTS_FILE = new File("./src/test/resources/10k-users.xml"); List<PrismObject<? extends Objectable>> elements = prismContext.parseObjects(OBJECTS_FILE); long previousCycle = 0; long time = System.currentTimeMillis(); for (int i = 0; i < elements.size(); i++) { if (i % 500 == 0) { LOGGER.info("Previous cycle time {}. Next cycle: {}", new Object[] { (System.currentTimeMillis() - time - previousCycle), i }); previousCycle = System.currentTimeMillis() - time; } PrismObject<T> object = (PrismObject<T>) elements.get(i); repositoryService.addObject(object, null, new OperationResult("add performance test")); } LOGGER.info("Time to add objects ({}): {}", new Object[] { elements.size(), (System.currentTimeMillis() - time) }); stats.logSummary(); }
From source file:com.hazelcast.hibernate.LocalRegionFactoryDefaultTest.java
License:Open Source License
/**
 * Drives entity insert/load/update traffic through the Hazelcast-backed
 * second-level cache, then verifies the Hibernate {@code Statistics} counters
 * and logs the summary via {@code Statistics#logSummary()}.
 */
@Test
public void testEntity() {
    final HazelcastInstance hz = getHazelcastInstance(sf);
    assertNotNull(hz);
    final int count = 100;      // number of DummyEntity rows inserted
    final int childCount = 3;   // child properties per entity
    insertDummyEntities(count, childCount);
    List<DummyEntity> list = new ArrayList<DummyEntity>(count);
    // Load each entity once, evicting it from the session (first-level cache)
    // so the later updates operate on detached instances.
    Session session = sf.openSession();
    try {
        for (int i = 0; i < count; i++) {
            DummyEntity e = (DummyEntity) session.get(DummyEntity.class, (long) i);
            session.evict(e);
            list.add(e);
        }
    } finally {
        session.close();
    }
    // Update every detached entity in a single transaction.
    session = sf.openSession();
    Transaction tx = session.beginTransaction();
    try {
        for (DummyEntity dummy : list) {
            dummy.setDate(new Date());
            session.update(dummy);
        }
        tx.commit();
    } catch (Exception e) {
        tx.rollback();
        e.printStackTrace();
    } finally {
        session.close();
    }
    // Verify the statistics counters produced by the traffic above.
    Statistics stats = sf.getStatistics();
    assertEquals((childCount + 1) * count, stats.getEntityInsertCount());
    // twice put of entity and properties (on load and update) and once put of collection
    assertEquals((childCount + 1) * count * 2 + count, stats.getSecondLevelCachePutCount());
    assertEquals(childCount * count, stats.getEntityLoadCount());
    assertEquals(count, stats.getSecondLevelCacheHitCount());
    // collection cache miss
    assertEquals(count, stats.getSecondLevelCacheMissCount());
    stats.logSummary();
}
From source file:com.hazelcast.hibernate.RegionFactoryDefaultTest.java
License:Open Source License
/**
 * Drives entity insert/load/update traffic through the Hazelcast-backed
 * second-level cache, verifies both the Hibernate {@code Statistics} counters
 * and the underlying Hazelcast map sizes, exercises region eviction, and
 * finally logs the statistics summary via {@code Statistics#logSummary()}.
 */
@Test
public void testEntity() {
    final HazelcastInstance hz = getHazelcastInstance(sf);
    assertNotNull(hz);
    final int count = 100;      // number of DummyEntity rows inserted
    final int childCount = 3;   // child properties per entity
    insertDummyEntities(count, childCount);
    List<DummyEntity> list = new ArrayList<DummyEntity>(count);
    // Load each entity once, evicting it from the session (first-level cache)
    // so the later updates operate on detached instances.
    Session session = sf.openSession();
    try {
        for (int i = 0; i < count; i++) {
            DummyEntity e = (DummyEntity) session.get(DummyEntity.class, (long) i);
            session.evict(e);
            list.add(e);
        }
    } finally {
        session.close();
    }
    // Update every detached entity in a single transaction.
    session = sf.openSession();
    Transaction tx = session.beginTransaction();
    try {
        for (DummyEntity dummy : list) {
            dummy.setDate(new Date());
            session.update(dummy);
        }
        tx.commit();
    } catch (Exception e) {
        tx.rollback();
        e.printStackTrace();
    } finally {
        session.close();
    }
    Statistics stats = sf.getStatistics();
    // The Hazelcast maps that back the entity, property, and collection regions.
    Map<?, ?> cache = hz.getMap(DummyEntity.class.getName());
    Map<?, ?> propCache = hz.getMap(DummyProperty.class.getName());
    Map<?, ?> propCollCache = hz.getMap(DummyEntity.class.getName() + ".properties");
    assertEquals((childCount + 1) * count, stats.getEntityInsertCount());
    // twice put of entity and properties (on load and update) and once put of collection
    // TODO: fix next assertion ->
    // assertEquals((childCount + 1) * count * 2, stats.getSecondLevelCachePutCount());
    assertEquals(childCount * count, stats.getEntityLoadCount());
    assertEquals(count, stats.getSecondLevelCacheHitCount());
    // collection cache miss
    assertEquals(count, stats.getSecondLevelCacheMissCount());
    assertEquals(count, cache.size());
    assertEquals(count * childCount, propCache.size());
    assertEquals(count, propCollCache.size());
    // Evicting the entity regions must empty the backing maps.
    sf.getCache().evictEntityRegion(DummyEntity.class);
    sf.getCache().evictEntityRegion(DummyProperty.class);
    assertEquals(0, cache.size());
    assertEquals(0, propCache.size());
    stats.logSummary();
}
From source file:com.hazelcast.hibernate.RegionFactoryDefaultTest.java
License:Open Source License
/**
 * Runs the same query repeatedly to exercise the query cache (one miss/put,
 * then hits), deletes the entities, verifies the Hibernate {@code Statistics}
 * counters, and logs the summary via {@code Statistics#logSummary()}.
 */
@Test
public void testQuery() {
    final int entityCount = 10;  // number of DummyEntity rows inserted
    final int queryCount = 3;    // first execution misses; the rest hit the query cache
    insertDummyEntities(entityCount);
    List<DummyEntity> list = null;
    for (int i = 0; i < queryCount; i++) {
        list = executeQuery(sf);
        assertEquals(entityCount, list.size());
    }
    assertNotNull(list);
    // Delete every queried entity in a single transaction.
    Session session = sf.openSession();
    Transaction tx = session.beginTransaction();
    try {
        for (DummyEntity dummy : list) {
            session.delete(dummy);
        }
        tx.commit();
    } catch (Exception e) {
        tx.rollback();
        e.printStackTrace();
    } finally {
        session.close();
    }
    Statistics stats = sf.getStatistics();
    assertEquals(1, stats.getQueryCachePutCount());
    assertEquals(1, stats.getQueryCacheMissCount());
    assertEquals(queryCount - 1, stats.getQueryCacheHitCount());
    assertEquals(1, stats.getQueryExecutionCount());
    assertEquals(entityCount, stats.getEntityInsertCount());
    // FIXME
    // HazelcastRegionFactory puts into L2 cache 2 times; 1 on insert, 1 on query execution
    // assertEquals(entityCount, stats.getSecondLevelCachePutCount());
    assertEquals(entityCount, stats.getEntityLoadCount());
    assertEquals(entityCount, stats.getEntityDeleteCount());
    assertEquals(entityCount * (queryCount - 1) * 2, stats.getSecondLevelCacheHitCount());
    // collection cache miss
    assertEquals(entityCount, stats.getSecondLevelCacheMissCount());
    stats.logSummary();
}
From source file:kiwi.action.debug.StatisticsAction.java
License:Open Source License
/** Writes a summary of the current Hibernate statistics to the log at INFO level. */
public void dumpHibernateStatistics() {
    hibernateSession.getSessionFactory().getStatistics().logSummary();
}
From source file:ome.server.itests.scalability.SqlHibernateDatasourceComparisonTest.java
License:Open Source License
@Test(enabled = false) public void testCompareDirect() throws Exception { prime();//from ww w. j a v a2 s . c o m Statistics rawstats = rawsf.getStatistics(); Statistics omestats = omesf.getStatistics(); System.out.println("Clearing stats"); rawstats.clear(); omestats.clear(); Category.getInstance("org.hibernate.SQL").setLevel(Level.DEBUG); // callHibernateAlone(); callHibernateChannels(); // callHibernateLinks(); System.out.println("**** RawStats"); rawstats.logSummary(); // callOmeroAlone(); callOmeroChannels(); // callOmeroLinks(); System.out.println("**** OmeStats"); omestats.logSummary(); System.out.println(new Report()); }
From source file:org.infinispan.test.hibernate.cache.commons.functional.ReadWriteTest.java
License:LGPL
/**
 * Verifies collection caching for {@code Item.items}: persists a parent with
 * one child, loads it to populate the collection region, asserts cache hit
 * counts and in-memory element counts (logging the statistics summary along
 * the way), then tears the entities down.
 *
 * @throws Exception propagated from the transactional session helpers
 */
@Test
public void testCollectionCache() throws Exception {
    final Statistics stats = sessionFactory().getStatistics();
    stats.clear();
    final Item item = new Item("chris", "Chris's Item");
    final Item another = new Item("another", "Owned Item");
    item.addItem(another);
    withTxSession(s -> {
        s.persist(item);
        s.persist(another);
    });
    // The collection has been removed, but we can't add it again immediately using putFromLoad
    TIME_SERVICE.advance(1);
    // First load populates the collection region.
    withTxSession(s -> {
        Item loaded = s.load(Item.class, item.getId());
        assertEquals(1, loaded.getItems().size());
    });
    String itemsRegionName = Item.class.getName() + ".items";
    SecondLevelCacheStatistics cStats = stats.getSecondLevelCacheStatistics(itemsRegionName);
    assertEquals(1, cStats.getElementCountInMemory());
    // Second load must hit the cached collection.
    withTxSession(s -> {
        Item loadedWithCachedCollection = (Item) s.load(Item.class, item.getId());
        stats.logSummary();
        assertEquals(item.getName(), loadedWithCachedCollection.getName());
        assertEquals(item.getItems().size(), loadedWithCachedCollection.getItems().size());
        assertEquals(1, cStats.getHitCount());
        assertEquals(1,
                TEST_SESSION_ACCESS.getRegion(sessionFactory(), itemsRegionName).getElementCountInMemory());
        // Clean up: detach the child from its owner before deleting both.
        Item itemElement = loadedWithCachedCollection.getItems().iterator().next();
        itemElement.setOwner(null);
        loadedWithCachedCollection.getItems().clear();
        s.delete(itemElement);
        s.delete(loadedWithCachedCollection);
    });
}
From source file:org.infinispan.test.hibernate.cache.functional.ReadWriteTest.java
License:LGPL
/**
 * Verifies collection caching for {@code Item.items}: persists a parent with
 * one child, loads it to populate the collection region, asserts cache hit
 * counts and cached entry counts (logging the statistics summary along the
 * way), then tears the entities down.
 *
 * @throws Exception propagated from the transactional session helpers
 */
@Test
public void testCollectionCache() throws Exception {
    final Statistics stats = sessionFactory().getStatistics();
    stats.clear();
    final Item item = new Item("chris", "Chris's Item");
    final Item another = new Item("another", "Owned Item");
    item.addItem(another);
    withTxSession(s -> {
        s.persist(item);
        s.persist(another);
    });
    // The collection has been removed, but we can't add it again immediately using putFromLoad
    TIME_SERVICE.advance(1);
    // First load populates the collection region.
    withTxSession(s -> {
        Item loaded = s.load(Item.class, item.getId());
        assertEquals(1, loaded.getItems().size());
    });
    SecondLevelCacheStatistics cStats = stats.getSecondLevelCacheStatistics(Item.class.getName() + ".items");
    assertEquals(1, cStats.getElementCountInMemory());
    // Second load must hit the cached collection.
    withTxSession(s -> {
        Item loadedWithCachedCollection = (Item) s.load(Item.class, item.getId());
        stats.logSummary();
        assertEquals(item.getName(), loadedWithCachedCollection.getName());
        assertEquals(item.getItems().size(), loadedWithCachedCollection.getItems().size());
        assertEquals(1, cStats.getHitCount());
        Map cacheEntries = cStats.getEntries();
        assertEquals(1, cacheEntries.size());
        // Clean up: detach the child from its owner before deleting both.
        Item itemElement = loadedWithCachedCollection.getItems().iterator().next();
        itemElement.setOwner(null);
        loadedWithCachedCollection.getItems().clear();
        s.delete(itemElement);
        s.delete(loadedWithCachedCollection);
    });
}
From source file:org.jboss.as.jpa.hibernate4.management.HibernateManagementAdaptor.java
License:Open Source License
private void registerStatisticOperations(ManagementResourceRegistration jpaHibernateRegistration) { /**/*w ww. ja v a 2 s . c o m*/ * reset all statistics */ DescriptionProvider clear = new DescriptionProvider() { @Override public ModelNode getModelDescription(Locale locale) { return HibernateDescriptions.clear(locale); } }; OperationStepHandler clearHandler = new AbstractMetricsHandler() { @Override void handle(final ModelNode response, final String name, Statistics stats, OperationContext context) { stats.clear(); } }; jpaHibernateRegistration.registerOperationHandler(OPERATION_CLEAR, clearHandler, clear); /** * log statistics at INFO level */ DescriptionProvider summary = new DescriptionProvider() { @Override public ModelNode getModelDescription(Locale locale) { return HibernateDescriptions.summary(locale); } }; OperationStepHandler summaryHandler = new AbstractMetricsHandler() { @Override void handle(final ModelNode response, final String name, Statistics stats, OperationContext context) { stats.logSummary(); } }; jpaHibernateRegistration.registerOperationHandler(OPERATION_SUMMARY, summaryHandler, summary); }