Usage examples for org.hibernate.engine.spi.EntityKey#getIdentifier
Method signature: public Serializable getIdentifier()
From source file:cc.alcina.framework.entity.impl.jboss.JPAHibernateImpl.java
License:Apache License
@Override public Set<HiliLocator> getSessionEntityLocators(EntityManager entityManager) { Set<HiliLocator> result = new LinkedHashSet<>(); try {//from w w w. jav a 2 s . c om SessionImplementor sessionImpl = (SessionImplementor) entityManager.getDelegate(); PersistenceContext persistenceContext = sessionImpl.getPersistenceContext(); Field entitiesField = StatefulPersistenceContext.class.getDeclaredField("entitiesByKey"); Field proxiesField = StatefulPersistenceContext.class.getDeclaredField("proxiesByKey"); Field entityPersisterField = EntityKey.class.getDeclaredField("persister"); entitiesField.setAccessible(true); entityPersisterField.setAccessible(true); proxiesField.setAccessible(true); List<Map> maps = Arrays.asList((Map) entitiesField.get(persistenceContext), (Map) proxiesField.get(persistenceContext)); for (Map map : maps) { for (Object obj : map.keySet()) { EntityKey key = (EntityKey) obj; long id = (long) key.getIdentifier(); SingleTableEntityPersister persister = (SingleTableEntityPersister) entityPersisterField .get(key); Class clazz = persister.getEntityMetamodel().getEntityType().getReturnedClass(); result.add(new HiliLocator(clazz, id, 0)); } } } catch (Exception e) { e.printStackTrace(); } return result; }
From source file:com.amalto.core.storage.hibernate.HibernateStorageTransaction.java
License:Open Source License
/** * Dumps all current entities in <code>session</code> using data model information from <code>storage</code>. * * @param session The Hibernate session that failed to be committed. * @param storage A {@link com.amalto.core.storage.hibernate.HibernateStorage} that can be used to retrieve metadata information for all objects in * <code>session</code>. *//*from w w w . java 2 s . c o m*/ private static void dumpTransactionContent(Session session, HibernateStorage storage) { Level currentLevel = Level.INFO; if (LOGGER.isEnabledFor(currentLevel)) { Set<EntityKey> failedKeys = new HashSet<>(session.getStatistics().getEntityKeys()); // Copy content to avoid concurrent modification issues. int i = 1; ObjectDataRecordReader reader = new ObjectDataRecordReader(); MappingRepository mappingRepository = storage.getTypeEnhancer().getMappings(); StorageClassLoader classLoader = storage.getClassLoader(); DataRecordXmlWriter writer = new DataRecordXmlWriter(); ResettableStringWriter xmlContent = new ResettableStringWriter(); for (EntityKey failedKey : failedKeys) { String entityTypeName = StringUtils.substringAfterLast(failedKey.getEntityName(), "."); //$NON-NLS-1$ LOGGER.log(currentLevel, "Entity #" + i++ + " (type=" + entityTypeName + ", id=" + failedKey.getIdentifier() + ")"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$ try { storage.getClassLoader().bind(Thread.currentThread()); Wrapper o = (Wrapper) ((SessionImpl) session).getPersistenceContext().getEntity(failedKey); if (!session.isReadOnly(o)) { if (o != null) { ComplexTypeMetadata type = classLoader .getTypeFromClass(classLoader.loadClass(failedKey.getEntityName())); if (type != null) { DataRecord record = reader.read(mappingRepository.getMappingFromDatabase(type), o); writer.write(record, xmlContent); LOGGER.log(currentLevel, xmlContent + "\n(taskId='" + o.taskId() + "', timestamp='" //$NON-NLS-1$//$NON-NLS-2$ + o.timestamp() + "')"); //$NON-NLS-1$ } else { LOGGER.warn("Could not find data model type for 
object " + o); //$NON-NLS-1$ } } else { LOGGER.warn("Could not find an object for entity " + failedKey); //$NON-NLS-1$ } } } catch (ObjectNotFoundException missingRefException) { LOGGER.log(currentLevel, "Can not log entity: contains a unresolved reference to '" //$NON-NLS-1$ + missingRefException.getEntityName() + "' with id '" //$NON-NLS-1$ + missingRefException.getIdentifier() + "'"); //$NON-NLS-1$ } catch (Exception serializationException) { LOGGER.log(currentLevel, "Failed to log entity content for type " + entityTypeName //$NON-NLS-1$ + " (enable DEBUG for exception details)."); //$NON-NLS-1$ if (LOGGER.isDebugEnabled()) { LOGGER.debug("Serialization exception occurred.", serializationException); //$NON-NLS-1$ } } finally { xmlContent.reset(); storage.getClassLoader().unbind(Thread.currentThread()); } if (i > TRANSACTION_DUMP_MAX) { if (!LOGGER.isDebugEnabled()) { int more = failedKeys.size() - i; if (more > 0) { LOGGER.log(currentLevel, "and " + more + " more... (enable DEBUG for full dump)"); //$NON-NLS-1$ //$NON-NLS-2$ } return; } else { currentLevel = Level.DEBUG; // Continue the dump but with a DEBUG level } } } } }
From source file:org.jpos.ee.DB.java
License:Open Source License
/**
 * Writes the current Hibernate session statistics — every entity key and every
 * collection key — to this DB instance's log. Does nothing when no log is set;
 * reports "Session is not open" when there is no active session.
 */
@SuppressWarnings({ "unchecked" })
public void printStats() {
    if (getLog() == null) {
        return;
    }
    LogEvent info = getLog().createInfo();
    if (session == null) {
        info.addMessage("Session is not open");
    } else {
        info.addMessage("==== STATISTICS ====");
        SessionStatistics statistics = session().getStatistics();
        info.addMessage("==== ENTITIES ====");
        for (EntityKey entityKey : statistics.getEntityKeys()) {
            info.addMessage(String.format("[%s] %s", entityKey.getIdentifier(), entityKey.getEntityName()));
        }
        info.addMessage("==== COLLECTIONS ====");
        for (CollectionKey collectionKey : statistics.getCollectionKeys()) {
            info.addMessage(String.format("[%s] %s", collectionKey.getKey(), collectionKey.getRole()));
        }
        info.addMessage("=====================");
    }
    Logger.log(info);
}
From source file:org.jpos.transaction.DebugDB.java
License:Open Source License
/**
 * Debug participant: logs every entity currently in the context's Hibernate
 * session (with its current lock mode) and every collection key, then votes
 * PREPARED without joining or modifying the transaction.
 */
@Override
public int prepare(long id, Serializable context) {
    Context ctx = (Context) context;
    DB db = (DB) ctx.get(TxnConstants.DB);
    Session session = db.session();
    SessionStatistics statistics = session.getStatistics();
    ctx.log(String.format("ENTITIES: (%d)", statistics.getEntityCount()));
    for (EntityKey entityKey : statistics.getEntityKeys()) {
        // Re-fetch by name+id so we can ask the session for the lock mode.
        Object entity = session.get(entityKey.getEntityName(), entityKey.getIdentifier());
        LockMode lockMode = session.getCurrentLockMode(entity);
        ctx.log(String.format("[%s] %s %s", entityKey.getIdentifier(), entityKey.getEntityName(), lockMode));
    }
    ctx.log("==== COLLECTIONS ====");
    for (CollectionKey collectionKey : statistics.getCollectionKeys()) {
        ctx.log(String.format("[%s] %s", collectionKey.getKey(), collectionKey.getRole()));
    }
    ctx.log("=====================");
    return PREPARED | READONLY | NO_JOIN;
}
From source file:org.squashtest.tm.service.internal.library.TreeNodeCopier.java
License:Open Source License
private <T> void cleanSomeCache(Class<T> c) { // em.unwrap(Session.class).flush(); em.flush();/* w w w . j a va2 s. c o m*/ Collection<Object> entities = new ArrayList<>(); for (Object obj : em.unwrap(Session.class).getStatistics().getEntityKeys()) { EntityKey key = (EntityKey) obj; Object entity = em.unwrap(Session.class).get(key.getEntityName(), key.getIdentifier()); if (!c.isAssignableFrom(entity.getClass())) { entities.add(entity); } } genericDao.clearFromCache(entities); em.flush(); // em.unwrap(Session.class).flush(); }