Example usage for org.hibernate.mapping Map.Entry getKey

List of usage examples for org.hibernate.mapping Map.Entry getKey

Introduction

On this page you can find example usages of org.hibernate.mapping Map.Entry getKey.

Prototype

public KeyValue getKey() 

Usage
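
The examples below share one basic pattern: iterate a map's entry set and use Map.Entry getKey and getValue to drive a Hibernate call, most often to bind named query parameters or to apply configuration settings. The following minimal sketch shows the recurring parameter-binding variant; the query string, entity name, and helper class are illustrative assumptions, not taken from any of the projects listed below.

import java.util.HashMap;
import java.util.Map;

import org.hibernate.Query;
import org.hibernate.Session;

public class NamedParameterBinder {

    /**
     * Binds every entry of the parameter map to the query by name:
     * entry.getKey() supplies the parameter name, entry.getValue() the value.
     */
    public static Query bindParameters(Session session, String hql, Map<String, Object> params) {
        Query query = session.createQuery(hql);
        for (Map.Entry<String, Object> entry : params.entrySet()) {
            query.setParameter(entry.getKey(), entry.getValue());
        }
        return query;
    }

    // Illustrative usage only; the entity and its status property are assumptions.
    public static void example(Session session) {
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("status", "RELEASE");
        bindParameters(session, "select c from Content as c where c.status = :status", params).list();
    }
}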

From source file:com.medigy.persist.model.data.EntitySeedDataPopulator.java

License:Open Source License

public void populateSeedData() throws HibernateException {
    com.medigy.persist.model.session.Session processSession = new ProcessSession();
    processSession.setProcessName(EntitySeedDataPopulator.class.getName());

    if (!useEjb)
        session.save(processSession);
    else
        entityManager.persist(processSession);
    SessionManager.getInstance().pushActiveSession(processSession);

    if (log.isInfoEnabled())
        log.info("Initializing with seed data");
    globalParty = new Party(Party.SYS_GLOBAL_PARTY_NAME);

    if (!useEjb)
        session.save(globalParty);
    else
        entityManager.persist(globalParty);

    final Map<Class, Class<? extends CachedReferenceEntity>> referenceEntitiesAndCachesMap = HibernateUtil
            .getReferenceEntitiesAndRespectiveEnums(configuration);
    for (final Map.Entry<Class, Class<? extends CachedReferenceEntity>> entry : referenceEntitiesAndCachesMap
            .entrySet()) {
        final Class aClass = entry.getKey();
        CachedReferenceEntity[] cachedEntities = (CachedReferenceEntity[]) entry.getValue().getEnumConstants();
        Object[][] data = new Object[cachedEntities.length][2];
        int i = 0;
        for (final CachedReferenceEntity c : cachedEntities) {
            data[i][0] = c.getCode();
            data[i][1] = c.getLabel(); // LABEL
            i++;
        }
        if (log.isInfoEnabled())
            log.info(aClass.getCanonicalName() + " cached enums added.");
        populateCachedReferenceEntities(aClass, cachedEntities, new String[] { "code", "label" }, data);
    }

    final Map<Class, Class<? extends CachedCustomReferenceEntity>> customReferenceEntitiesAndCachesMap = HibernateUtil
            .getCustomReferenceEntitiesAndRespectiveEnums(configuration);
    for (final Map.Entry<Class, Class<? extends CachedCustomReferenceEntity>> entry : customReferenceEntitiesAndCachesMap
            .entrySet()) {
        final Class aClass = entry.getKey();
        CachedCustomReferenceEntity[] cachedEntities = (CachedCustomReferenceEntity[]) entry.getValue()
                .getEnumConstants();
        Object[][] data = new Object[cachedEntities.length][4];
        int i = 0;
        for (final CachedCustomReferenceEntity c : cachedEntities) {
            data[i][0] = c.getCode();
            data[i][1] = c.getLabel(); // LABEL
            data[i][2] = globalParty;

            if (c instanceof CachedCustomHierarchyReferenceEntity)
                data[i][3] = ((CachedCustomHierarchyReferenceEntity) c).getParent();
            else
                data[i][3] = null;
            i++;
        }
        if (log.isInfoEnabled())
            log.info(aClass.getCanonicalName() + " cached custom enums added.");
        populateCachedCustomReferenceEntities(aClass, cachedEntities,
                new String[] { "code", "label", "party", "parentEntity" }, data);
    }
    //loadExternalReferenceData();
    //populateEntityCacheData();
    //HibernateUtil.commitTransaction();
    SessionManager.getInstance().popActiveSession();
}

From source file:com.netspective.medigy.util.HibernateConfiguration.java

License:Open Source License

public String[] generateSchemaCreationScript(final Dialect dialect) throws HibernateException {
    final String[] existingDDL = super.generateSchemaCreationScript(dialect);
    if (dialect instanceof HSQLDialect) {
        for (int i = 0; i < existingDDL.length; i++)
            existingDDL[i] = existingDDL[i].replaceFirst("create table ", "create cached table ");
    }

    if (referenceEntitiesAndCachesMap.size() == 0)
        return existingDDL;

    final List<String> newDDL = new ArrayList<String>();
    for (String s : existingDDL)
        newDDL.add(s);

    for (final Map.Entry<Class, Class> entry : referenceEntitiesAndCachesMap.entrySet()) {
        final Class refEntityClass = entry.getKey();
        final Class refEntityCacheEnum = entry.getValue();

        final String tableName = ((Table) refEntityClass.getAnnotation(Table.class)).name();

        for (final Object x : refEntityCacheEnum.getEnumConstants()) {
            final CachedReferenceEntity cached = (CachedReferenceEntity) x;
            //TODO: this is kind of dumb right now, we need to do proper formatting of output, etc.
            newDDL.add("insert into " + tableName + " (type_id, type_label) values ('" + cached.getId() + "', '"
                    + cached.getLabel() + "')");
        }
    }

    return newDDL.toArray(new String[newDDL.size()]);
}

From source file:com.wavemaker.runtime.data.hibernate.DataServiceMetaData_Hib.java

License:Open Source License

private DataOperationFactory initFactory(final Session session) {
    return new DataOperationFactory() {

        // this is magic, and has to match the name of the
        // generated example query(ies).
        private static final String GENERATED_QUERY_NAME = "ExampleHQLQuery1";

        @Override
        public Collection<String> getEntityClassNames() {
            return DataServiceMetaData_Hib.this.entityClassNames;
        }

        @Override
        public List<Tuple.Three<String, String, Boolean>> getQueryInputs(String queryName) {

            List<Tuple.Three<String, String, Boolean>> rtn = new ArrayList<Tuple.Three<String, String, Boolean>>();

            NamedQueryDefinition def = getQueryDefinition(queryName);

            Map<String, String> m = CastUtils.cast(def.getParameterTypes());

            for (Map.Entry<String, String> e : m.entrySet()) {
                Tuple.Two<String, Boolean> t = DataServiceUtils.getQueryType(e.getValue());
                rtn.add(Tuple.tuple(e.getKey(), t.v1, t.v2));
            }

            return rtn;
        }

        @Override
        @SuppressWarnings("unchecked")
        public Collection<String> getQueryNames() {

            Collection<String> rtn = new HashSet<String>();

            Configuration cfg = getConfiguration();

            rtn.addAll(cfg.getNamedQueries().keySet());
            rtn.addAll(cfg.getNamedSQLQueries().keySet());

            return rtn;
        }

        @Override
        public List<String> getQueryReturnNames(String operationName, String queryName) {

            Query query = session.getNamedQuery(queryName);

            try {
                String[] names = query.getReturnAliases();
                if (names != null) {
                    return Arrays.asList(names);
                }
            } catch (RuntimeException ex) {
            }

            return Collections.emptyList();

        }

        @Override
        public boolean requiresResultWrapper(String operationName, String queryName) {
            NamedQueryDefinition query = getQueryDefinition(queryName);
            return DataServiceUtils.requiresResultWrapper(query.getQuery());
        }

        @Override
        public List<String> getQueryReturnTypes(String operationName, String queryName) {

            List<String> rtn = new ArrayList<String>();

            Type[] rtnTypes = getReturnTypes(queryName);

            String queryString = getQueryDefinition(queryName).getQuery();

            if (rtnTypes == null) {
                // Must be DML
                if (!DataServiceUtils.isDML(queryString)) {
                    // throw new AssertionError(
                    // "Query " + queryName + " doesn't return anything");
                    // actually if it is a sql query we also end up here -
                    // the tests have at least one...
                }
                rtn.add(DataServiceConstants.DML_OPERATION_RTN_TYPE.getName());
            } else {

                if (DataServiceUtils.isDynamicInstantiationQuery(queryString)) {
                    String className = DataServiceUtils.getDynamicInstantiationClassName(queryString);

                    if (!StringUtils.isFullyQualified(className)) {
                        if (DataServiceMetaData_Hib.this.entityNames.contains(className)) {
                            className = StringUtils.fq(getDataPackage(), className);
                        }
                    }
                    rtn.add(className);
                } else {
                    for (Type t : rtnTypes) {
                        rtn.add(getJavaTypeName(t));
                    }
                }
            }

            return rtn;
        }

        @Override
        public boolean queryReturnsSingleResult(String operationName, String queryName) {

            // hack for generated queries - only required for initial
            // ServiceDefinition instance that is used to add the service
            if (queryName.equals(GENERATED_QUERY_NAME)) {
                return true;
            }

            // to make existing tests happy
            if (queryName.startsWith("get") && queryName.endsWith("ById")) {
                return true;
            }

            return false;
        }

        private Type[] getReturnTypes(String queryName) {

            Type[] rtn = null;

            Query query = session.getNamedQuery(queryName);

            // this blows up for named sql queries (vs named hql queries)
            // exception msg from hibernate is: "not yet implemented for sql
            // queries"
            try {
                rtn = query.getReturnTypes();
            } catch (RuntimeException ex) {
                if (DataServiceLoggers.metaDataLogger.isDebugEnabled()) {
                    DataServiceLoggers.metaDataLogger
                            .debug("Failed to determine rtn type for query \"" + queryName + "\"");
                }
            }
            return rtn;
        }
    };
}

From source file:com.wavemaker.tools.data.WMHibernateConfigurationExporter.java

License:Open Source License

@Override
@SuppressWarnings("unchecked")
public void doStart() throws ExporterException {
    PrintWriter pw = null;
    File file = null;
    try {
        if (this.output == null) {
            file = new File(getOutputDirectory(), "hibernate.cfg.xml");
            getTemplateHelper().ensureExistence(file);
            pw = new PrintWriter(new FileWriter(file));
            getArtifactCollector().addFile(file, "cfg.xml");
        } else {
            pw = new PrintWriter(this.output);
        }

        pw.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
                + "<!DOCTYPE hibernate-configuration PUBLIC\r\n"
                + "               \"-//Hibernate/Hibernate Configuration DTD 3.0//EN\"\r\n"
                + "               \"http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd\">\r\n"
                + "<hibernate-configuration>");

        boolean ejb3 = Boolean.valueOf((String) getProperties().get("ejb3")).booleanValue();

        Map props = new TreeMap();
        if (getConfiguration() != null) {
            props.putAll(getConfiguration().getProperties());
        }
        if (this.customProperties != null) {
            props.putAll(this.customProperties);
        }

        String sfname = (String) props.get(Environment.SESSION_FACTORY_NAME);
        pw.println("    <session-factory" + (sfname == null ? "" : " name=\"" + sfname + "\"") + ">");

        Map<String, String> ignoredProperties = new HashMap<String, String>();
        ignoredProperties.put(Environment.SESSION_FACTORY_NAME, null);
        ignoredProperties.put(Environment.HBM2DDL_AUTO, "false");
        ignoredProperties.put("hibernate.temp.use_jdbc_metadata_defaults", null);

        Set set = props.entrySet();
        Iterator iterator = set.iterator();
        while (iterator.hasNext()) {
            Map.Entry element = (Map.Entry) iterator.next();
            String key = (String) element.getKey();
            if (ignoredProperties.containsKey(key)) {
                Object ignoredValue = ignoredProperties.get(key);
                if (ignoredValue == null || element.getValue().equals(ignoredValue)) {
                    continue;
                }
            }
            if (key.startsWith("hibernate.")) { // if not starting with
                // hibernate. not relevant
                // for cfg.xml
                pw.println("        <property name=\"" + key + "\">" + element.getValue() + "</property>");
            }
        }

        if (getConfiguration() != null) {
            Iterator classMappings = getConfiguration().getClassMappings();
            while (classMappings.hasNext()) {
                PersistentClass element = (PersistentClass) classMappings.next();
                if (element instanceof RootClass) {
                    dump(pw, ejb3, element);
                }
            }

            // once more for adding generated hql files
            pw.println();
            pw.println();
            classMappings = getConfiguration().getClassMappings();
            while (classMappings.hasNext()) {
                PersistentClass element = (PersistentClass) classMappings.next();
                if (element instanceof RootClass) {
                    dumpQueryFile(pw, element);
                }
            }
        }
        pw.println("    </session-factory>\r\n" + "</hibernate-configuration>");

    }

    catch (IOException e) {
        throw new ExporterException("Problems while creating hibernate.cfg.xml", e);
    } finally {
        if (pw != null) {
            pw.flush();
            pw.close();
        }
    }
}

From source file:com.xpn.xwiki.store.migration.hibernate.R40000XWIKI6990DataMigration.java

License:Open Source License

/**
 * Generic procedure to convert identifiers with some protection against conflicting ids.
 *
 * @param map the conversion map
 * @param callback the callback implementing the hibernate actions
 * @throws XWikiException if an error occurs during conversion
 */
private void convertDbId(final Map<Long, Long> map, IdConversionHibernateCallback callback)
        throws XWikiException {
    int count = map.size() + 1;
    while (!map.isEmpty() && count > map.size()) {
        count = map.size();
        for (Iterator<Map.Entry<Long, Long>> it = map.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Long, Long> entry = it.next();

            if (!map.containsKey(entry.getValue())) {
                callback.setOldId(entry.getKey());
                callback.setNewId(entry.getValue());

                try {
                    getStore().executeWrite(getXWikiContext(), callback);
                } catch (Exception e) {
                    throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
                            XWikiException.ERROR_XWIKI_STORE_MIGRATION,
                            getName() + " migration failed while converting ID from [" + entry.getKey()
                                    + "] to [" + entry.getValue() + "]",
                            e);
                }
                it.remove();
            }
        }
    }

    if (!map.isEmpty()) {
        throw new XWikiException(XWikiException.MODULE_XWIKI_STORE, XWikiException.ERROR_XWIKI_STORE_MIGRATION,
                getName() + " migration failed. Unresolved circular reference during id migration.");
    }
}
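
The loop above applies a conversion only when the new id is not still pending as another entry's old id, and keeps retrying until the map is drained or a full pass makes no progress, which signals a circular reference. The following Hibernate-free sketch isolates that ordering logic with a made-up two-entry map; it is an illustration of the algorithm, not part of the migration code.

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

public class IdConversionOrderDemo {
    public static void main(String[] args) {
        // old id -> new id; 1 -> 2 must wait until 2 -> 5 has freed id 2
        Map<Long, Long> map = new HashMap<Long, Long>();
        map.put(1L, 2L);
        map.put(2L, 5L);

        int count = map.size() + 1;
        while (!map.isEmpty() && count > map.size()) {
            count = map.size();
            for (Iterator<Map.Entry<Long, Long>> it = map.entrySet().iterator(); it.hasNext();) {
                Map.Entry<Long, Long> entry = it.next();
                if (!map.containsKey(entry.getValue())) {
                    System.out.println("convert " + entry.getKey() + " -> " + entry.getValue());
                    it.remove();
                }
            }
        }
        if (!map.isEmpty()) {
            System.out.println("unresolved circular reference: " + map);
        }
    }
}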

From source file:de.innovationgate.webgate.api.jdbc.WGDatabaseImpl.java

License:Open Source License

/**
 * @throws WGAPIException
 * @see de.innovationgate.webgate.api.WGDatabaseCore#getChildEntries(WGStructEntry)
 */
public Iterator<WGDocumentCore> getChildEntries(WGStructEntry structEntry, WGPageOrderSet order)
        throws WGAPIException {
    StructEntry hentry = (StructEntry) ((WGDocumentImpl) structEntry.getCore()).getEntity();
    Map<String, Object> params = new HashMap<String, Object>();

    String orderClause;
    if (order != null && getCsVersion().getPatchLevel() > 4) {
        orderClause = buildHqlPageOrderClause(order, params);
    } else {
        if (order != null && getCsVersion().getPatchLevel() <= 4) {
            WGFactory.getLogger().warn("Order clause not allowed for CS PL < 5. Using default order.");
        }
        orderClause = "struct.position asc, struct.title asc, struct.key asc";
    }

    Query query = getSession()
            .createQuery("select struct from StructEntry as struct where struct.parententry = :entry order by "
                    + orderClause);

    params.put("entry", hentry);
    for (Map.Entry<String, Object> param : params.entrySet()) {
        query.setParameter(param.getKey(), param.getValue());
    }

    return new StructEntryIterator(this, query);

}

From source file:de.innovationgate.webgate.api.jdbc.WGDatabaseImpl.java

License:Open Source License

/**
 * @throws WGAPIException
 * @see de.innovationgate.webgate.api.WGDatabaseCore#getRootEntries(WGArea)
 */
public Iterator<WGDocumentCore> getRootEntries(WGArea area, WGPageOrderSet order) throws WGAPIException {

    Area harea = (Area) ((WGDocumentImpl) area.getCore()).getEntity();
    Map<String, Object> params = new HashMap<String, Object>();

    String orderClause;
    if (order != null) {
        orderClause = buildHqlPageOrderClause(order, params);
    } else {
        orderClause = "struct.position asc, struct.title asc, struct.key asc";
    }

    Query query = getSession().createQuery(
            "select struct from StructEntry as struct where struct.area = :area order by " + orderClause);
    params.put("area", harea);
    for (Map.Entry<String, Object> param : params.entrySet()) {
        query.setParameter(param.getKey(), param.getValue());
    }

    return new StructEntryIterator(this, query);
}

From source file:de.innovationgate.webgate.api.jdbc.WGDatabaseImpl.java

License:Open Source License

public List<WGRelationData> getIncomingRelations(Object structKey, String language, String contentClass,
        String relName, String relGroupName, Boolean includeUnreleased, WGColumnSet order)
        throws WGAPIException {
    if (_ddlVersion < WGDatabase.CSVERSION_WGA5) {
        return Collections.emptyList();
    }

    StringBuffer hql = new StringBuffer(
            "select relation from ContentRelation as relation where relation.targetstructentry = :structkey and relation.targetlanguage = :language and relation.parentcontent.status in (:states)");
    Map<String, Object> parameters = new HashMap<String, Object>();
    parameters.put("structkey", structKey);
    parameters.put("language", language);

    if (relName != null) {
        hql.append(" and relation.name = :relname");
        parameters.put("relname", relName);
    }

    if (relGroupName != null) {
        hql.append(" and relation.group = :relgroup");
        parameters.put("relgroup", relGroupName);
    }

    if (contentClass != null) {
        hql.append(" and relation.parentcontent.contentclass = :sourceclass");
        parameters.put("sourceclass", contentClass);
    }

    if (order != null) {
        hql.append(" order by ");
        hql.append(buildHqlContentOrderClause("relation.parentcontent", order, parameters));
    }

    Query query = getSession().createQuery(hql.toString());
    for (Map.Entry<String, Object> param : parameters.entrySet()) {
        query.setParameter(param.getKey(), param.getValue());
    }

    if (includeUnreleased) {
        query.setParameterList("states",
                new Object[] { WGContent.STATUS_DRAFT, WGContent.STATUS_REVIEW, WGContent.STATUS_RELEASE });
    } else {
        query.setParameterList("states", new Object[] { WGContent.STATUS_RELEASE });
    }

    /*
    Criteria crit = getSession().createCriteria(ContentRelation.class);
    crit.add(Restrictions.eq("targetstructentry", structKey));
    crit.add(Restrictions.eq("targetlanguage", language));
    crit.setFetchMode("parentcontent", FetchMode.SELECT);
            
    if (includeUnreleased) {
    crit.createCriteria("parentcontent").add(Restrictions.in("status", new Object[] {WGContent.STATUS_DRAFT, WGContent.STATUS_REVIEW, WGContent.STATUS_RELEASE}));
    }
    else {
    crit.createCriteria("parentcontent").add(Restrictions.eq("status", WGContent.STATUS_RELEASE));
    }*/

    List<WGRelationData> incoming = new ArrayList<WGRelationData>();

    for (ContentRelation rel : (List<ContentRelation>) query.list()) {
        WGRelationData relData = createWGRelationData(rel);
        incoming.add(relData);
    }

    return incoming;

}

From source file:org.apache.ignite.cache.hibernate.HibernateL2CacheStrategySelfTest.java

License:Apache License

/**
 * @param accessType Cache access type.
 * @param igniteInstanceName Name of the grid providing caches.
 * @return Session factory.
 */
private SessionFactory startHibernate(AccessType accessType, String igniteInstanceName) {
    StandardServiceRegistryBuilder builder = new StandardServiceRegistryBuilder();

    builder.applySetting("hibernate.connection.url", CONNECTION_URL);

    for (Map.Entry<String, String> e : HibernateL2CacheSelfTest
            .hibernateProperties(igniteInstanceName, accessType.name()).entrySet())
        builder.applySetting(e.getKey(), e.getValue());

    builder.applySetting(USE_STRUCTURED_CACHE, "true");
    builder.applySetting(REGION_CACHE_PROPERTY + ENTITY1_NAME, "cache1");
    builder.applySetting(REGION_CACHE_PROPERTY + ENTITY2_NAME, "cache2");
    builder.applySetting(REGION_CACHE_PROPERTY + TIMESTAMP_CACHE, TIMESTAMP_CACHE);
    builder.applySetting(REGION_CACHE_PROPERTY + QUERY_CACHE, QUERY_CACHE);

    MetadataSources metadataSources = new MetadataSources(builder.build());

    metadataSources.addAnnotatedClass(Entity1.class);
    metadataSources.addAnnotatedClass(Entity2.class);
    metadataSources.addAnnotatedClass(Entity3.class);
    metadataSources.addAnnotatedClass(Entity4.class);

    Metadata metadata = metadataSources.buildMetadata();

    for (PersistentClass entityBinding : metadata.getEntityBindings()) {
        if (!entityBinding.isInherited())
            ((RootClass) entityBinding).setCacheConcurrencyStrategy(accessType.getExternalName());
    }

    return metadata.buildSessionFactory();
}

From source file:org.apereo.portal.tools.dbloader.HibernateDbLoader.java

License:Apache License

@Override
public void process(DbLoaderConfig configuration)
        throws ParserConfigurationException, SAXException, IOException {
    final String scriptFile = configuration.getScriptFile();
    final List<String> script;
    if (scriptFile == null) {
        script = null;
    } else {
        script = new LinkedList<String>();
    }

    final ITableDataProvider tableData = this.loadTables(configuration, dialect);

    //Handle table drop/create
    if (configuration.isDropTables() || configuration.isCreateTables()) {
        //Load Table object model
        final Map<String, Table> tables = tableData.getTables();

        final Mapping mapping = this.configuration.buildMapping();
        final String defaultCatalog = this.configuration.getProperty(Environment.DEFAULT_CATALOG);
        final String defaultSchema = this.configuration.getProperty(Environment.DEFAULT_SCHEMA);

        final Map<String, DataAccessException> failedSql = new LinkedHashMap<String, DataAccessException>();

        //Generate and execute drop table scripts
        if (configuration.isDropTables()) {
            final List<String> dropScript = this.dropScript(tables.values(), dialect, defaultCatalog,
                    defaultSchema);

            if (script == null) {
                this.logger.info("Dropping existing tables");
                for (final String sql : dropScript) {
                    this.logger.info(sql);
                    try {
                        jdbcOperations.update(sql);
                    } catch (NonTransientDataAccessResourceException dae) {
                        throw dae;
                    } catch (DataAccessException dae) {
                        failedSql.put(sql, dae);
                    }
                }
            } else {
                script.addAll(dropScript);
            }
        }

        //Log any drop/create statements that failed
        for (final Map.Entry<String, DataAccessException> failedSqlEntry : failedSql.entrySet()) {
            this.logger.warn(
                    "'" + failedSqlEntry.getKey() + "' failed to execute due to " + failedSqlEntry.getValue());
        }

        //Generate and execute create table scripts
        if (configuration.isCreateTables()) {
            final List<String> createScript = this.createScript(tables.values(), dialect, mapping,
                    defaultCatalog, defaultSchema);

            if (script == null) {
                this.logger.info("Creating tables");
                for (final String sql : createScript) {
                    this.logger.info(sql);
                    jdbcOperations.update(sql);
                }
            } else {
                script.addAll(createScript);
            }
        }
    }

    //Perform database population
    if (script == null && configuration.isPopulateTables()) {
        this.logger.info("Populating database");
        final Map<String, Map<String, Integer>> tableColumnTypes = tableData.getTableColumnTypes();
        this.populateTables(configuration, tableColumnTypes);
    }

    //Write out the script file
    if (script != null) {
        for (final ListIterator<String> iterator = script.listIterator(); iterator.hasNext();) {
            final String sql = iterator.next();
            iterator.set(sql + ";");
        }

        final File outputFile = new File(scriptFile);
        FileUtils.writeLines(outputFile, script);
        this.logger.info("Saved DDL to: " + outputFile.getAbsolutePath());
    }
}