Example usage for org.hibernate Query setReadOnly

List of usage examples for org.hibernate Query setReadOnly

Introduction

On this page you can find example usage for org.hibernate.Query#setReadOnly.

Prototype

Query<R> setReadOnly(boolean readOnly);

Source Link

Document

Set the read-only/modifiable mode for entities and proxies loaded by this Query.
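
A minimal sketch of a typical call, assuming an open Session named session and a placeholder entity MyEntity (neither appears in the examples below): entities loaded read-only are not dirty-checked and Hibernate keeps no state snapshot for them, which lowers memory overhead for large read-only result sets.

Query query = session.createQuery("from MyEntity e");
query.setReadOnly(true);   // loaded entities/proxies are not dirty-checked
query.setCacheable(false); // common companion setting for bulk reads
List<?> results = query.list();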

Usage

From source file:at.treedb.db.Iterator.java

License:Open Source License

/**
 * <p>
 * Returns the next single object of an entity. This method is used to
 * enumerate large (binary) objects of an entity set. Fetching single
 * objects avoids running into OutOfMemoryError.
 * </p>
 * <p>
 * <b>Implementation details:</b>
 * <ol>
 * <li>Hibernate: stateless session<br>
 * </li>
 * <li>JPA/EclipseLink: <a href=
 * "http://wiki.eclipse.org/Using_Advanced_Query_API_%28ELUG%29#Example_107-12">
 * ReadAllQuery/CursoredStream</a> (streaming data) wasn't really working -
 * the whole entity data set was loaded on the first access every time!
 * Currently a native SQL statement is used to pre-load all object IDs; this
 * list is then used to retrieve the objects one by one.</li>
 * <li>JPA/ObjectDB: slow query that sets the first position and the maximum
 * result set size.</li>
 * </ol>
 * 
 * @return entity object
 * @throws Exception
 */
@SuppressWarnings("unchecked")
public List<Object> nextObject() throws Exception {
    if (!hasNext) {
        return null;
    }
    int size = 1;
    List<Object> list = null;
    // Hibernate environment
    if (dao.isHibernate() || dao.getJPAimpl() == DAO.JPA_IMPL.HIBERNATEJPA) {
        if (sresult == null) {
            Query query = ((DAOhibernate) dao).createQuery(queryString, map);
            query.setReadOnly(true);
            // MIN_VALUE hints the JDBC driver to stream results - but this
            // magic does not work for every DB!
            if (dao.getDB() != DAO.DB.H2) {
                query.setFetchSize(Integer.MIN_VALUE);
            }
            sresult = query.scroll(ScrollMode.FORWARD_ONLY);
        }
        if (sresult.next()) {
            list = new ArrayList<Object>();
            list.add(sresult.get(0));
        }
    } else {
        if (dao.getJPAimpl() != DAO.JPA_IMPL.OBJECTDB) {
            if (idList == null) {
                idList = (List<Integer>) dao.nativeQuery(nativeQueryString);
                if (idList.size() == 0) {
                    return null;
                }
            }
            if (listIndex < idList.size()) {
                list = new ArrayList<Object>();
                Object o = Base.load(dao, (Class<? extends Base>) clazz, idList.get(listIndex));
                if (o == null) {
                    throw new Exception("Iterator.nextObject(): loading JPA object for ID "
                            + idList.get(listIndex) + " failed");
                }
                list.add(o);
                ++listIndex;
            }
        } else {
            // TODO: fallback for ObjectDB - working, but slow, very slow
            list = (List<Object>) dao.query(queryString, index, size, map);
        }
    }
    index += size;
    toRead -= size;
    if (toRead == 0) {
        hasNext = false;
    }
    return list;
}
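
The example above combines three settings that usually travel together for streaming reads. A distilled sketch of the pattern, assuming a placeholder entity LargeEntity and an open Session named session:

Query query = session.createQuery("from LargeEntity e");
query.setReadOnly(true);               // no dirty checking for loaded rows
query.setFetchSize(Integer.MIN_VALUE); // MySQL streaming hint; not portable
ScrollableResults cursor = query.scroll(ScrollMode.FORWARD_ONLY);
try {
    while (cursor.next()) {
        Object entity = cursor.get(0);
        // process one row at a time; the full set never sits in memory
    }
} finally {
    cursor.close();
}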

From source file:com.enonic.cms.store.dao.FindCategoryByKeysQuerier.java

License:Open Source License

List<CategoryEntity> queryCategories(final Collection<CategoryKey> categoryKeys) {
    final SelectBuilder hqlQuery = new SelectBuilder(0);
    hqlQuery.addSelect("c");
    hqlQuery.addFromTable(CategoryEntity.class.getName(), "c", SelectBuilder.NO_JOIN, null);
    hqlQuery.addFilter("AND", new InClauseBuilder<CategoryKey>("c.key", categoryKeys) {
        public void appendValue(StringBuffer sql, CategoryKey value) {
            sql.append(value.toString());
        }
    }.toString());

    Query compiled = hibernateSession.createQuery(hqlQuery.toString());
    compiled.setReadOnly(true);
    compiled.setCacheable(false);
    //noinspection unchecked
    return compiled.list();
}
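
If a single result from such a read-only query later needs to be modified, the read-only mode can be flipped per entity instance. A hedged sketch continuing the method above (variable names are hypothetical):

List<CategoryEntity> categories = queryCategories(categoryKeys);
CategoryEntity first = categories.get(0);
boolean wasReadOnly = hibernateSession.isReadOnly(first); // true for these results
hibernateSession.setReadOnly(first, false);               // re-enable dirty checking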

From source file:com.enonic.cms.store.dao.FindContentByKeysQuerier.java

License:Open Source License

List<ContentEntity> queryContent(final Collection<ContentKey> contentKeys) {
    final SelectBuilder hqlQuery = new SelectBuilder(0);
    hqlQuery.addSelect("c");
    hqlQuery.addFromTable(ContentEntity.class.getName(), "c", SelectBuilder.NO_JOIN, null);

    if (eagerFetchingIsSafe(contentKeys)) {
        if (contentEagerFetches.hasTable(ContentEagerFetches.Table.ACCESS)) {
            hqlQuery.addFromTable("c.contentAccessRights", null, SelectBuilder.LEFT_JOIN_FETCH, null);
        }
        if (contentEagerFetches.hasTable(ContentEagerFetches.Table.MAIN_VERSION)) {
            hqlQuery.addFromTable("c.mainVersion", null, SelectBuilder.LEFT_JOIN_FETCH, null);
        }
        if (contentEagerFetches.hasTable(ContentEagerFetches.Table.SECTION_CONTENT)) {
            hqlQuery.addFromTable("c.sectionContents", null, SelectBuilder.LEFT_JOIN_FETCH, null);
        }
        if (contentEagerFetches.hasTable(ContentEagerFetches.Table.DIRECT_MENUITEM_PLACEMENT)) {
            hqlQuery.addFromTable("c.directMenuItemPlacements", null, SelectBuilder.LEFT_JOIN_FETCH, null);
        }
        if (contentEagerFetches.hasTable(ContentEagerFetches.Table.CONTENT_HOME)) {
            hqlQuery.addFromTable("c.contentHomes", null, SelectBuilder.LEFT_JOIN_FETCH, null);
        }
    }

    hqlQuery.addFilter("AND", new InClauseBuilder<ContentKey>("c.key", contentKeys) {
        public void appendValue(StringBuffer sql, ContentKey value) {
            sql.append(value.toString());
        }
    }.toString());

    final Query compiled = hibernateSession.createQuery(hqlQuery.toString());
    compiled.setReadOnly(fetchEntitiesAsReadOnly);
    compiled.setCacheable(false);
    //noinspection unchecked
    return compiled.list();
}

From source file:com.enonic.cms.store.dao.FindRelatedChildrenCommand.java

License:Open Source License

Collection<RelatedChildContent> execute(RelatedChildContentQuery relatedChildContentQuery) {
    if (relatedChildContentQuery.getContentVersions() == null
            || relatedChildContentQuery.getContentVersions().size() == 0) {
        throw new IllegalArgumentException("Given contentVersionKeys must contain values");
    }

    String hql = getRelatedChildrenByKeyHQL(relatedChildContentQuery);

    Query compiled = hibernateTemplate.getSessionFactory().getCurrentSession().createQuery(hql);
    compiled.setCacheable(false);
    compiled.setReadOnly(true);
    compiled.setParameter("deleted", 0);

    if (!relatedChildContentQuery.isIncludeOfflineContent()) {
        compiled.setParameter("status", ContentStatus.APPROVED.getKey());
        compiled.setParameter("timeNow",
                relatedChildContentQuery.getNow().minuteOfHour().roundFloorCopy().toDate());
    }

    final List<ContentVersionKey> contentVersionKeys = relatedChildContentQuery.getContentVersions();
    for (int i = 0; i < contentVersionKeys.size(); i++) {
        compiled.setParameter("cv" + i, contentVersionKeys.get(i));
    }

    if (relatedChildContentQuery.hasSecurityFilter()) {
        compiled.setParameter("one", 1);
        List<GroupKey> securityFilter = Lists.newArrayList(relatedChildContentQuery.getSecurityFilter());
        for (int i = 0; i < securityFilter.size(); i++) {
            compiled.setParameter("g" + i, securityFilter.get(i));
        }
    }

    @SuppressWarnings({ "unchecked" })
    List<Object[]> list = compiled.list();

    final Set<ContentKey> contentKeys = new LinkedHashSet<ContentKey>(list.size());
    for (Object[] row : list) {
        contentKeys.add((ContentKey) row[1]);
    }

    final ContentMap contentMap = retrieveContent(contentKeys);

    final List<RelatedChildContent> relatedChildContents = new ArrayList<RelatedChildContent>();
    for (Object[] row : list) {
        ContentVersionKey versionKey = (ContentVersionKey) row[0];
        ContentKey contentKey = (ContentKey) row[1];
        ContentEntity content = contentMap.get(contentKey);
        RelatedChildContent relatedChildContent = new RelatedChildContent(versionKey, content);
        relatedChildContents.add(relatedChildContent);
    }

    return relatedChildContents;
}

From source file:com.enonic.cms.store.dao.FindRelatedParentsCommand.java

License:Open Source License

Collection<RelatedParentContent> execute(final RelatedParentContentQuery relatedParentContentQuery) {
    if (relatedParentContentQuery.getContents() == null
            || relatedParentContentQuery.getContents().size() == 0) {
        throw new IllegalArgumentException("Given childContentKeys must contain values");
    }

    final String hql = getRelatedParentsByKeyHQL(relatedParentContentQuery);

    final Query compiled = hibernateTemplate.getSessionFactory().getCurrentSession().createQuery(hql);
    compiled.setCacheable(false);
    compiled.setReadOnly(true);
    compiled.setParameter("deleted", 0);

    if (!relatedParentContentQuery.isIncludeOfflineContent()) {
        compiled.setParameter("timeNow",
                relatedParentContentQuery.getNow().minuteOfHour().roundFloorCopy().toDate());
        compiled.setParameter("status", ContentStatus.APPROVED.getKey());
    }

    final List<ContentKey> childContentKeys = relatedParentContentQuery.getContents();
    for (int i = 0; i < childContentKeys.size(); i++) {
        compiled.setParameter("c" + i, childContentKeys.get(i));
    }

    if (relatedParentContentQuery.hasSecurityFilter()) {
        compiled.setParameter("one", 1);
        final List<GroupKey> securityFilter = Lists.newArrayList(relatedParentContentQuery.getSecurityFilter());
        for (int i = 0; i < securityFilter.size(); i++) {
            compiled.setParameter("g" + i, securityFilter.get(i));
        }
    }

    @SuppressWarnings({ "unchecked" })
    List<Object[]> list = compiled.list();

    final Set<ContentKey> contentKeys = new LinkedHashSet<ContentKey>(list.size());
    for (Object[] row : list) {
        contentKeys.add((ContentKey) row[2]);
    }

    final ContentMap contentMapByKey = retrieveContent(contentKeys);

    final List<RelatedParentContent> relatedParentContents = new ArrayList<RelatedParentContent>();
    for (Object[] row : list) {
        ContentKey childContentKey = (ContentKey) row[0];
        ContentVersionKey parentContentMainVersionKey = (ContentVersionKey) row[1];
        ContentKey parentContentKey = (ContentKey) row[2];
        ContentEntity parentContent = contentMapByKey.get(parentContentKey);
        RelatedParentContent relatedParentContent = new RelatedParentContent(childContentKey, parentContent,
                parentContentMainVersionKey);
        relatedParentContents.add(relatedParentContent);
    }

    return relatedParentContents;
}

From source file:com.mysema.query.jpa.hibernate.AbstractHibernateQuery.java

License:Apache License

private Query createQuery(String queryString, @Nullable QueryModifiers modifiers, boolean forCount) {
    Query query = session.createQuery(queryString);
    HibernateUtil.setConstants(query, getConstants(), getMetadata().getParams());
    if (fetchSize > 0) {
        query.setFetchSize(fetchSize);
    }
    if (timeout > 0) {
        query.setTimeout(timeout);
    }
    if (cacheable != null) {
        query.setCacheable(cacheable);
    }
    if (cacheRegion != null) {
        query.setCacheRegion(cacheRegion);
    }
    if (comment != null) {
        query.setComment(comment);
    }
    if (readOnly != null) {
        query.setReadOnly(readOnly);
    }
    for (Map.Entry<Path<?>, LockMode> entry : lockModes.entrySet()) {
        query.setLockMode(entry.getKey().toString(), entry.getValue());
    }
    if (flushMode != null) {
        query.setFlushMode(flushMode);
    }

    if (modifiers != null && modifiers.isRestricting()) {
        if (modifiers.getLimit() != null) {
            query.setMaxResults(modifiers.getLimit().intValue());
        }
        if (modifiers.getOffset() != null) {
            query.setFirstResult(modifiers.getOffset().intValue());
        }
    }

    // set transformer, if necessary
    List<? extends Expression<?>> projection = getMetadata().getProjection();
    if (projection.size() == 1 && !forCount) {
        Expression<?> expr = projection.get(0);
        if (expr instanceof FactoryExpression<?>) {
            query.setResultTransformer(
                    new FactoryExpressionTransformer((FactoryExpression<?>) projection.get(0)));
        }
    } else if (!forCount) {
        FactoryExpression<?> proj = FactoryExpressionUtils.wrap(projection);
        if (proj != null) {
            query.setResultTransformer(new FactoryExpressionTransformer(proj));
        }
    }
    return query;
}
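
For context, the readOnly field consulted above is normally populated through the fluent query API before createQuery runs. A hedged usage sketch, assuming a generated query type QCustomer (hypothetical) and the setReadOnly(boolean) mutator this class exposes:

QCustomer customer = QCustomer.customer;
List<Customer> customers = new HibernateQuery(session)
        .from(customer)
        .setReadOnly(true) // stored in the readOnly field read by createQuery
        .where(customer.active.isTrue())
        .list(customer);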

From source file:com.oracle.coherence.hibernate.cachestore.HibernateCacheLoader.java

License:CDDL license

/**
 * Load a collection of Hibernate entities given a set of ids (keys)
 *
 * @param keys  the cache keys; specifically, the entity ids
 *
 * @return      the corresponding Hibernate entity instances
 */
public Map loadAll(Collection keys) {
    ensureInitialized();

    Map results = new HashMap();

    Transaction tx = null;

    Session session = openSession();
    SessionImplementor sessionImplementor = (SessionImplementor) session;

    try {
        tx = session.beginTransaction();

        // Create the query
        String sQuery = getLoadAllQuery();
        Query query = session.createQuery(sQuery);

        // Prevent Hibernate from caching the results
        query.setCacheMode(CacheMode.IGNORE);
        query.setCacheable(false);
        query.setReadOnly(true);

        // Parameterize the query (where :keys = keys)
        query.setParameterList(PARAM_IDS, keys);

        // Need a way to extract the key from an entity that we know
        // nothing about.
        ClassMetadata classMetaData = getEntityClassMetadata();

        // Iterate through the results and place into the return map
        for (Iterator iter = query.list().iterator(); iter.hasNext();) {
            Object entity = iter.next();
            Object id = classMetaData.getIdentifier(entity, sessionImplementor);
            results.put(id, entity);
        }

        tx.commit();
    } catch (Exception e) {
        if (tx != null) {
            tx.rollback();
        }

        throw ensureRuntimeException(e);
    } finally {
        closeSession(session);
    }

    return results;
}
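
Note the belt-and-braces cache handling here: CacheMode.IGNORE and setCacheable(false) keep the second-level and query caches out of the load path, while setReadOnly(true) skips dirty checking for entities the loader never flushes back.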

From source file:com.querydsl.jpa.hibernate.AbstractHibernateQuery.java

License:Apache License

private Query createQuery(@Nullable QueryModifiers modifiers, boolean forCount) {
    JPQLSerializer serializer = serialize(forCount);
    String queryString = serializer.toString();
    logQuery(queryString, serializer.getConstantToLabel());
    Query query = session.createQuery(queryString);
    HibernateUtil.setConstants(query, serializer.getConstantToLabel(), getMetadata().getParams());
    if (fetchSize > 0) {
        query.setFetchSize(fetchSize);
    }
    if (timeout > 0) {
        query.setTimeout(timeout);
    }
    if (cacheable != null) {
        query.setCacheable(cacheable);
    }
    if (cacheRegion != null) {
        query.setCacheRegion(cacheRegion);
    }
    if (comment != null) {
        query.setComment(comment);
    }
    if (readOnly != null) {
        query.setReadOnly(readOnly);
    }
    for (Map.Entry<Path<?>, LockMode> entry : lockModes.entrySet()) {
        query.setLockMode(entry.getKey().toString(), entry.getValue());
    }
    if (flushMode != null) {
        query.setFlushMode(flushMode);
    }

    if (modifiers != null && modifiers.isRestricting()) {
        Integer limit = modifiers.getLimitAsInteger();
        Integer offset = modifiers.getOffsetAsInteger();
        if (limit != null) {
            query.setMaxResults(limit);
        }
        if (offset != null) {
            query.setFirstResult(offset);
        }
    }

    // set transformer, if necessary
    Expression<?> projection = getMetadata().getProjection();
    if (!forCount && projection instanceof FactoryExpression) {
        query.setResultTransformer(new FactoryExpressionTransformer((FactoryExpression<?>) projection));
    }
    return query;
}

From source file:com.wci.umls.server.jpa.algo.RrfLoaderAlgorithm.java

License:Open Source License

/**
 * Load MRCONSO.RRF. This is responsible for loading {@link Atom}s and
 * {@link AtomClass}es.
 *
 * @throws Exception the exception
 */
private void loadMrconso() throws Exception {
    logInfo("  Load MRCONSO");
    logInfo("  Insert atoms and concepts ");

    // Set up maps
    String line = null;

    int objectCt = 0;
    final PushBackReader reader = readers.getReader(RrfReaders.Keys.MRCONSO);
    final String[] fields = new String[18];
    String prevCui = null;
    Concept cui = null;
    while ((line = reader.readLine()) != null) {

        line = line.replace("\r", "");
        FieldedStringTokenizer.split(line, "|", 18, fields);

        // Skip non-matching in single mode
        if (singleMode && !fields[11].equals(getTerminology())) {
            continue;
        }

        // Field Description
        // 0 CUI
        // 1 LAT
        // 2 TS
        // 3 LUI
        // 4 STT
        // 5 SUI
        // 6 ISPREF
        // 7 AUI
        // 8 SAUI
        // 9 SCUI
        // 10 SDUI
        // 11 SAB
        // 12 TTY
        // 13 CODE
        // 14 STR
        // 15 SRL
        // 16 SUPPRESS
        // 17 CVF
        //
        // e.g.
        // C0000005|ENG|P|L0000005|PF|S0007492|Y|A7755565||M0019694|D012711|MSH|PEN|D012711|(131)I-Macroaggregated
        // Albumin|0|N|256|

        // set the root terminology language
        loadedRootTerminologies.get(fields[11]).setLanguage(fields[1]);

        final Atom atom = new AtomJpa();
        atom.setLanguage(fields[1]);
        atom.setTimestamp(releaseVersionDate);
        atom.setLastModified(releaseVersionDate);
        atom.setLastModifiedBy(loader);
        atom.setObsolete(fields[16].equals("O"));
        atom.setSuppressible(!fields[16].equals("N"));
        atom.setPublished(true);
        atom.setPublishable(true);
        atom.setName(fields[14]);
        atom.setTerminology(fields[11]);
        if (loadedTerminologies.get(fields[11]) == null) {
            throw new Exception("Atom references terminology that does not exist: " + fields[11]);
        }
        atom.setVersion(loadedTerminologies.get(fields[11]).getVersion());
        // skip in single mode
        if (!singleMode) {
            atom.putAlternateTerminologyId(getTerminology(), fields[7]);
        }
        atom.setTerminologyId(fields[8]);
        atom.setTermType(fields[12]);
        atom.setWorkflowStatus(published);

        atom.setCodeId(fields[13]);
        atom.setDescriptorId(fields[10]);
        atom.setConceptId(fields[9]);

        atom.setStringClassId(fields[5]);
        atom.setLexicalClassId(fields[3]);
        atom.setCodeId(fields[13]);

        // Handle root terminology short name, hierarchical name, and sy names
        if (fields[11].equals("SRC") && fields[12].equals("SSN")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setShortName(fields[14]);
            }
        }
        if (fields[11].equals("SRC") && fields[12].equals("RHT")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setHierarchicalName(fields[14]);
            }
        }

        if (fields[11].equals("SRC") && fields[12].equals("RPT")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                t.getRootTerminology().setPreferredName(fields[14]);
            }
        }
        if (fields[11].equals("SRC") && fields[12].equals("RSY") && !fields[14].equals("")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                List<String> syNames = t.getRootTerminology().getSynonymousNames();
                syNames.add(fields[14]);
            }
        }

        // Handle terminology sy names
        if (fields[11].equals("SRC") && fields[12].equals("VSY") && !fields[14].equals("")) {
            final Terminology t = loadedTerminologies.get(fields[13].substring(2));
            if (t == null || t.getRootTerminology() == null) {
                logError("  Null root " + line);
            } else {
                List<String> syNames = t.getSynonymousNames();
                syNames.add(fields[14]);
            }
        }

        // Determine organizing class type for terminology
        if (!atom.getDescriptorId().equals("")) {
            termIdTypeMap.put(atom.getTerminology(), IdType.DESCRIPTOR);
        } else if (!atom.getConceptId().equals("")) {
            termIdTypeMap.put(atom.getTerminology(), IdType.CONCEPT);
        } // OTHERWISE it remains "CODE"

        // skip in single mode
        if (!singleMode) {
            atom.putConceptTerminologyId(getTerminology(), fields[0]);
        }

        // Add atoms and commit periodically
        addAtom(atom);
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
        atomIdMap.put(fields[7], atom.getId());
        atomTerminologyMap.put(fields[7], atom.getTerminology().intern());
        atomConceptIdMap.put(fields[7], atom.getConceptId().length() == 0 ? "".intern() : atom.getConceptId());
        atomCodeIdMap.put(fields[7], atom.getCodeId().length() == 0 ? "".intern() : atom.getCodeId());
        atomDescriptorIdMap.put(fields[7],
                atom.getDescriptorId().length() == 0 ? "".intern() : atom.getDescriptorId());

        // CUI - skip in single mode
        if (!singleMode) {
            // Add concept
            if (prevCui == null || !fields[0].equals(prevCui)) {
                if (prevCui != null) {
                    cui.setName(getComputedPreferredName(cui, list));
                    addConcept(cui);
                    conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
                    logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
                }
                cui = new ConceptJpa();
                cui.setTimestamp(releaseVersionDate);
                cui.setLastModified(releaseVersionDate);
                cui.setLastModifiedBy(loader);
                cui.setPublished(true);
                cui.setPublishable(true);
                cui.setTerminology(getTerminology());
                cui.setTerminologyId(fields[0]);
                cui.setVersion(getVersion());
                cui.setWorkflowStatus(published);
            }
            cui.getAtoms().add(atom);
            prevCui = fields[0];
        }

        // Handle Subset
        // C3539934|ENG|S|L11195730|PF|S13913746|N|A23460885||900000000000538005||SNOMEDCT_US|SB|900000000000538005|Description
        // format|9|N|256|
        if (fields[12].equals("SB")) {

            // Have to handle the type later, when we get to attributes
            final AtomSubset atomSubset = new AtomSubsetJpa();
            setSubsetFields(atomSubset, fields);
            cuiAuiAtomSubsetMap.put(fields[0] + fields[7], atomSubset);
            idTerminologyAtomSubsetMap.put(atomSubset.getTerminologyId() + atomSubset.getTerminology(),
                    atomSubset);
            final ConceptSubset conceptSubset = new ConceptSubsetJpa();
            setSubsetFields(conceptSubset, fields);
            cuiAuiConceptSubsetMap.put(fields[0] + fields[7], conceptSubset);
            idTerminologyConceptSubsetMap.put(conceptSubset.getTerminologyId() + conceptSubset.getTerminology(),
                    conceptSubset);
        }

    }
    // Add last concept
    if (prevCui != null) {
        cui.setName(getComputedPreferredName(cui, list));
        addConcept(cui);
        conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
        logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    }

    // Set the terminology organizing class types
    for (final Terminology terminology : loadedTerminologies.values()) {
        final IdType idType = termIdTypeMap.get(terminology.getTerminology());
        if (idType != null && idType != IdType.CODE) {
            terminology.setOrganizingClassType(idType);
            updateTerminology(terminology);
        }
    }

    logInfo("  Add concepts");
    objectCt = 0;
    // NOTE: Hibernate-specific to support iterating
    // Restrict to timestamp used for THESE atoms, in case multiple RRF
    // files are loaded
    final Session session = manager.unwrap(Session.class);
    org.hibernate.Query hQuery = session.createQuery("select a from AtomJpa a " + "where conceptId is not null "
            + "and conceptId != '' and timestamp = :timestamp " + "order by terminology, conceptId");
    hQuery.setParameter("timestamp", releaseVersionDate);
    hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
    ScrollableResults results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
    prevCui = null;
    cui = null;
    while (results.next()) {
        final Atom atom = (Atom) results.get()[0];
        if (atom.getConceptId() == null || atom.getConceptId().isEmpty()) {
            continue;
        }
        if (prevCui == null || !prevCui.equals(atom.getConceptId())) {
            if (cui != null) {
                // compute preferred name
                cui.setName(getComputedPreferredName(cui, list));
                addConcept(cui);
                conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
                logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
            }
            cui = new ConceptJpa();
            cui.setTimestamp(releaseVersionDate);
            cui.setLastModified(releaseVersionDate);
            cui.setLastModifiedBy(loader);
            cui.setPublished(true);
            cui.setPublishable(true);
            cui.setTerminology(atom.getTerminology());
            cui.setTerminologyId(atom.getConceptId());
            cui.setVersion(atom.getVersion());
            cui.setWorkflowStatus(published);
        }
        cui.getAtoms().add(atom);
        prevCui = atom.getConceptId();
    }
    if (cui != null) {
        cui.setName(getComputedPreferredName(cui, list));
        addConcept(cui);
        conceptIdMap.put(cui.getTerminology() + cui.getTerminologyId(), cui.getId());
        commitClearBegin();
    }
    results.close();
    logInfo("  Add descriptors");
    objectCt = 0;

    // NOTE: Hibernate-specific to support iterating
    hQuery = session.createQuery("select a from AtomJpa a " + "where descriptorId is not null "
            + "and descriptorId != '' and timestamp = :timestamp " + "order by terminology, descriptorId");
    hQuery.setParameter("timestamp", releaseVersionDate);
    hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
    results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
    String prevDui = null;
    Descriptor dui = null;
    while (results.next()) {
        final Atom atom = (Atom) results.get()[0];
        if (atom.getDescriptorId() == null || atom.getDescriptorId().isEmpty()) {
            continue;
        }
        if (prevDui == null || !prevDui.equals(atom.getDescriptorId())) {
            if (dui != null) {
                // compute preferred name
                dui.setName(getComputedPreferredName(dui, list));
                addDescriptor(dui);
                descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
                logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
            }
            dui = new DescriptorJpa();
            dui.setTimestamp(releaseVersionDate);
            dui.setLastModified(releaseVersionDate);
            dui.setLastModifiedBy(loader);
            dui.setPublished(true);
            dui.setPublishable(true);
            dui.setTerminology(atom.getTerminology());
            dui.setTerminologyId(atom.getDescriptorId());
            dui.setVersion(atom.getVersion());
            dui.setWorkflowStatus(published);
        }
        dui.getAtoms().add(atom);
        prevDui = atom.getDescriptorId();
    }
    if (dui != null) {
        dui.setName(getComputedPreferredName(dui, list));
        addDescriptor(dui);
        descriptorIdMap.put(dui.getTerminology() + dui.getTerminologyId(), dui.getId());
        commitClearBegin();
    }
    results.close();

    // Use flag to decide whether to handle codes
    if (codesFlag) {
        logInfo("  Add codes");
        objectCt = 0;
        // NOTE: Hibernate-specific to support iterating
        // Skip NOCODE
        // TODO: there is a LNC exception here -for now
        hQuery = session.createQuery("select a from AtomJpa a where codeId is not null "
                + "and codeId != '' and timestamp = :timestamp "
                + "and (terminology = 'LNC' OR (codeId != conceptId and codeId != descriptorId)) "
                + "and timestamp = :timestamp " + "order by terminology, codeId");
        hQuery.setParameter("timestamp", releaseVersionDate);
        hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
        results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
        String prevCode = null;
        Code code = null;
        while (results.next()) {
            final Atom atom = (Atom) results.get()[0];
            if (atom.getCodeId() == null || atom.getCodeId().isEmpty() || atom.getCodeId().equals("NOCODE")) {
                continue;
            }
            if (prevCode == null || !prevCode.equals(atom.getCodeId())) {
                if (code != null) {
                    // compute preferred name
                    code.setName(getComputedPreferredName(code, list));
                    addCode(code);
                    codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
                    logAndCommit(++objectCt, RootService.logCt, 1000);
                }
                code = new CodeJpa();
                code.setTimestamp(releaseVersionDate);
                code.setLastModified(releaseVersionDate);
                code.setLastModifiedBy(loader);
                code.setPublished(true);
                code.setPublishable(true);
                code.setTerminology(atom.getTerminology());
                code.setTerminologyId(atom.getCodeId());
                code.setVersion(atom.getVersion());
                code.setWorkflowStatus(published);
            }
            code.getAtoms().add(atom);
            prevCode = atom.getCodeId();
        }
        if (code != null) {
            code.setName(getComputedPreferredName(code, list));
            addCode(code);
            codeIdMap.put(code.getTerminology() + code.getTerminologyId(), code.getId());
            commitClearBegin();
        }
        results.close();
    }

    // NOTE: for efficiency and lack of use cases, we've temporarily
    // suspended the loading of LexicalClass and StringClass objects

    // // NOTE: atoms are not connected to lexical classes as there are
    // // currently no known uses for this.
    // logInfo(" Add lexical classes");
    // objectCt = 0;
    // query = NEED TO FIX THIS
    // manager
    // .createQuery("select a.id from AtomJpa a order by lexicalClassId");
    // String prevLui = null;
    // LexicalClass lui = null;
    // LexicalClass atoms = null;
    // for (final Long id : (List<Long>) query.getResultList()) {
    // final Atom atom = getAtom(id);
    // if (atom.getLexicalClassId() == null
    // || atom.getLexicalClassId().isEmpty()) {
    // continue;
    // }
    // if (prevLui == null || !prevLui.equals(atom.getLexicalClassId())) {
    // if (lui != null) {
    // // compute preferred name
    // lui.setName(getComputedPreferredName(atoms));
    // addLexicalClass(lui);
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }
    // // just used to hold atoms, never saved.
    // atoms = new LexicalClassJpa();
    // lui = new LexicalClassJpa();
    // lui.setTimestamp(releaseVersionDate);
    // lui.setLastModified(releaseVersionDate);
    // lui.setLastModifiedBy(loader);
    // lui.setPublished(true);
    // lui.setPublishable(true);
    // lui.setTerminology(terminology);
    // lui.setTerminologyId(atom.getLexicalClassId());
    // lui.setVersion(version);
    // lui.setWorkflowStatus(published);
    // lui.setNormalizedString(getNormalizedString(atom.getName()));
    // }
    // atoms.addAtom(atom);
    // prevLui = atom.getLexicalClassId();
    // }
    // if (lui != null) {
    // lui.setName(getComputedPreferredName(atoms));
    // commitClearBegin();
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }
    //
    // // NOTE: currently atoms are not loaded for string classes
    // // We simply load the objects themselves ( for SUI maintenance)
    // // There are no known use cases for having the atoms here.
    // logInfo(" Add string classes");
    // objectCt = 0;
    // query = NEED TO FIX THIS
    // manager
    // .createQuery("select distinct stringClassId, name from AtomJpa a");
    // for (final Object[] suiFields : (List<Object[]>) query.getResultList()) {
    // final StringClass sui = new StringClassJpa();
    // sui.setTimestamp(releaseVersionDate);
    // sui.setLastModified(releaseVersionDate);
    // sui.setLastModifiedBy(loader);
    // sui.setPublished(true);
    // sui.setPublishable(true);
    // sui.setTerminology(terminology);
    // sui.setTerminologyId(suiFields[0].toString());
    // sui.setVersion(version);
    // sui.setWorkflowStatus(published);
    // sui.setName(suiFields[1].toString());
    // addStringClass(sui);
    // logAndCommit(++objectCt, RootService.logCt, RootService.commitCt);
    // }

    // commit
    commitClearBegin();

    logInfo("  Update terminologies for languages and names.");

    // Update all root terminologies now that we know languages and names
    for (final RootTerminology root : loadedRootTerminologies.values()) {
        updateRootTerminology(root);
    }

    // Update all root terminologies now that we know languages and names
    for (final Terminology terminology : loadedTerminologies.values()) {
        updateTerminology(terminology);
    }
    commitClearBegin();

}
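
The loader above repeats one Hibernate-specific idiom three times (concepts, descriptors, codes): a read-only, non-cacheable query scrolled with a moderate fetch size so millions of rows can be grouped without exhausting memory. Distilled, with the query string shortened:

org.hibernate.Query hQuery = session.createQuery(
        "select a from AtomJpa a where timestamp = :timestamp order by terminology");
hQuery.setParameter("timestamp", releaseVersionDate);
hQuery.setReadOnly(true).setFetchSize(2000).setCacheable(false);
ScrollableResults results = hQuery.scroll(ScrollMode.FORWARD_ONLY);
while (results.next()) {
    final Atom atom = (Atom) results.get()[0];
    // group consecutive atoms by id and commit periodically, as above
}
results.close();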

From source file:de.tudarmstadt.ukp.lmf.transform.DBToXMLTransformer.java

License:Apache License

protected void doTransform(boolean includeAxes, final Lexicon... includeLexicons) throws SAXException {
    final int bufferSize = 100;
    commitCounter = 1;

    writeStartElement(lexicalResource);

    // Iterate over all lexicons
    if (includeLexicons == null || includeLexicons.length > 0) {
        for (Lexicon lexicon : lexicalResource.getLexicons()) {
            String lexiconName = lexicon.getName();

            // Check if we want to include this lexicon.
            if (includeLexicons != null) {
                boolean found = false;
                for (Lexicon l : includeLexicons) {
                    if (lexiconName.equals(l.getName())) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    continue;
                }
            }

            logger.info("Processing lexicon: " + lexiconName);
            writeStartElement(lexicon);

            // Iterate over all possible sub-elements of this Lexicon and
            // write them to the XML
            Class<?>[] lexiconClassesToSave = { LexicalEntry.class, SubcategorizationFrame.class,
                    SubcategorizationFrameSet.class, SemanticPredicate.class, Synset.class,
                    SynSemCorrespondence.class,
                    //ConstraintSet.class
            };

            //  "Unfortunately, MySQL does not treat large offset values efficiently by default and will still read all the rows prior to an offset value. It is common to see a query with an offset above 100,000 take over 20 times longer than an offset of zero!"
            // http://www.numerati.com/2012/06/26/reading-large-result-sets-with-hibernate-and-mysql/
            for (Class<?> clazz : lexiconClassesToSave) {
                /*DetachedCriteria criteria = DetachedCriteria.forClass(clazz)
                      .add(Restrictions.sqlRestriction("lexiconId = '" + lexicon.getId() + "'"));
                CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
                while (iter.hasNext()) {
                   Object obj = iter.next();
                   writeElement(obj);
                   session.evict(obj);
                   commitCounter++;
                   if (commitCounter % 1000 == 0)
                      logger.info("progress: " + commitCounter  + " class instances written to file");
                }*/
                Session lookupSession = sessionFactory.openSession();
                Query query = lookupSession.createQuery("FROM " + clazz.getSimpleName() + " WHERE lexiconId = '"
                        + lexicon.getId() + "' ORDER BY id");
                query.setReadOnly(true);
                if (DBConfig.MYSQL.equals(dbConfig.getDBType())) {
                    query.setFetchSize(Integer.MIN_VALUE); // MIN_VALUE gives hint to JDBC driver to stream results
                } else {
                    query.setFetchSize(1000);
                }
                ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
                while (results.next()) {
                    // For streamed query results, no further queries are allowed (incl. lazy proxy queries!)
                    // Detach the object from the lookup session and reload it using the "official" session.
                    Object[] rows = results.get();
                    Object row = rows[0];
                    lookupSession.evict(row);
                    lookupSession.evict(rows);
                    rows = null;
                    row = session.get(row.getClass(), ((IHasID) row).getId());
                    writeElement(row);
                    session.evict(row);
                    row = null;
                    commitCounter++;
                    if (commitCounter % 1000 == 0) {
                        logger.info("progress: " + commitCounter + " class instances written to file");
                    }
                    if (commitCounter % 10000 == 0) {
                        closeSession();
                        openSession();
                    }
                }
                results.close();
                lookupSession.close();
            }
            writeEndElement(lexicon);
        }
    }

    // Iterate over SenseAxes and write them to XML when not only
    // lexicons should be converted
    if (includeAxes) {
        logger.info("Processing sense axes");
        DetachedCriteria criteria = DetachedCriteria.forClass(SenseAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
        while (iter.hasNext()) {
            Object obj = iter.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }

        logger.info("Processing predicateargument axes");
        DetachedCriteria criteria2 = DetachedCriteria.forClass(PredicateArgumentAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter2 = new CriteriaIterator<Object>(criteria2, sessionFactory, bufferSize);
        while (iter2.hasNext()) {
            Object obj = iter2.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }

    }
    writeEndElement(lexicalResource);

    writeEndDocument();
}