Example usage for org.hibernate LockOptions LockOptions

Introduction

On this page you can find example usages of the org.hibernate LockOptions constructor LockOptions(LockMode).

Prototype

public LockOptions(LockMode lockMode) 

Document

Constructs a LockOptions with the given lock mode.
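
Before the individual sources, a minimal self-contained sketch of the constructor on its own, assuming a configured SessionFactory with an active transaction; the generic helper, its name and the five-second timeout are illustrative and not taken from any of the examples below. The lock mode passed to the constructor becomes the default lock mode carried by the options.

import java.io.Serializable;

import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class LockOptionsSketch {

    // Loads an entity by id under a pessimistic write lock, waiting at most
    // five seconds for the lock (LockOptions timeouts are in milliseconds).
    public static <T> T getForUpdate(SessionFactory sessionFactory, Class<T> type, Serializable id) {
        Session session = sessionFactory.getCurrentSession(); // assumes a current-session context
        LockOptions lockOptions = new LockOptions(LockMode.PESSIMISTIC_WRITE);
        lockOptions.setTimeOut(5000);
        return type.cast(session.get(type, id, lockOptions));
    }
}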

Usage

From source file:com.liferay.portal.dao.orm.hibernate.QueryImpl.java

License:Open Source License

public Query setLockMode(String alias, LockMode lockMode) {
    org.hibernate.LockMode hibernateLockMode = LockModeTranslator.translate(lockMode);

    LockOptions lockOptions = new LockOptions(hibernateLockMode);

    lockOptions.setAliasSpecificLockMode(alias, hibernateLockMode);

    _query.setLockOptions(lockOptions);

    return this;
}
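
The Liferay example above sets the constructor's lock mode as the default for the whole query and then pins the same mode to one alias. A hedged sketch of the same idea against a plain Hibernate Session follows; the Account entity, its status field and the "acc" alias are assumptions for illustration only.

import java.util.List;

import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.Query;
import org.hibernate.Session;

public class AliasLockSketch {

    // Locks only the rows selected under the "acc" alias; "Account" is a placeholder entity.
    @SuppressWarnings("unchecked")
    public static List<Object> findOpenAccountsForUpdate(Session session) {
        Query query = session.createQuery("select acc from Account acc where acc.status = :status");
        query.setParameter("status", "OPEN");

        LockOptions lockOptions = new LockOptions(LockMode.PESSIMISTIC_WRITE);    // default for the query
        lockOptions.setAliasSpecificLockMode("acc", LockMode.PESSIMISTIC_WRITE);  // applied to the alias
        query.setLockOptions(lockOptions);

        return query.list();
    }
}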

From source file:com.lighting.platform.base.dao.SimpleHibernateDao.java

License:Apache License

/**
 * Pessimistic lock: locks the given entity with select ... for update.
 */
public void pessimisticLock(T entity) {
    LockOptions option = new LockOptions(LockMode.PESSIMISTIC_WRITE);
    getSession().buildLockRequest(option).lock(entity);
}
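
The same pattern outside the DAO, as a self-contained sketch of the transactional context such a helper assumes; the session handling, the generic method and the Runnable mutation are illustrative assumptions, not part of the source above. The row lock taken by buildLockRequest(...).lock(entity) is held until the surrounding transaction commits or rolls back.

import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;

public class PessimisticLockSketch {

    // Re-associates and locks the given entity, applies a change, and commits.
    public static <T> void updateUnderLock(SessionFactory sessionFactory, T entity, Runnable mutation) {
        Session session = sessionFactory.openSession();
        Transaction tx = session.beginTransaction();
        try {
            // Same call the pessimisticLock(entity) helper above makes:
            // issues a select ... for update for the entity's row.
            session.buildLockRequest(new LockOptions(LockMode.PESSIMISTIC_WRITE)).lock(entity);
            mutation.run();   // mutate the entity while no concurrent writer can touch the row
            tx.commit();      // flushes the change and releases the row lock
        } catch (RuntimeException e) {
            tx.rollback();    // the lock is also released on rollback
            throw e;
        } finally {
            session.close();
        }
    }
}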

From source file:de.innovationgate.webgate.api.jdbc.WGDatabaseImpl.java

License:Open Source License

private Long upgradeFileStorage(Logger log) throws WGAPIException {

    _ugradeFileStorageRunning = true;
    try {

        if (_csVersion.getVersion() < 5.0 || _csVersion.getPatchLevel() < 4) {
            log.error("This task needs a content store of version 5.0 patch level 4 or higher");
            return 0L;
        }

        CS5P4FileHandling fileHandling = ((CS5P4FileHandling) _fileHandling);
        long freedMemory = 0;

        while (true) {
            String metaHql = "from ContentFileMeta as meta where meta.checksumSha512 is null";
            Iterator oldMetas = getSession().createQuery(metaHql).iterate();
            try {
                int counter = 0;
                if (!oldMetas.hasNext()) {
                    break;
                }

                while (oldMetas.hasNext() && counter < 100) {
                    ContentFileMeta meta = (ContentFileMeta) oldMetas.next();
                    getSession().setReadOnly(meta, false);
                    LockRequest lockRequest = getSession()
                            .buildLockRequest(new LockOptions(LockMode.PESSIMISTIC_WRITE));
                    lockRequest.lock(meta);

                    try {

                        // Just-for-sure check if this is really not yet migrated
                        getSession().refresh(meta);
                        if (meta.getChecksumSha512() != null) {
                            rollbackHibernateTransaction(false);
                            continue;
                        }

                        log.info("Database: " + getDb().getDbReference() + ": Upgrading storage of file '"
                                + meta.getName() + "' from document '" + meta.getParentcontent().getTitle()
                                + "' (" + meta.getParentcontent().getStructentry().getKey() + "."
                                + meta.getParentcontent().getLanguage().getName() + "."
                                + meta.getParentcontent().getVersion() + ")");

                        // Select file parts
                        String hqlQuery = "select cfp from ContentFilePart as cfp where cfp.meta=:metaEntity order by cfp.partnr asc";
                        Query query = getSession().createQuery(hqlQuery);
                        query.setParameter("metaEntity", meta);

                        // Migrate file parts to filecontents parts
                        InputStream in = new HibernateQueryInputStream(fileHandling.getParent().getSession(),
                                query, 0, isOptimizedFileHandlingDisableQueryPaging());
                        try {
                            fileHandling.storeFileContents(meta, new CS5P4ContentFileDescriptor(), in);
                        } finally {
                            in.close();
                        }

                        // Delete file parts
                        Query deletionQuery = getSession()
                                .createQuery("delete ContentFilePart cfp where cfp.meta = :meta");
                        deletionQuery.setEntity("meta", meta);
                        deletionQuery.executeUpdate();

                        // Commit so we can read the file afterwards
                        commitHibernateTransaction();

                        /*
                        // Annotate the file
                        WGDocumentImpl doc = createDocumentImpl(meta.getParentcontent());
                        TemporaryFile tempFile = new TemporaryFile(meta.getName(), doc.getFileData(meta.getName()), WGFactory.getTempDir());
                        try {
                        WGFileMetaData md = new WGFileMetaData(new WGDocument.FakeMetaDataContext(), meta.getName(), meta.getSize(), meta.getCreated(), meta.getLastmodified(), meta.getChecksum(), meta.getChecksumSha512(), fileHandling.loadMdExtensionData(doc, meta));
                        _db.annotateMetadata(tempFile.getFile(), md, null);
                        fileHandling.storeMdExtensionData(doc, md, meta);
                        if (isSaveIsolationActive()) {
                            getSession().update(meta); // This will not be able to store binary extension data, which however cannot be present before upgrading the file storage
                        }
                        }
                        finally {
                        tempFile.delete();
                        }
                        commitHibernateTransaction();
                        */
                    } catch (Throwable e) {
                        log.error("Exception upgrading file", e);
                        rollbackHibernateTransaction(false);
                    }
                    counter++;
                }

                log.info("Clearing session cache");
                refresh();
                log.info("Running file storage maintenance to remove duplicate file data");
                freedMemory += dailyMaintenance(log);
            } finally {
                Hibernate.close(oldMetas);
            }
        }

        log.info("Database: " + getDb().getDbReference() + ": Upgrading file storage freed "
                + WGUtils.DECIMALFORMAT_STANDARD.format(freedMemory / 1024 / 1024)
                + " MB of file storage memory.");
        return freedMemory;

    } finally {
        _ugradeFileStorageRunning = false;
    }

}

From source file:net.e6tech.elements.persist.hibernate.ModifiedTableGenerator.java

License:Apache License

@SuppressWarnings("unchecked")
protected String buildSelectQuery(Dialect dialect) {
    final String alias = "tbl";
    final String query = "select " + StringHelper.qualify(alias, valueColumnName) + " from " + renderedTableName
            + ' ' + alias + " where " + StringHelper.qualify(alias, segmentColumnName) + "=?";
    final LockOptions lockOptions = new LockOptions(LockMode.PESSIMISTIC_WRITE);
    lockOptions.setAliasSpecificLockMode(alias, LockMode.PESSIMISTIC_WRITE);
    final Map updateTargetColumnsMap = Collections.singletonMap(alias, new String[] { valueColumnName });
    return dialect.applyLocksToSql(query, lockOptions, updateTargetColumnsMap);
}

From source file:org.brekka.commons.persistence.dao.hibernate.AbstractIdentifiableEntityHibernateDAO.java

License:Apache License

@SuppressWarnings("unchecked")
@Override
public Entity retrieveById(final ID entityId, final LockModeType lockModeType, final int timeout,
        final TimeUnit timeUnit) {
    Session session = getCurrentSession();
    LockMode lockMode = getLockMode(lockModeType);
    LockOptions lockOptions = new LockOptions(lockMode);
    if (timeout > -1) {
        lockOptions.setTimeOut((int) timeUnit.toMillis(timeout));
    }
    Entity entity = (Entity) session.get(type(), entityId, lockOptions);
    preRead(entity);
    return entity;
}

From source file:org.brekka.commons.persistence.dao.hibernate.AbstractIdentifiableEntityHibernateDAO.java

License:Apache License

protected Entity queryById(final ID entityId, final String idFieldName, final String hql,
        final LockModeType lockModeType, final int timeout, final TimeUnit timeUnit) {
    LockMode lockMode = getLockMode(lockModeType);
    LockOptions lockOptions = new LockOptions(lockMode);
    if (timeout > -1) {
        lockOptions.setTimeOut((int) timeUnit.toMillis(timeout));
    }
    Entity entity = type().cast(
            getCurrentSession().createQuery(hql).setLockOptions(lockOptions).setParameter(idFieldName, entityId)
                    .setResultTransformer(FirstResultTransformer.INSTANCE).uniqueResult());
    preRead(entity);
    return entity;
}

From source file:org.grails.orm.hibernate.GrailsHibernateTemplate.java

License:Apache License

public <T> T lock(final Class<T> entityClass, final Serializable id, final LockMode lockMode)
        throws DataAccessException {
    return doExecute(new HibernateCallback<T>() {
        @SuppressWarnings("unchecked")
        public T doInHibernate(Session session) throws HibernateException {
            return (T) session.get(entityClass, id, new LockOptions(lockMode));
        }
    }, true);
}

From source file:org.grails.orm.hibernate.GrailsHibernateTemplate.java

License:Apache License

public void lock(final Object entity, final LockMode lockMode) throws DataAccessException {
    doExecute(new HibernateCallback<Object>() {
        public Object doInHibernate(Session session) throws HibernateException {
            session.buildLockRequest(new LockOptions(lockMode)).lock(entity);//LockMode.PESSIMISTIC_WRITE
            return null;
        }
    }, true);
}

From source file:org.grails.orm.hibernate.GrailsHibernateTemplate.java

License:Apache License

public void refresh(final Object entity, final LockMode lockMode) throws DataAccessException {
    doExecute(new HibernateCallback<Object>() {
        public Object doInHibernate(Session session) throws HibernateException {
            if (lockMode == null) {
                session.refresh(entity);
            } else {
                session.refresh(entity, new LockOptions(lockMode));
            }
            return null;
        }
    }, true);
}

From source file:org.intelligentsia.utility.generator.TableGenerator.java

License:Apache License

public void configure(final DataSource dataSource, final Dialect dialect, final String tableName,
        final String keyValue, final long initialValue) {
    this.tableName = tableName;
    this.dataSource = dataSource;
    insert = new StringBuilder("INSERT into ").append(tableName).append(" values ('").append(keyValue)
            .append("', ").append(initialValue).append(")").toString();
    query = new StringBuilder("select NEXT_VAL from ")
            .append(dialect.appendLockHint(new LockOptions(LockMode.PESSIMISTIC_WRITE), tableName))
            .append(" where VAL_ID='").append(keyValue).append("' ").append(dialect.getForUpdateString())
            .toString();
    update = new StringBuilder("update ").append(tableName).append(" set NEXT_VAL=? WHERE VAL_ID='")
            .append(keyValue).append("' AND NEXT_VAL=?").toString();
}