Example usage for org.springframework.dao InvalidDataAccessApiUsageException InvalidDataAccessApiUsageException

Introduction

On this page you can find example usages of the org.springframework.dao.InvalidDataAccessApiUsageException(String msg) constructor.

Prototype

public InvalidDataAccessApiUsageException(String msg) 

Document

Constructor for InvalidDataAccessApiUsageException with the specified detail message.
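
As a quick orientation before the usage examples below, here is a minimal, self-contained sketch of the typical pattern: validate how a data-access API is being used and throw with a descriptive detail message. The BatchGuard class and validateBatchSize method are hypothetical and only illustrate the constructor.

import org.springframework.dao.InvalidDataAccessApiUsageException;

public class BatchGuard {

    // Hypothetical guard: reject an invalid argument before any data access happens.
    public static void validateBatchSize(int batchSize) {
        if (batchSize <= 0) {
            // The single-argument constructor takes the detail message describing the misuse.
            throw new InvalidDataAccessApiUsageException(
                    "Batch size must be greater than 0, but was: " + batchSize);
        }
    }
}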

Usage

From source file:com.frank.search.solr.core.query.Criteria.java

private void assertValuesPresent(Object... values) {
    if (values.length == 0 || (values.length > 1 && values[1] instanceof Collection)) {
        throw new InvalidDataAccessApiUsageException("At least one element "
                + (values.length > 0 ? ("of argument of type " + values[1].getClass().getName()) : "")
                + " has to be present.");
    }
}

From source file:lib.JdbcTemplate.java

@Override
public int[] batchUpdate(final String... sql) throws DataAccessException {
    Assert.notEmpty(sql, "SQL array must not be empty");
    if (logger.isDebugEnabled()) {
        logger.debug("Executing SQL batch update of " + sql.length + " statements");
    }

    class BatchUpdateStatementCallback implements StatementCallback<int[]>, SqlProvider {

        private String currSql;

        @Override
        public int[] doInStatement(Statement stmt) throws SQLException, DataAccessException {
            int[] rowsAffected = new int[sql.length];
            if (JdbcUtils.supportsBatchUpdates(stmt.getConnection())) {
                for (String sqlStmt : sql) {
                    this.currSql = appendSql(this.currSql, sqlStmt);
                    stmt.addBatch(sqlStmt);
                }
                try {
                    rowsAffected = stmt.executeBatch();
                } catch (BatchUpdateException ex) {
                    String batchExceptionSql = null;
                    for (int i = 0; i < ex.getUpdateCounts().length; i++) {
                        if (ex.getUpdateCounts()[i] == Statement.EXECUTE_FAILED) {
                            batchExceptionSql = appendSql(batchExceptionSql, sql[i]);
                        }
                    }
                    if (StringUtils.hasLength(batchExceptionSql)) {
                        this.currSql = batchExceptionSql;
                    }
                    throw ex;
                }
            } else {
                for (int i = 0; i < sql.length; i++) {
                    this.currSql = sql[i];
                    if (!stmt.execute(sql[i])) {
                        rowsAffected[i] = stmt.getUpdateCount();
                    } else {
                        throw new InvalidDataAccessApiUsageException("Invalid batch SQL statement: " + sql[i]);
                    }
                }
            }
            return rowsAffected;
        }

        private String appendSql(String sql, String statement) {
            return (StringUtils.isEmpty(sql) ? statement : sql + "; " + statement);
        }

        @Override
        public String getSql() {
            return this.currSql;
        }
    }

    return execute(new BatchUpdateStatementCallback());
}

From source file:cc.tooyoung.common.db.JdbcTemplate.java

public int[] batchUpdate(final String[] sql) throws DataAccessException {
    Assert.notEmpty(sql, "SQL array must not be empty");
    if (ApiLogger.isTraceEnabled()) {
        ApiLogger.trace(new StringBuilder(128).append("Executing SQL batch update of ").append(sql.length)
                .append(" statements"));
    }

    class BatchUpdateStatementCallback implements StatementCallback, SqlProvider {
        private String currSql;

        public Object doInStatement(Statement stmt) throws SQLException, DataAccessException {
            int[] rowsAffected = new int[sql.length];
            if (JdbcUtils.supportsBatchUpdates(stmt.getConnection())) {
                for (int i = 0; i < sql.length; i++) {
                    this.currSql = sql[i];
                    stmt.addBatch(sql[i]);
                }
                rowsAffected = stmt.executeBatch();
            } else {
                for (int i = 0; i < sql.length; i++) {
                    this.currSql = sql[i];
                    if (!stmt.execute(sql[i])) {
                        rowsAffected[i] = stmt.getUpdateCount();
                    } else {
                        throw new InvalidDataAccessApiUsageException("Invalid batch SQL statement: " + sql[i]);
                    }
                }
            }
            return rowsAffected;
        }

        public String getSql() {
            return currSql;
        }
    }
    return (int[]) execute(new BatchUpdateStatementCallback(), true);
}

From source file:com._4dconcept.springframework.data.marklogic.core.MarklogicTemplate.java

private void assertAutoGenerableIdIfNotSet(Object entity) {
    MarklogicPersistentEntity<?> persistentEntity = retrievePersistentEntity(entity.getClass());
    MarklogicPersistentProperty idProperty = persistentEntity.getIdProperty();

    if (idProperty == null) {
        return;
    }

    Object idValue = persistentEntity.getPropertyAccessor(entity).getProperty(idProperty);

    if (idValue == null && !MarklogicSimpleTypes.AUTOGENERATED_ID_TYPES.contains(idProperty.getType())) {
        throw new InvalidDataAccessApiUsageException(
                String.format("Cannot auto-generate id of type %s for entity of type %s!",
                        idProperty.getType().getName(), entity.getClass().getName()));
    }
}

From source file:com._4dconcept.springframework.data.marklogic.core.MarklogicTemplate.java

private MarklogicIdentifier resolveMarklogicIdentifier(Object object) {
    MarklogicPersistentEntity<?> persistentEntity = retrievePersistentEntity(object.getClass());
    MarklogicPersistentProperty idProperty = persistentEntity.getIdProperty();

    if (idProperty == null)
        throw new InvalidDataAccessApiUsageException("Unable to retrieve expected identifier property !");

    Object id = persistentEntity.getPropertyAccessor(object).getProperty(idProperty);

    if (id == null) {
        throw new NullPointerException("Id is not expected to be null");
    }

    return resolveMarklogicIdentifier(id, idProperty);
}

From source file:com._4dconcept.springframework.data.marklogic.core.MarklogicTemplate.java

@Nullable
private Object retrieveIdentifier(Object object) {
    MarklogicPersistentEntity<?> persistentEntity = retrievePersistentEntity(object.getClass());
    MarklogicPersistentProperty idProperty = persistentEntity.getIdProperty();

    if (idProperty == null)
        throw new InvalidDataAccessApiUsageException("Unable to retrieve expected identifier property !");

    return persistentEntity.getPropertyAccessor(object).getProperty(idProperty);
}

From source file:com._4dconcept.springframework.data.marklogic.core.MarklogicTemplate.java

private <T> MarklogicIdentifier resolveMarklogicIdentifier(Object id, Class<T> entityClass) {
    MarklogicPersistentProperty idProperty = getIdPropertyFor(entityClass);

    if (idProperty == null)
        throw new InvalidDataAccessApiUsageException("Unable to retrieve expected identifier property !");

    return resolveMarklogicIdentifier(id, idProperty);
}

From source file:org.openvpms.component.business.dao.hibernate.im.IMObjectDAOHibernate.java

/**
 * Check whether write operations are allowed on the given Session.
 * <p>Default implementation throws an InvalidDataAccessApiUsageException
 * in case of FlushMode.NEVER/MANUAL. Can be overridden in subclasses.
 *
 * @param template the hibernate template
 * @param session  the current session
 * @throws InvalidDataAccessApiUsageException
 *          if write operations are not allowed
 * @see HibernateTemplate#checkWriteOperationAllowed(Session)
 */
private void checkWriteOperationAllowed(HibernateTemplate template, Session session)
        throws InvalidDataAccessApiUsageException {
    if (template.isCheckWriteOperations() && template.getFlushMode() != HibernateTemplate.FLUSH_EAGER
            && FlushMode.MANUAL.equals(session.getFlushMode())) {
        throw new InvalidDataAccessApiUsageException("Write operations are not allowed in read-only mode "
                + "(FlushMode.MANUAL) - turn your Session into "
                + "FlushMode.AUTO or remove 'readOnly' marker from " + "transaction definition");
    }
}
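
InvalidDataAccessApiUsageException extends DataAccessException, so it is unchecked and callers are not forced to handle it. As a closing, self-contained sketch (the class and method names are hypothetical, not taken from the examples above), this is how calling code might catch it to report the misuse rather than letting it propagate:

import org.springframework.dao.InvalidDataAccessApiUsageException;

public class ReadOnlyGuardDemo {

    // Hypothetical write operation: mirrors the read-only guard in the example above.
    static void saveInReadOnlyMode(boolean readOnly) {
        if (readOnly) {
            throw new InvalidDataAccessApiUsageException(
                    "Write operations are not allowed in read-only mode (FlushMode.MANUAL)");
        }
    }

    public static void main(String[] args) {
        try {
            saveInReadOnlyMode(true);
        } catch (InvalidDataAccessApiUsageException ex) {
            // Unchecked exception: catching it is optional; here we just report the misuse.
            System.err.println("API misuse detected: " + ex.getMessage());
        }
    }
}

In most applications the exception is simply allowed to propagate, since it signals a programming error in the calling code rather than a recoverable data-access failure.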