Example usage for org.springframework.jdbc.datasource DataSourceTransactionManager DataSourceTransactionManager

List of usage examples for org.springframework.jdbc.datasource DataSourceTransactionManager DataSourceTransactionManager

Introduction

On this page you can find example usages of the org.springframework.jdbc.datasource.DataSourceTransactionManager constructor DataSourceTransactionManager(DataSource).

Prototype

public DataSourceTransactionManager(DataSource dataSource) 

Source Link

Document

Create a new DataSourceTransactionManager instance.

Usage

From source file:org.ohmage.query.impl.AnnotationQueries.java

/**
 * Stores a new annotation and links it to an existing prompt response, both
 * inside a single transaction. If either insert fails, the transaction is
 * rolled back and the failure is rethrown as a DataAccessException.
 *
 * NOTE(review): the outer catch also fires if beginning the transaction
 * fails, not only on rollback errors — the message is slightly misleading.
 */
@Override
public void createPromptResponseAnnotation(final UUID annotationId, final String client, final Long time,
        final DateTimeZone timezone, final String annotationText, Integer promptResponseId)
        throws DataAccessException {

    // Create the transaction.
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("Creating a new prompt response annotation.");

    try {
        // Begin the transaction.
        PlatformTransactionManager transactionManager = new DataSourceTransactionManager(getDataSource());
        TransactionStatus status = transactionManager.getTransaction(def);
        // Primary key of the new annotation row; needed to build the link below.
        long id = 0;

        try {
            id = insertAnnotation(annotationId, time, timezone, client, annotationText);
        } catch (org.springframework.dao.DataAccessException e) {

            transactionManager.rollback(status);
            // The annotation text is truncated to 25 characters to keep the
            // error message bounded.
            throw new DataAccessException("Error while executing SQL '" + SQL_INSERT_ANNOTATION
                    + "' with parameters: " + annotationId + ", " + time + ", " + timezone.getID() + ", "
                    + client + ", " + ((annotationText.length() > 25) ? annotationText.substring(0, 25) + "..."
                            : annotationText),
                    e);
        }
        try {
            // Insert the link between the prompt_response and its annotation
            getJdbcTemplate().update(SQL_INSERT_PROMPT_RESPONSE_ANNOTATION, promptResponseId, id);

        } catch (org.springframework.dao.DataAccessException e) {

            transactionManager.rollback(status);
            throw new DataAccessException("Error while executing SQL '" + SQL_INSERT_PROMPT_RESPONSE_ANNOTATION
                    + "' with parameters: " + promptResponseId + ", " + id, e);
        }

        // Commit the transaction.
        try {
            transactionManager.commit(status);
        } catch (TransactionException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error while committing the transaction.", e);
        }

    } catch (TransactionException e) {
        throw new DataAccessException("Error while attempting to rollback the transaction.", e);
    }
}

From source file:data.DefaultExchanger.java

/**
 * Imports this exchanger's table from the JSON stream into the database,
 * all inside a single transaction. For MySQL, foreign-key checks are
 * disabled for the duration of the import and restored afterwards.
 *
 * A failure is logged and rolled back but deliberately not rethrown; the
 * import is best-effort and the method returns normally.
 *
 * @param dbName the database product name (special handling for "MySQL")
 * @param parser JSON parser positioned at the table's field name
 * @param jdbcTemplate template bound to the target database
 * @throws IOException if reading from the JSON stream fails
 */
public void importData(String dbName, JsonParser parser, JdbcTemplate jdbcTemplate) throws IOException {
    PlatformTransactionManager tm = new DataSourceTransactionManager(jdbcTemplate.getDataSource());
    TransactionStatus ts = tm.getTransaction(new DefaultTransactionDefinition());

    try {
        if (dbName.equals("MySQL")) {
            // Allow rows to be truncated/inserted regardless of FK ordering.
            jdbcTemplate.update("SET FOREIGN_KEY_CHECKS = 0");
            jdbcTemplate.update("SET NAMES \'utf8mb4\'");
        }

        final Configuration config = Configuration.root();
        int batchSize = config.getInt(DATA_BATCH_SIZE_KEY, DEFAULT_BATCH_SIZE);
        if (parser.nextToken() != JsonToken.END_OBJECT) {
            String fieldName = parser.getCurrentName();
            play.Logger.debug("importing {}", fieldName);
            if (fieldName.equalsIgnoreCase(getTable())) {
                truncateTable(jdbcTemplate);
                JsonToken current = parser.nextToken();
                if (current == JsonToken.START_ARRAY) {
                    importDataFromArray(parser, jdbcTemplate, batchSize);
                    importSequence(dbName, parser, jdbcTemplate);
                } else {
                    play.Logger.info("Error: records should be an array: skipping.");
                    parser.skipChildren();
                }
            }
        }
        tm.commit(ts);
    } catch (Exception e) {
        // Log with the full stack trace (instead of printStackTrace()) and
        // roll back; the error is intentionally swallowed so the finally
        // block can still restore FK checks and the caller continues.
        play.Logger.error("Failed to import data for table " + getTable() + "; rolling back.", e);
        tm.rollback(ts);
    } finally {
        if (dbName.equals("MySQL")) {
            jdbcTemplate.update("SET FOREIGN_KEY_CHECKS = 1");
        }
    }
}

From source file:org.ohmage.query.impl.UserMobilityQueries.java

/**
 * Creates a new Mobility point.
 * 
 * @param username The username of the user to which this point belongs.
 * 
 * @param client The client value given on upload.
 * 
 * @param mobilityPoint The Mobility point to be created.
 * 
 * @throws DataAccessException Thrown if there is an error.
 */
@Override
public void createMobilityPoint(final String username, final String client, final MobilityPoint mobilityPoint)
        throws DataAccessException {

    // Create the transaction.
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("Creating a Mobility data point.");

    try {
        // Begin the transaction.
        PlatformTransactionManager transactionManager = new DataSourceTransactionManager(getDataSource());
        TransactionStatus status = transactionManager.getTransaction(def);

        // Serialized location; computed up front but only used when building
        // the error messages in the catch blocks below.
        JSONObject location = null;
        try {
            Location tLocation = mobilityPoint.getLocation();
            if (tLocation != null) {
                try {
                    location = tLocation.toJson(false, LocationColumnKey.ALL_COLUMNS);
                } catch (DomainException e) {
                    throw new DataAccessException(e);
                }
            }
        } catch (JSONException e) {
            throw new DataAccessException(e);
        }

        // Insert the point itself. The generated key is captured so the
        // extended (sensor-data) row can reference it.
        try {
            KeyHolder mobilityPointDatabaseKeyHolder = new GeneratedKeyHolder();
            getJdbcTemplate().update(new PreparedStatementCreator() {
                public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
                    PreparedStatement ps = connection.prepareStatement(SQL_INSERT, new String[] { "id" });

                    ps.setString(1, mobilityPoint.getId().toString());
                    ps.setString(2, username);
                    ps.setString(3, client);

                    ps.setLong(4, mobilityPoint.getTime());
                    ps.setString(5, mobilityPoint.getTimezone().getID());

                    ps.setString(6, mobilityPoint.getLocationStatus().toString().toLowerCase());
                    try {
                        Location location = mobilityPoint.getLocation();
                        ps.setString(7, ((location == null) ? null
                                : location.toJson(false, LocationColumnKey.ALL_COLUMNS).toString()));
                    } catch (JSONException e) {
                        throw new SQLException("Could not create a JSONObject for the location.", e);
                    } catch (DomainException e) {
                        throw new SQLException("Could not create a JSONObject for the location.", e);
                    }

                    ps.setString(8, mobilityPoint.getMode().toString().toLowerCase());

                    ps.setString(9, mobilityPoint.getPrivacyState().toString());

                    return ps;
                }
            }, mobilityPointDatabaseKeyHolder);

            // If it's an extended entry, add the sensor data.
            if (SubType.SENSOR_DATA.equals(mobilityPoint.getSubType())) {
                JSONObject sensorData;
                try {
                    sensorData = mobilityPoint.getSensorData().toJson(false, SensorDataColumnKey.ALL_COLUMNS);
                } catch (JSONException e) {
                    throw new DataAccessException(e);
                } catch (DomainException e) {
                    throw new DataAccessException(e);
                }

                JSONObject classifierData;
                try {
                    ClassifierData tClassifierData = mobilityPoint.getClassifierData();

                    if (tClassifierData == null) {
                        classifierData = null;
                    } else {
                        classifierData = tClassifierData.toJson(false, ClassifierDataColumnKey.ALL_COLUMNS);
                    }
                } catch (JSONException e) {
                    throw new DataAccessException(e);
                } catch (DomainException e) {
                    throw new DataAccessException(e);
                }

                // Missing classifier data is stored as an empty JSON object.
                try {
                    getJdbcTemplate().update(SQL_INSERT_EXTENDED,
                            mobilityPointDatabaseKeyHolder.getKey().longValue(), sensorData.toString(),
                            (classifierData == null) ? (new JSONObject()).toString()
                                    : classifierData.toString(),
                            MobilityClassifier.getVersion());
                } catch (org.springframework.dao.DataAccessException e) {
                    transactionManager.rollback(status);
                    throw new DataAccessException(
                            "Error executing SQL '" + SQL_INSERT_EXTENDED + "' with parameters: "
                                    + mobilityPointDatabaseKeyHolder.getKey().longValue() + ", "
                                    + sensorData.toString() + ", "
                                    + ((classifierData == null) ? (new JSONObject()).toString()
                                            : classifierData.toString())
                                    + ", " + MobilityClassifier.getVersion(),
                            e);
                }
            }
        }
        // If this is a duplicate upload, we will ignore it by rolling back
        // to where we were before we started and return.
        catch (org.springframework.dao.DataIntegrityViolationException e) {
            // FIXME: Now that we use UUIDs, the client should not be 
            // submitting duplicates. We probably want to, at the very 
            // least make a warning message and at most fail the request.
            if (!isDuplicate(e)) {
                transactionManager.rollback(status);
                throw new DataAccessException("Error executing SQL '" + SQL_INSERT + "' with parameters: "
                        + mobilityPoint.getId().toString() + ", " + username + ", " + client + ", "
                        + mobilityPoint.getTime() + ", " + mobilityPoint.getTimezone().getID() + ", "
                        + mobilityPoint.getLocationStatus().toString().toLowerCase() + ", "
                        + ((location == null) ? "null" : location.toString()) + ", "
                        + mobilityPoint.getMode().toString().toLowerCase() + ", "
                        + mobilityPoint.getPrivacyState(), e);
            }
        } catch (org.springframework.dao.DataAccessException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT + "' with parameters: "
                    + mobilityPoint.getId().toString() + ", " + username + ", " + client + ", "
                    + mobilityPoint.getTime() + ", " + mobilityPoint.getTimezone().getID() + ", "
                    + mobilityPoint.getLocationStatus().toString().toLowerCase() + ", "
                    + ((location == null) ? "null" : location.toString()) + ", "
                    + mobilityPoint.getMode().toString().toLowerCase() + ", " + mobilityPoint.getPrivacyState(),
                    e);
        }

        // Commit the transaction.
        try {
            transactionManager.commit(status);
        } catch (TransactionException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error while committing the transaction.", e);
        }
    } catch (TransactionException e) {
        throw new DataAccessException("Error while attempting to rollback the transaction.", e);
    }
}

From source file:com.opengamma.util.test.DbTest.java

/**
 * Builds a JDBC transaction manager bound to this test's database tool's
 * DataSource.
 *
 * @return a new DataSourceTransactionManager over the test DataSource
 */
public DataSourceTransactionManager getTransactionManager() {
    final javax.sql.DataSource source = getDbTool().getDataSource();
    return new DataSourceTransactionManager(source);
}

From source file:org.ohmage.query.impl.ClassQueries.java

/**
 * Persists a new class row inside its own transaction. Any database failure
 * triggers a rollback and is rethrown as a DataAccessException.
 *
 * @param classId the new class's unique identifier
 * @param className the new class's name
 * @param classDescription the new class's description
 * @throws DataAccessException if the insert or the transaction fails
 */
@Override
public void createClass(String classId, String className, String classDescription) throws DataAccessException {
    // Define the transaction that wraps the insert.
    DefaultTransactionDefinition transactionDefinition = new DefaultTransactionDefinition();
    transactionDefinition.setName("Creating a new class.");

    try {
        // Start the transaction against this DAO's DataSource.
        PlatformTransactionManager txManager = new DataSourceTransactionManager(getDataSource());
        TransactionStatus txStatus = txManager.getTransaction(transactionDefinition);

        // Attempt the insert; roll back on any Spring data-access failure.
        try {
            getJdbcTemplate().update(SQL_INSERT_CLASS, new Object[] { classId, className, classDescription });
        } catch (org.springframework.dao.DataAccessException e) {
            txManager.rollback(txStatus);
            throw new DataAccessException("Error while executing SQL '" + SQL_INSERT_CLASS
                    + "' with parameters: " + classId + ", " + className + ", " + classDescription, e);
        }

        // Make the insert permanent, rolling back if the commit itself fails.
        try {
            txManager.commit(txStatus);
        } catch (TransactionException e) {
            txManager.rollback(txStatus);
            throw new DataAccessException("Error while committing the transaction.", e);
        }
    } catch (TransactionException e) {
        throw new DataAccessException("Error while attempting to rollback the transaction.", e);
    }
}

From source file:dao.FlowsDAO.java

/**
 * Returns one page of flows as a JSON object with "flows", "count", "page",
 * "itemsPerPage", and "totalPages" fields.
 *
 * The read runs inside a single transaction so SELECT FOUND_ROWS() reflects
 * the paged query issued just before it.
 *
 * @param page 1-based page index
 * @param size number of flows per page
 * @return the assembled JSON result node
 */
public static ObjectNode getPagedProjects(int page, int size) {
    ObjectNode result;

    javax.sql.DataSource ds = getJdbcTemplate().getDataSource();
    DataSourceTransactionManager tm = new DataSourceTransactionManager(ds);
    TransactionTemplate txTemplate = new TransactionTemplate(tm);
    result = txTemplate.execute(new TransactionCallback<ObjectNode>() {
        public ObjectNode doInTransaction(TransactionStatus status) {
            ObjectNode resultNode = Json.newObject();
            long count = 0;
            List<Flow> pagedFlows = new ArrayList<Flow>();
            List<Map<String, Object>> rows = null;
            rows = getJdbcTemplate().queryForList(GET_PAGED_FLOWS, (page - 1) * size, size);

            try {
                // Must run on the same connection/transaction as the paged
                // query above to return its total row count.
                count = getJdbcTemplate().queryForObject("SELECT FOUND_ROWS()", Long.class);
            } catch (EmptyResultDataAccessException e) {
                Logger.error("Exception = " + e.getMessage());
            }
            // Parameterized element type instead of the raw Map type.
            for (Map<String, Object> row : rows) {
                Flow flow = new Flow();
                flow.id = (Long) row.get("flow_id");
                flow.level = (Integer) row.get("flow_level");
                flow.appId = (Integer) row.get("app_id");
                flow.group = (String) row.get("flow_group");
                flow.name = (String) row.get("flow_name");
                flow.path = (String) row.get("flow_path");
                flow.appCode = (String) row.get("app_code");
                if (StringUtils.isNotBlank(flow.path)) {
                    // Keep only the portion of the path before the first ':'.
                    int index = flow.path.indexOf(":");
                    if (index != -1) {
                        flow.path = flow.path.substring(0, index);
                    }
                }
                Object created = row.get("created_time");
                if (created != null) {
                    flow.created = created.toString();
                }
                Object modified = row.get("modified_time");
                if (modified != null) {
                    flow.modified = row.get("modified_time").toString();
                }

                int jobCount = 0;

                if (flow.id != null && flow.id != 0) {
                    try {
                        jobCount = getJdbcTemplate().queryForObject(GET_JOB_COUNT_BY_APP_ID_AND_FLOW_ID,
                                new Object[] { flow.appId, flow.id }, Integer.class);
                        flow.jobCount = jobCount;
                    } catch (EmptyResultDataAccessException e) {
                        Logger.error("Exception = " + e.getMessage());
                    }
                }
                pagedFlows.add(flow);
            }
            resultNode.set("flows", Json.toJson(pagedFlows));
            resultNode.put("count", count);
            resultNode.put("page", page);
            resultNode.put("itemsPerPage", size);
            resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));

            return resultNode;
        }
    });
    return result;
}

From source file:net.collegeman.grails.e3db.Template.java

/**
 * Wires this template to the given JDBC DataSource and rebuilds the JDBC and
 * transaction helpers around it.
 *
 * @param dataSource a JDBC DataSource implementation; must not be null
 * @throws IllegalArgumentException if {@code dataSource} is null
 */
public final void dataSource(DataSource dataSource) {
    if (dataSource == null) {
        throw new IllegalArgumentException("dataSource cannot be null.");
    }
    this.dataSource = dataSource;
    this.simpleJdbcTemplate = new SimpleJdbcTemplate(dataSource);
    DataSourceTransactionManager manager = new DataSourceTransactionManager(dataSource);
    this.transactionTemplate = new TransactionTemplate(manager);
}

From source file:org.ohmage.query.impl.DocumentQueries.java

/**
 * Writes a new document to disk, then registers the document, its creator,
 * the creator's OWNER role, and any campaign/class associations in the
 * database inside a single transaction. If any database step fails, the
 * on-disk file is removed and the transaction is rolled back.
 *
 * Fixes: the original logged "Could not delete" when {@link File#delete()}
 * returned {@code true} (i.e. when deletion SUCCEEDED) — the condition is
 * now negated. The FileOutputStream is also closed in a finally block so it
 * is not leaked when the write fails part-way.
 *
 * @param contents the raw bytes of the document
 * @param name the document's name, used to derive its extension
 * @param description a description of the document
 * @param privacyState the document's initial privacy state
 * @param campaignRoleMap map of campaign ID to document role; may be null
 * @param classRoleMap map of class ID to document role; may be null
 * @param creatorUsername the username of the creating user
 * @return the new document's UUID
 * @throws DataAccessException if writing the file or any database step fails
 */
public String createDocument(byte[] contents, String name, String description,
        Document.PrivacyState privacyState, Map<String, Document.Role> campaignRoleMap,
        Map<String, Document.Role> classRoleMap, String creatorUsername) throws DataAccessException {

    // Create a new, random UUID to use to save this file.
    String uuid = UUID.randomUUID().toString();

    // getDirectory() is used as opposed to accessing the current leaf
    // directory class variable as it will do sanitation in case it hasn't
    // been initialized or is full.
    File documentDirectory = getDirectory();
    File newFile = new File(documentDirectory.getAbsolutePath() + "/" + uuid);
    String url = "file://" + newFile.getAbsolutePath();

    // Write the document to the file system, always closing the stream even
    // if the write fails part-way.
    FileOutputStream os = null;
    try {
        os = new FileOutputStream(newFile);
        os.write(contents);
        os.flush();
    } catch (IOException e) {
        throw new DataAccessException("Error writing the new document to the system.", e);
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException e) {
                // Best-effort close: the data has already been flushed, so do
                // not mask a pending exception by throwing from finally.
                LOGGER.warn("Could not close the stream for the new document: " + newFile.toString());
            }
        }
    }
    long fileLength = newFile.length();

    // Parse the name and get the extension.
    String extension = getExtension(name);

    // Create the transaction.
    DefaultTransactionDefinition def = new DefaultTransactionDefinition();
    def.setName("Creating a new document.");

    try {
        // Begin the transaction.
        PlatformTransactionManager transactionManager = new DataSourceTransactionManager(getDataSource());
        TransactionStatus status = transactionManager.getTransaction(def);

        // Insert the file in the DB.
        try {
            getJdbcTemplate().update(SQL_INSERT_DOCUMENT, new Object[] { uuid, name, description, extension,
                    url, fileLength, privacyState.toString() });
        } catch (org.springframework.dao.DataAccessException e) {
            // File.delete() returns false on failure; warn only then.
            if (!newFile.delete()) {
                LOGGER.warn("Could not delete the file that was just created: " + newFile.toString());
            }
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_DOCUMENT + "' with parameters: "
                    + uuid + ", " + name + ", " + description + ", " + extension + ", " + url + ", "
                    + fileLength + ", " + privacyState, e);
        }

        // Insert the creator in the DB.
        try {
            getJdbcTemplate().update(SQL_INSERT_DOCUMENT_USER_CREATOR, new Object[] { uuid, creatorUsername });
        } catch (org.springframework.dao.DataAccessException e) {
            if (!newFile.delete()) {
                LOGGER.warn("Could not delete the file that was just created: " + newFile.toString());
            }
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_DOCUMENT_USER_CREATOR
                    + "' with parameters: " + uuid + ", " + creatorUsername, e);
        }

        // Insert this user's user-role in the DB.
        try {
            getJdbcTemplate().update(SQL_INSERT_USER_ROLE,
                    new Object[] { uuid, creatorUsername, Document.Role.OWNER.toString() });
        } catch (org.springframework.dao.DataAccessException e) {
            if (!newFile.delete()) {
                LOGGER.warn("Could not delete the file that was just created: " + newFile.toString());
            }
            transactionManager.rollback(status);
            throw new DataAccessException("Error executing SQL '" + SQL_INSERT_USER_ROLE + "' with parameters: "
                    + uuid + ", " + creatorUsername + ", " + Document.Role.OWNER, e);
        }

        // Insert any campaign associations in the DB.
        if (campaignRoleMap != null) {
            for (String campaignId : campaignRoleMap.keySet()) {
                // Attempt to insert it into the database.
                try {
                    getJdbcTemplate().update(SQL_INSERT_CAMPAIGN_ROLE,
                            new Object[] { uuid, campaignId, campaignRoleMap.get(campaignId).toString() });
                } catch (org.springframework.dao.DataAccessException e) {
                    if (!newFile.delete()) {
                        LOGGER.warn("Could not delete the file that was just created: " + newFile.toString());
                    }
                    transactionManager.rollback(status);
                    throw new DataAccessException(
                            "Error executing SQL '" + SQL_INSERT_CAMPAIGN_ROLE + "' with parameters: " + uuid
                                    + ", " + campaignId + ", " + campaignRoleMap.get(campaignId),
                            e);
                }
            }
        }

        // Insert any class associations in the DB.
        if (classRoleMap != null) {
            for (String classId : classRoleMap.keySet()) {
                // Attempt to insert it into the database.
                try {
                    getJdbcTemplate().update(SQL_INSERT_CLASS_ROLE,
                            new Object[] { uuid, classId, classRoleMap.get(classId).toString() });
                } catch (org.springframework.dao.DataAccessException e) {
                    if (!newFile.delete()) {
                        LOGGER.warn("Could not delete the file that was just created: " + newFile.toString());
                    }
                    transactionManager.rollback(status);
                    throw new DataAccessException("Error executing SQL '" + SQL_INSERT_CLASS_ROLE
                            + "' with parameters: " + uuid + ", " + classId + ", " + classRoleMap.get(classId),
                            e);
                }
            }
        }

        // Commit the transaction.
        try {
            transactionManager.commit(status);
        } catch (TransactionException e) {
            transactionManager.rollback(status);
            throw new DataAccessException("Error while committing the transaction.", e);
        }

        return uuid;
    } catch (TransactionException e) {
        throw new DataAccessException("Error while attempting to rollback the transaction.", e);
    }
}

From source file:ca.nrc.cadc.vos.server.NodeDAO.java

/**
 * NodeDAO Constructor. This class was developed and tested using a
 * Sybase ASE RDBMS. Some SQL (update commands in particular) may be non-standard.
 *
 * @param dataSource JDBC DataSource backing all node queries
 * @param nodeSchema schema description for the node tables
 * @param authority authority value stored for use by this DAO — presumably
 *                  used when constructing node URIs; confirm against callers
 * @param identManager identity manager stored for use by this DAO
 * @param deletedNodePath path under which deleted nodes are placed
 */
public NodeDAO(DataSource dataSource, NodeSchema nodeSchema, String authority, IdentityManager identManager,
        String deletedNodePath) {
    this.dataSource = dataSource;
    this.nodeSchema = nodeSchema;
    this.authority = authority;
    this.identManager = identManager;
    this.deletedNodePath = deletedNodePath;

    // Two transaction definitions: REPEATABLE_READ for normal work, and a
    // READ_UNCOMMITTED one for dirty reads where stale data is acceptable.
    this.defaultTransactionDef = new DefaultTransactionDefinition();
    defaultTransactionDef.setIsolationLevel(DefaultTransactionDefinition.ISOLATION_REPEATABLE_READ);
    this.dirtyReadTransactionDef = new DefaultTransactionDefinition();
    dirtyReadTransactionDef.setIsolationLevel(DefaultTransactionDefinition.ISOLATION_READ_UNCOMMITTED);
    this.jdbc = new JdbcTemplate(dataSource);
    this.transactionManager = new DataSourceTransactionManager(dataSource);

    // Shared IVOA/UTC date formatting helpers used throughout the DAO.
    this.dateFormat = DateUtil.getDateFormat(DateUtil.IVOA_DATE_FORMAT, DateUtil.UTC);
    this.cal = Calendar.getInstance(DateUtil.UTC);
}

From source file:dao.FlowsDAO.java

/**
 * Returns one page of flows for the named application as a JSON object with
 * "flows", "count", "page", "itemsPerPage", and "totalPages" fields. If the
 * application is unknown (ID 0), an empty result node is returned.
 *
 * The read runs inside a single transaction so SELECT FOUND_ROWS() reflects
 * the paged query issued just before it.
 *
 * Fixes: removed the unused local {@code application} and replaced the raw
 * {@code Map} loop variable with {@code Map<String, Object>}.
 *
 * @param applicationName the application whose flows are listed
 * @param page 1-based page index
 * @param size number of flows per page
 * @return the assembled JSON result node
 */
public static ObjectNode getPagedProjectsByApplication(String applicationName, int page, int size) {
    ObjectNode result = Json.newObject();

    javax.sql.DataSource ds = getJdbcTemplate().getDataSource();
    DataSourceTransactionManager tm = new DataSourceTransactionManager(ds);
    TransactionTemplate txTemplate = new TransactionTemplate(tm);

    // NOTE(review): auto-unboxing below would NPE if the lookup can return
    // null; confirm getApplicationIDByName's contract.
    Integer appID = getApplicationIDByName(applicationName);
    if (appID != 0) {

        final int applicationID = appID;

        result = txTemplate.execute(new TransactionCallback<ObjectNode>() {
            public ObjectNode doInTransaction(TransactionStatus status) {
                ObjectNode resultNode = Json.newObject();
                long count = 0;
                List<Flow> pagedFlows = new ArrayList<Flow>();
                List<Map<String, Object>> rows = null;
                rows = getJdbcTemplate().queryForList(GET_PAGED_FLOWS_BY_APP_ID, applicationID,
                        (page - 1) * size, size);

                try {
                    // Must run on the same connection/transaction as the
                    // paged query above to return its total row count.
                    count = getJdbcTemplate().queryForObject("SELECT FOUND_ROWS()", Long.class);
                } catch (EmptyResultDataAccessException e) {
                    Logger.error("Exception = " + e.getMessage());
                }
                for (Map<String, Object> row : rows) {
                    Flow flow = new Flow();
                    flow.id = (Long) row.get("flow_id");
                    flow.level = (Integer) row.get("flow_level");
                    flow.appId = (Integer) row.get("app_id");
                    flow.group = (String) row.get("flow_group");
                    flow.name = (String) row.get("flow_name");
                    flow.path = (String) row.get("flow_path");
                    flow.appCode = (String) row.get("app_code");
                    if (StringUtils.isNotBlank(flow.path)) {
                        // Keep only the portion of the path before the first ':'.
                        int index = flow.path.indexOf(":");
                        if (index != -1) {
                            flow.path = flow.path.substring(0, index);
                        }
                    }
                    Object created = row.get("created_time");
                    if (created != null) {
                        flow.created = DateFormat.format(created.toString());
                    }
                    Object modified = row.get("modified_time");
                    if (modified != null) {
                        flow.modified = DateFormat.format(row.get("modified_time").toString());
                    }

                    int jobCount = 0;

                    if (flow.id != null && flow.id != 0) {
                        try {
                            jobCount = getJdbcTemplate().queryForObject(GET_JOB_COUNT_BY_APP_ID_AND_FLOW_ID,
                                    new Object[] { flow.appId, flow.id }, Integer.class);
                            flow.jobCount = jobCount;
                        } catch (EmptyResultDataAccessException e) {
                            Logger.error("Exception = " + e.getMessage());
                        }
                    }
                    pagedFlows.add(flow);
                }
                resultNode.set("flows", Json.toJson(pagedFlows));
                resultNode.put("count", count);
                resultNode.put("page", page);
                resultNode.put("itemsPerPage", size);
                resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));

                return resultNode;
            }
        });
        return result;
    }

    result = Json.newObject();
    result.put("count", 0);
    result.put("page", page);
    result.put("itemsPerPage", size);
    result.put("totalPages", 0);
    result.set("flows", Json.toJson(""));
    return result;
}