Example usage for org.springframework.dao DataAccessException DataAccessException

List of usage examples for org.springframework.dao DataAccessException DataAccessException

Introduction

On this page you can find example usage of the org.springframework.dao DataAccessException(String, Throwable) constructor.

Prototype

public DataAccessException(@Nullable String msg, @Nullable Throwable cause) 

Source Link

Document

Constructor for DataAccessException.

Usage

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.dao.DAMQueriesLevel2.java

public void addPathsToSelectedFiles(final List<DataFile> selectedFiles)
        throws DataAccessMatrixQueries.DAMQueriesException {

    transactionTemplate.execute(new TransactionCallback() {
        public Object doInTransaction(final TransactionStatus status) {
            // Only consolidated level-two files need content generated here;
            // every other DataFile type is left untouched.
            for (final DataFile file : selectedFiles) {
                if (!(file instanceof DataFileLevelTwoConsolidated)) {
                    continue;
                }
                try {
                    createConsolidatedFiles(file);
                } catch (IOException cause) {
                    // DataAccessException is abstract, so rethrow via an anonymous
                    // subclass; unchecked, so it escapes the callback signature.
                    throw new DataAccessException(cause.getMessage(), cause) {
                    };
                }
            }
            return null;
        }
    });
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.dao.DAMQueriesLevel2.java

/**
 * Generates the actual data file for the given data file object, writing the contents to the given Writer.
 *
 * @param dataFile the data file holding the information about what to write
 * @param writer   the writer to write to
 */
protected void generateFile(final DataFileLevelTwoThree dataFile, final Writer writer) {
    // A transaction is mandatory here: generateFileInTransaction first stages rows
    // in a global temp table and then queries them back, and that session-private
    // temp data is only visible when both steps share one transaction/session.
    transactionTemplate.execute(new TransactionCallback() {
        public Object doInTransaction(final TransactionStatus status) {
            try {
                generateFileInTransaction(dataFile, writer);
            } catch (IOException cause) {
                // Wrap the checked IOException in an anonymous (unchecked)
                // DataAccessException so the callback signature still compiles.
                throw new DataAccessException(cause.getMessage(), cause) {
                };
            }
            return null;
        }
    });
}

From source file:gov.nih.nci.ncicb.tcga.dcc.dam.dao.DAMQueriesLevel2.java

/**
 * Writes the level 2/3 data matrix for {@code dataFile} to {@code writer}.
 * Must run inside a transaction: temp-table inserts below are only visible to the
 * subsequent query within the same session (see caller's comment in generateFile).
 *
 * @param dataFile the data file describing which data sets / hybridizations to export
 * @param writer   destination for the tab-delimited output
 * @throws IOException if writing headers or the final data row fails
 */
private void generateFileInTransaction(final DataFileLevelTwoThree dataFile, final Writer writer)
        throws IOException {
    /*
    Set the variables for column names we'll need locally.  Local var is 6 times faster than a shared constant.
    If you change the select for HYBRIDIZATION_VALUE_QUERY please change these values and maintain column number order.
     */
    final Integer PROBE_NAME = 1;
    final Integer CHROMOSOME = 2;
    final Integer START_POSITION = 3;
    final Integer END_POSITION = 4;
    final Integer HYBRIDIZATION_REF_ID = 5;
    final Integer GROUP_COLUMN_NAME = 6;
    final Integer VALUE = 7;

    // separator used to build "start-end" position strings below
    final String STRING = "-";
    // 1. gather barcodes and hyb_data_groups from database
    final List<String> hybDataGroupNames = gatherHybridizationDataGroupNames(
            dataFile.getDataSetsDP().iterator().next());
    final Map<String, Long> hybrefIdToBarcodeMap = getBarcodesForHybrefs(dataFile);

    // sort barcodes into the order we want to write them in the file (alphabetical)
    final String[] orderedBarcodes = new String[hybrefIdToBarcodeMap.size()];
    hybrefIdToBarcodeMap.keySet().toArray(orderedBarcodes);
    Arrays.sort(orderedBarcodes);

    int platformId = getPortalPlatformId(dataFile.getDataSetsDP().iterator().next());

    final boolean willHaveProbeConstants = getWillHaveProbeConstants(platformId);
    writeHeaders(writer, hybDataGroupNames, orderedBarcodes, willHaveProbeConstants);

    List<Object> queryBindValues = new ArrayList<Object>();
    String query = prepareQueryAndBindVariables(dataFile, queryBindValues, platformId);
    // Stage ids in the (session-private) temp tables BEFORE running the query below;
    // the query joins against them, so ordering matters.
    insertTempHybrefIds(dataFile.getHybRefIds());
    insertTempDataSetIds(dataFile.getDataSetsDP());
    final Map<String, String> currentRowValues = new HashMap<String, String>(); // keyed by "hybref_id.data_group_name"
    final String[] lastProbe = new String[] { null, null, null }; // done this way b/c used by inner class
    getJdbcTemplate().query(query, queryBindValues.toArray(), new RowCallbackHandler() {
        public void processRow(final ResultSet resultSet) throws SQLException {
            // NOTE(review): called once per row, so this fetch-size hint is re-applied
            // on every row; presumably intended to run once — confirm before moving.
            resultSet.setFetchSize(DEFAULT_FETCHSIZE);

            String currentProbe = resultSet.getString(PROBE_NAME);
            if (lastProbe[0] != null && !lastProbe[0].equals(currentProbe)) {
                // this result set is the start of a new row, so write the old one
                try {
                    writeDataRow(lastProbe, currentRowValues, orderedBarcodes, hybrefIdToBarcodeMap,
                            hybDataGroupNames, writer, willHaveProbeConstants);
                    currentRowValues.clear();
                } catch (IOException e) {
                    getLogger().logError(e);
                    // DataAccessException is abstract: anonymous subclass converts the
                    // checked IOException into an unchecked one the callback can throw.
                    throw new DataAccessException(e.getMessage(), e) {
                    };
                }
            }

            // store this value in the values map, keyed by combination of hybrefid and datagroup name
            final String key = resultSet.getLong(HYBRIDIZATION_REF_ID) + "."
                    + resultSet.getString(GROUP_COLUMN_NAME);
            currentRowValues.put(key, resultSet.getString(VALUE));
            // carry probe name, chromosome, and "start-end" range over to the next row
            lastProbe[0] = currentProbe;
            lastProbe[1] = resultSet.getString(CHROMOSOME);
            lastProbe[2] = resultSet.getString(START_POSITION) + STRING + resultSet.getString(END_POSITION);
        }
    });
    // write last row! (the callback only flushes a row when the NEXT probe appears)
    if (lastProbe[0] != null) {
        writeDataRow(lastProbe, currentRowValues, orderedBarcodes, hybrefIdToBarcodeMap, hybDataGroupNames,
                writer, willHaveProbeConstants);
    }
}

From source file:org.pentaho.aggdes.ui.exec.impl.JdbcTemplateSqlExecutor.java

/**
 * Executes the given SQL statements in a single, new transaction against the
 * currently configured connection, then notifies the callback of completion.
 * A failure inside the batch is logged and passed to the callback rather than
 * rethrown; only a failure to resolve the JDBC URL is thrown directly.
 *
 * @param sql      the raw SQL statements to run (comments/semicolons are stripped first)
 * @param callback notified when execution finishes; receives the exception, or null on success
 * @throws DataAccessException if the database URL cannot be obtained from the DatabaseMeta
 */
public void execute(final String[] sql, final ExecutorCallback callback) throws DataAccessException {
    Exception exceptionDuringExecute = null;
    DatabaseMeta dbMeta = connectionModel.getDatabaseMeta();
    String url = null;
    try {
        url = dbMeta.getURL();
    } catch (KettleDatabaseException e) {
        // DataAccessException is abstract: throw an anonymous subclass carrying the cause.
        throw new DataAccessException("DatabaseMeta problem", e) {
            private static final long serialVersionUID = -3457360074729938909L;
        };
    }
    // create the datasource (one physical connection, suppressClose=false);
    // declared by concrete type so it can be destroy()ed in the finally below
    final SingleConnectionDataSource ds = new SingleConnectionDataSource(dbMeta.getDriverClass(), url,
            dbMeta.getUsername(), dbMeta.getPassword(), false);
    try {
        // create the jdbc template
        final JdbcTemplate jt = new JdbcTemplate(ds);

        // create the transaction manager
        DataSourceTransactionManager tsMan = new DataSourceTransactionManager(ds);

        // create the transaction template
        TransactionTemplate txTemplate = new TransactionTemplate(tsMan);

        // set behavior: the whole batch runs atomically in its own transaction
        txTemplate.setPropagationBehavior(DefaultTransactionDefinition.PROPAGATION_REQUIRES_NEW);
        final String[] noCommentSql = removeCommentsAndSemicolons(connectionModel.getSchema(), sql);
        try {
            // run the code in a transaction
            txTemplate.execute(new TransactionCallbackWithoutResult() {
                public void doInTransactionWithoutResult(TransactionStatus status) {
                    jt.batchUpdate(noCommentSql);
                }
            });
        } catch (DataAccessException e) {
            if (logger.isErrorEnabled()) {
                logger.error("data access exception", e);
            }
            exceptionDuringExecute = e;
        }
    } finally {
        // release the single underlying JDBC connection (was leaked before)
        ds.destroy();
    }
    callback.executionComplete(exceptionDuringExecute);
}