com.alibaba.otter.node.etl.load.loader.db.DbLoadAction.java Source code

Introduction

Here is the source code for com.alibaba.otter.node.etl.load.loader.db.DbLoadAction.java
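
Before the listing itself, a minimal wiring sketch may help: it shows how the bean's setters, afterPropertiesSet() and load(..) fit together when the class is driven outside its usual Spring context. The sketch is not part of the original source; the class name DbLoadActionWiringSketch is made up, and the injected collaborators are assumed to come from the surrounding otter node.

import com.alibaba.otter.node.common.config.ConfigClientService;
import com.alibaba.otter.node.etl.common.db.dialect.DbDialectFactory;
import com.alibaba.otter.node.etl.load.loader.LoadStatsTracker;
import com.alibaba.otter.node.etl.load.loader.db.DbLoadAction;
import com.alibaba.otter.node.etl.load.loader.db.context.DbLoadContext;
import com.alibaba.otter.node.etl.load.loader.interceptor.LoadInterceptor;
import com.alibaba.otter.node.etl.load.loader.weight.WeightController;
import com.alibaba.otter.shared.etl.model.RowBatch;

public class DbLoadActionWiringSketch {

    // Collaborators are assumed to come from the surrounding otter node context
    // (normally Spring injects them); only calls that appear in the listing below are used.
    public DbLoadContext loadOnce(LoadInterceptor interceptor, DbDialectFactory dbDialectFactory,
                                  ConfigClientService configClientService, LoadStatsTracker loadStatsTracker,
                                  RowBatch rowBatch, WeightController controller) throws Exception {
        DbLoadAction loadAction = new DbLoadAction();
        loadAction.setInterceptor(interceptor);
        loadAction.setDbDialectFactory(dbDialectFactory);
        loadAction.setConfigClientService(configClientService);
        loadAction.setLoadStatsTracker(loadStatsTracker);
        loadAction.setPoolSize(8);       // overrides DEFAULT_POOL_SIZE (5)
        loadAction.afterPropertiesSet(); // builds the worker ThreadPoolExecutor
        try {
            return loadAction.load(rowBatch, controller); // apply the batch to the target database
        } finally {
            loadAction.destroy();        // shuts the pool down
        }
    }
}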

Source

/*
 * Copyright (C) 2010-2101 Alibaba Group Holding Limited.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.otter.node.etl.load.loader.db;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.ddlutils.model.Column;
import org.apache.ddlutils.model.Table;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.dao.DataAccessException;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.dao.DeadlockLoserDataAccessException;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementSetter;
import org.springframework.jdbc.core.StatementCallback;
import org.springframework.jdbc.core.StatementCreatorUtils;
import org.springframework.jdbc.support.lob.LobCreator;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;

import com.alibaba.otter.node.common.config.ConfigClientService;
import com.alibaba.otter.node.etl.common.db.dialect.DbDialect;
import com.alibaba.otter.node.etl.common.db.dialect.DbDialectFactory;
import com.alibaba.otter.node.etl.common.db.dialect.mysql.MysqlDialect;
import com.alibaba.otter.node.etl.common.db.utils.SqlUtils;
import com.alibaba.otter.node.etl.load.exception.LoadException;
import com.alibaba.otter.node.etl.load.loader.LoadStatsTracker;
import com.alibaba.otter.node.etl.load.loader.LoadStatsTracker.LoadCounter;
import com.alibaba.otter.node.etl.load.loader.LoadStatsTracker.LoadThroughput;
import com.alibaba.otter.node.etl.load.loader.db.DbLoadData.TableLoadData;
import com.alibaba.otter.node.etl.load.loader.db.context.DbLoadContext;
import com.alibaba.otter.node.etl.load.loader.interceptor.LoadInterceptor;
import com.alibaba.otter.node.etl.load.loader.weight.WeightBuckets;
import com.alibaba.otter.node.etl.load.loader.weight.WeightController;
import com.alibaba.otter.shared.common.model.config.ConfigHelper;
import com.alibaba.otter.shared.common.model.config.channel.Channel;
import com.alibaba.otter.shared.common.model.config.data.DataMedia;
import com.alibaba.otter.shared.common.model.config.data.DataMediaPair;
import com.alibaba.otter.shared.common.model.config.data.db.DbMediaSource;
import com.alibaba.otter.shared.common.model.config.pipeline.Pipeline;
import com.alibaba.otter.shared.common.utils.thread.NamedThreadFactory;
import com.alibaba.otter.shared.etl.model.EventColumn;
import com.alibaba.otter.shared.etl.model.EventData;
import com.alibaba.otter.shared.etl.model.EventType;
import com.alibaba.otter.shared.etl.model.Identity;
import com.alibaba.otter.shared.etl.model.RowBatch;

/**
 * Database load entry point: applies the EventData in a RowBatch to the target database.
 * 
 * @author jianghang 2011-10-31 03:17:43 PM
 * @version 4.0.0
 */
public class DbLoadAction implements InitializingBean, DisposableBean {

    private static final Logger logger = LoggerFactory.getLogger(DbLoadAction.class);
    private static final String WORKER_NAME = "DbLoadAction";
    private static final String WORKER_NAME_FORMAT = "pipelineId = %s , pipelineName = %s , " + WORKER_NAME;
    private static final int DEFAULT_POOL_SIZE = 5;
    private int poolSize = DEFAULT_POOL_SIZE;
    private int retry = 3;
    private int retryWait = 3000;
    private LoadInterceptor interceptor;
    private ExecutorService executor;
    private DbDialectFactory dbDialectFactory;
    private ConfigClientService configClientService;
    private int batchSize = 50;
    private boolean useBatch = true;
    private LoadStatsTracker loadStatsTracker;

    /**
     * Load the given RowBatch into the target database and return the load context.
     */
    public DbLoadContext load(RowBatch rowBatch, WeightController controller) {
        Assert.notNull(rowBatch);
        Identity identity = rowBatch.getIdentity();
        DbLoadContext context = buildContext(identity);

        try {
            List<EventData> datas = rowBatch.getDatas();
            context.setPrepareDatas(datas);
            // re-read the prepared data; the context may have filtered it
            datas = context.getPrepareDatas();
            if (datas == null || datas.size() == 0) {
                logger.info("##no eventdata for load, return");
                return context;
            }

            // DbBatchLoader has already grouped the data by DataMediaSource, so a single DbLoadAction
            // only ever sees one media source; the source of the first record is representative
            context.setDataMediaSource(
                    ConfigHelper.findDataMedia(context.getPipeline(), datas.get(0).getTableId()).getSource());
            interceptor.prepare(context);
            // re-read: the prepare interceptor may have filtered the data
            datas = context.getPrepareDatas();
            // handle DDL first: DDL and DML are never mixed in one batch (guaranteed upstream by canal),
            // and each DDL statement is executed directly as plain SQL
            if (isDdlDatas(datas)) {
                doDdl(context, datas);
            } else {
                WeightBuckets<EventData> buckets = buildWeightBuckets(context, datas);
                List<Long> weights = buckets.weights();
                controller.start(weights);// start must be called with the weight list, even if it is empty
                if (CollectionUtils.isEmpty(datas)) {
                    logger.info("##no eventdata for load");
                }
                adjustPoolSize(context); // resize the worker pool according to the manager configuration
                adjustConfig(context); // refresh runtime parameters (useBatch)
                // process the data bucket by bucket, in weight order
                for (int i = 0; i < weights.size(); i++) {
                    Long weight = weights.get(i);
                    controller.await(weight.intValue());
                    // take all items that share this weight
                    List<EventData> items = buckets.getItems(weight);
                    logger.debug("##start load for weight:" + weight);
                    // pre-process the data for this weight

                    // merge multiple I/U/D operations on the same primary key into one final operation
                    items = DbLoadMerger.merge(items);
                    // classify the merged items into insert/update/delete groups, per table
                    DbLoadData loadData = new DbLoadData();
                    doBefore(items, context, loadData);
                    // execute the load for this weight
                    doLoad(context, loadData);
                    controller.single(weight.intValue());
                    logger.debug("##end load for weight:" + weight);
                }
            }
            interceptor.commit(context);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            interceptor.error(context);
        } catch (Exception e) {
            interceptor.error(context);
            throw new LoadException(e);
        }

        return context;
    }

    private DbLoadContext buildContext(Identity identity) {
        DbLoadContext context = new DbLoadContext();
        context.setIdentity(identity);
        Channel channel = configClientService.findChannel(identity.getChannelId());
        Pipeline pipeline = configClientService.findPipeline(identity.getPipelineId());
        context.setChannel(channel);
        context.setPipeline(pipeline);
        return context;
    }

    /**
     * Check whether the whole batch consists of DDL events. DDL and DML must never be mixed
     * in one batch; if they are, this is treated as a bug and an exception is thrown.
     * 
     * @return true if every event in the batch is DDL
     */
    private boolean isDdlDatas(List<EventData> eventDatas) {
        boolean result = false;
        for (EventData eventData : eventDatas) {
            result |= eventData.getEventType().isDdl();
            if (result && !eventData.getEventType().isDdl()) {
                throw new LoadException("ddl/dml can't be in one batch, it's may be a bug , pls submit issues.",
                        DbLoadDumper.dumpEventDatas(eventDatas));
            }
        }

        return result;
    }

    /**
     * Build the buckets of items grouped by their configured push weight.
     */
    private WeightBuckets<EventData> buildWeightBuckets(DbLoadContext context, List<EventData> datas) {
        WeightBuckets<EventData> buckets = new WeightBuckets<EventData>();
        for (EventData data : datas) {
            // look up the push weight configured for this data media pair
            DataMediaPair pair = ConfigHelper.findDataMediaPair(context.getPipeline(), data.getPairId());
            buckets.addItem(pair.getPushWeight(), data);
        }

        return buckets;
    }

    /**
     * Run the before-interceptor on each item; items that are not filtered out are merged into loadData.
     */
    private void doBefore(List<EventData> items, final DbLoadContext context, final DbLoadData loadData) {
        for (final EventData item : items) {
            boolean filter = interceptor.before(context, item);
            if (!filter) {
                loadData.merge(item);// classify the item into insert/update/delete, per table
            }
        }
    }

    private void doLoad(final DbLoadContext context, DbLoadData loadData) {
        // process all deletes first, in their own pass
        List<List<EventData>> batchDatas = new ArrayList<List<EventData>>();
        for (TableLoadData tableData : loadData.getTables()) {
            if (useBatch) {
                // a unique-key change is replayed as delete + insert, so the deletes must be
                // flushed before the inserts to avoid unique-constraint conflicts
                batchDatas.addAll(split(tableData.getDeleteDatas()));
            } else {
                // batch is disabled, so each delete becomes its own single-row "batch";
                // the delete-before-insert ordering for unique-key changes still applies
                for (EventData data : tableData.getDeleteDatas()) {
                    batchDatas.add(Arrays.asList(data));
                }
            }
        }

        if (context.getPipeline().getParameters().isDryRun()) {
            doDryRun(context, batchDatas, true);
        } else {
            doTwoPhase(context, batchDatas, true);
        }
        batchDatas.clear();

        // then handle inserts and updates
        for (TableLoadData tableData : loadData.getTables()) {
            if (useBatch) {
                // insert + update statements, batched per table
                batchDatas.addAll(split(tableData.getInsertDatas()));
                batchDatas.addAll(split(tableData.getUpadateDatas()));
            } else {
                // insert + update, one row per batch since batching is disabled
                for (EventData data : tableData.getInsertDatas()) {
                    batchDatas.add(Arrays.asList(data));
                }
                for (EventData data : tableData.getUpadateDatas()) {
                    batchDatas.add(Arrays.asList(data));
                }
            }
        }

        if (context.getPipeline().getParameters().isDryRun()) {
            doDryRun(context, batchDatas, true);
        } else {
            doTwoPhase(context, batchDatas, true);
        }
        batchDatas.clear();
    }

    /**
     * Group rows that share the same SQL into batches of at most batchSize.
     */
    private List<List<EventData>> split(List<EventData> datas) {
        List<List<EventData>> result = new ArrayList<List<EventData>>();
        if (datas == null || datas.size() == 0) {
            return result;
        } else {
            int[] bits = new int[datas.size()];// marks rows that have already been assigned to a batch
            for (int i = 0; i < bits.length; i++) {
                // skip rows that are already in a batch
                while (i < bits.length && bits[i] == 1) {
                    i++;
                }

                if (i >= bits.length) { // every row has been assigned
                    break;
                }

                // start a new batch, capped at batchSize
                List<EventData> batch = new ArrayList<EventData>();
                bits[i] = 1;
                batch.add(datas.get(i));
                for (int j = i + 1; j < bits.length && batch.size() < batchSize; j++) {
                    if (bits[j] == 0 && canBatch(datas.get(i), datas.get(j))) {
                        batch.add(datas.get(j));
                        bits[j] = 1;// mark as assigned
                    }
                }
                result.add(batch);
            }

            return result;
        }
    }

    /**
     * Decide whether two rows can share one batch: they must carry the same SQL
     * (which already encodes schemaName and tableName).
     */
    private boolean canBatch(EventData source, EventData target) {
        // return StringUtils.equals(source.getSchemaName(),
        // target.getSchemaName())
        // && StringUtils.equals(source.getTableName(), target.getTableName())
        // && StringUtils.equals(source.getSql(), target.getSql());
        // return StringUtils.equals(source.getSql(), target.getSql());

        // the sql template interns the generated SQL (String.intern()), so identity
        // comparison (==) is both correct and faster than equals()
        return source.getSql() == target.getSql();
    }

    private void doDryRun(DbLoadContext context, List<List<EventData>> totalRows, boolean canBatch) {
        for (List<EventData> rows : totalRows) {
            if (CollectionUtils.isEmpty(rows)) {
                continue; // skip empty batches
            }

            for (EventData row : rows) {
                processStat(row, context);// dry run: only update the statistics, nothing is written
            }

            context.getProcessedDatas().addAll(rows);
        }
    }

    /**
     * Execute the DDL statements one by one against the target database.
     * 
     * @param context
     * @param eventDatas
     */
    private void doDdl(DbLoadContext context, List<EventData> eventDatas) {
        for (final EventData data : eventDatas) {
            DataMedia dataMedia = ConfigHelper.findDataMedia(context.getPipeline(), data.getTableId());
            final DbDialect dbDialect = dbDialectFactory.getDbDialect(context.getIdentity().getPipelineId(),
                    (DbMediaSource) dataMedia.getSource());
            Boolean skipDdlException = context.getPipeline().getParameters().getSkipDdlException();
            try {
                Boolean result = dbDialect.getJdbcTemplate().execute(new StatementCallback<Boolean>() {

                    public Boolean doInStatement(Statement stmt) throws SQLException, DataAccessException {
                        Boolean result = false;
                        if (dbDialect instanceof MysqlDialect && StringUtils.isNotEmpty(data.getDdlSchemaName())) {
                            // for mysql, switch to the schema recorded with the DDL before executing it
                            result &= stmt.execute("use " + data.getDdlSchemaName());
                        }
                        result &= stmt.execute(data.getSql());
                        return result;
                    }
                });
                if (result) {
                    context.getProcessedDatas().add(data); // record the DDL as successfully processed
                } else {
                    context.getFailedDatas().add(data);
                }

            } catch (Throwable e) {
                if (skipDdlException) {
                    // do skip
                    logger.warn("skip exception for ddl : {} , caused by {}", data,
                            ExceptionUtils.getFullStackTrace(e));
                } else {
                    throw new LoadException(e);
                }
            }

        }
    }

    /**
     * Two-phase execution: phase one runs the batches in parallel; if any batch fails,
     * phase two re-runs the data serially (optionally skipping bad records).
     */
    private void doTwoPhase(DbLoadContext context, List<List<EventData>> totalRows, boolean canBatch) {
        // phase one: submit every batch to the worker pool
        List<Future<Exception>> results = new ArrayList<Future<Exception>>();
        for (List<EventData> rows : totalRows) {
            if (CollectionUtils.isEmpty(rows)) {
                continue; // skip empty batches
            }

            results.add(executor.submit(new DbLoadWorker(context, rows, canBatch)));
        }

        boolean partFailed = false;
        for (int i = 0; i < results.size(); i++) {
            Future<Exception> result = results.get(i);
            Exception ex = null;
            try {
                ex = result.get();
                for (EventData data : totalRows.get(i)) {
                    interceptor.after(context, data);// notify the interceptor that this record has been handled
                }
            } catch (Exception e) {
                ex = e;
            }

            if (ex != null) {
                logger.warn("##load phase one failed!", ex);
                partFailed = true;
            }
        }

        if (partFailed) {
            // if (CollectionUtils.isEmpty(context.getFailedDatas())) {
            // logger.error("##load phase one failed but failedDatas is empty!");
            // return;
            // }

            // retry everything that went through phase one, not just the recorded failed
            // datas, so no record can be lost
            List<EventData> retryEventDatas = new ArrayList<EventData>();
            for (List<EventData> rows : totalRows) {
                retryEventDatas.addAll(rows);
            }

            context.getFailedDatas().clear(); // reset the failed datas; the retry below recomputes them

            // skipLoadException may be null when the manager config predates this parameter, so check for null
            Boolean skipLoadException = context.getPipeline().getParameters().getSkipLoadException();
            if (skipLoadException != null && skipLoadException) {// skipping allowed: replay one record at a time and skip failures
                for (EventData retryEventData : retryEventDatas) {
                    DbLoadWorker worker = new DbLoadWorker(context, Arrays.asList(retryEventData), false);// force batch = false
                    try {
                        Exception ex = worker.call();
                        if (ex != null) {
                            // do skip
                            logger.warn("skip exception for data : {} , caused by {}", retryEventData,
                                    ExceptionUtils.getFullStackTrace(ex));
                        }
                    } catch (Exception ex) {
                        // do skip
                        logger.warn("skip exception for data : {} , caused by {}", retryEventData,
                                ExceptionUtils.getFullStackTrace(ex));
                    }
                }
            } else {
                // skipping not allowed: retry the whole set serially in a single worker
                DbLoadWorker worker = new DbLoadWorker(context, retryEventDatas, false);// force batch = false
                try {
                    Exception ex = worker.call();
                    if (ex != null) {
                        throw ex; // rethrow so the catch below wraps it into a LoadException
                    }
                } catch (Exception ex) {
                    logger.error("##load phase two failed!", ex);
                    throw new LoadException(ex);
                }
            }

            // notify the interceptor for every retried record
            for (EventData data : retryEventDatas) {
                interceptor.after(context, data);
            }
        }

    }

    // resize the worker pool when the pipeline's loadPoolSize changes
    private void adjustPoolSize(DbLoadContext context) {
        Pipeline pipeline = context.getPipeline();
        int newPoolSize = pipeline.getParameters().getLoadPoolSize();
        if (newPoolSize != poolSize) {
            poolSize = newPoolSize;
            if (executor instanceof ThreadPoolExecutor) {
                ThreadPoolExecutor pool = (ThreadPoolExecutor) executor;
                pool.setCorePoolSize(newPoolSize);
                pool.setMaximumPoolSize(newPoolSize);
            }
        }
    }

    private void adjustConfig(DbLoadContext context) {
        Pipeline pipeline = context.getPipeline();
        this.useBatch = pipeline.getParameters().isUseBatch();
    }

    public void afterPropertiesSet() throws Exception {
        executor = new ThreadPoolExecutor(poolSize, poolSize, 0L, TimeUnit.MILLISECONDS,
                new ArrayBlockingQueue<Runnable>(poolSize * 4), new NamedThreadFactory(WORKER_NAME),
                new ThreadPoolExecutor.CallerRunsPolicy());
    }

    public void destroy() throws Exception {
        executor.shutdownNow();
    }

    enum ExecuteResult {
        SUCCESS, ERROR, RETRY
    }

    class DbLoadWorker implements Callable<Exception> {

        private DbLoadContext context;
        private DbDialect dbDialect;
        private List<EventData> datas;
        private boolean canBatch;
        private List<EventData> allFailedDatas = new ArrayList<EventData>();
        private List<EventData> allProcesedDatas = new ArrayList<EventData>();
        private List<EventData> processedDatas = new ArrayList<EventData>();
        private List<EventData> failedDatas = new ArrayList<EventData>();

        public DbLoadWorker(DbLoadContext context, List<EventData> datas, boolean canBatch) {
            this.context = context;
            this.datas = datas;
            this.canBatch = canBatch;

            EventData data = datas.get(0); // all records in this worker share one media source, so the first record suffices
            DataMedia dataMedia = ConfigHelper.findDataMedia(context.getPipeline(), data.getTableId());
            dbDialect = dbDialectFactory.getDbDialect(context.getIdentity().getPipelineId(),
                    (DbMediaSource) dataMedia.getSource());

        }

        public Exception call() throws Exception {
            try {
                Thread.currentThread().setName(String.format(WORKER_NAME_FORMAT, context.getPipeline().getId(),
                        context.getPipeline().getName()));
                return doCall();
            } finally {
                Thread.currentThread().setName(WORKER_NAME);
            }
        }

        private Exception doCall() {
            RuntimeException error = null;
            ExecuteResult exeResult = null;
            int index = 0;// index of the next record to process
            for (; index < datas.size();) {
                // take the next slice: a batch of up to batchSize rows, or a single row
                final List<EventData> splitDatas = new ArrayList<EventData>();
                if (useBatch && canBatch) {
                    int end = (index + batchSize > datas.size()) ? datas.size() : (index + batchSize);
                    splitDatas.addAll(datas.subList(index, end));
                    index = end;// advance past the slice
                } else {
                    splitDatas.add(datas.get(index));
                    index = index + 1;// advance one record
                }

                int retryCount = 0;
                while (true) {
                    try {
                        if (!CollectionUtils.isEmpty(failedDatas)) {
                            splitDatas.clear();
                            splitDatas.addAll(failedDatas); // on a retry, only re-run the records that failed last time
                        } else {
                            failedDatas.addAll(splitDatas); // pre-mark as failed: fetching the lob handler / datasource below may throw
                        }

                        final LobCreator lobCreator = dbDialect.getLobHandler().getLobCreator();
                        if (useBatch && canBatch) {
                            // batch path: all rows in splitDatas share the same SQL
                            final String sql = splitDatas.get(0).getSql();
                            int[] affects = new int[splitDatas.size()];
                            affects = (int[]) dbDialect.getTransactionTemplate().execute(new TransactionCallback() {

                                public Object doInTransaction(TransactionStatus status) {
                                    // run the whole slice in one transaction
                                    try {
                                        failedDatas.clear(); // reset per-attempt state
                                        processedDatas.clear();
                                        interceptor.transactionBegin(context, splitDatas, dbDialect);
                                        JdbcTemplate template = dbDialect.getJdbcTemplate();
                                        int[] affects = template.batchUpdate(sql,
                                                new BatchPreparedStatementSetter() {

                                                    public void setValues(PreparedStatement ps, int idx)
                                                            throws SQLException {
                                                        doPreparedStatement(ps, dbDialect, lobCreator,
                                                                splitDatas.get(idx));
                                                    }

                                                    public int getBatchSize() {
                                                        return splitDatas.size();
                                                    }
                                                });
                                        interceptor.transactionEnd(context, splitDatas, dbDialect);
                                        return affects;
                                    } finally {
                                        lobCreator.close();
                                    }
                                }

                            });

                            // per-row statistics based on the affected-row counts
                            for (int i = 0; i < splitDatas.size(); i++) {
                                processStat(splitDatas.get(i), affects[i], true);
                            }
                        } else {
                            final EventData data = splitDatas.get(0);// single-row path
                            int affect = 0;
                            affect = (Integer) dbDialect.getTransactionTemplate()
                                    .execute(new TransactionCallback() {

                                        public Object doInTransaction(TransactionStatus status) {
                                            try {
                                                failedDatas.clear(); // reset per-attempt state
                                                processedDatas.clear();
                                                interceptor.transactionBegin(context, Arrays.asList(data),
                                                        dbDialect);
                                                JdbcTemplate template = dbDialect.getJdbcTemplate();
                                                int affect = template.update(data.getSql(),
                                                        new PreparedStatementSetter() {

                                                            public void setValues(PreparedStatement ps)
                                                                    throws SQLException {
                                                                doPreparedStatement(ps, dbDialect, lobCreator,
                                                                        data);
                                                            }
                                                        });
                                                interceptor.transactionEnd(context, Arrays.asList(data), dbDialect);
                                                return affect;
                                            } finally {
                                                lobCreator.close();
                                            }
                                        }
                                    });
                            // per-row statistics
                            processStat(data, affect, false);
                        }

                        error = null;
                        exeResult = ExecuteResult.SUCCESS;
                    } catch (DeadlockLoserDataAccessException ex) {
                        error = new LoadException(ExceptionUtils.getFullStackTrace(ex),
                                DbLoadDumper.dumpEventDatas(splitDatas));
                        exeResult = ExecuteResult.RETRY;
                    } catch (DataIntegrityViolationException ex) {
                        error = new LoadException(ExceptionUtils.getFullStackTrace(ex),
                                DbLoadDumper.dumpEventDatas(splitDatas));
                        // if (StringUtils.contains(ex.getMessage(),
                        // "ORA-00001")) {
                        // exeResult = ExecuteResult.RETRY;
                        // } else {
                        // exeResult = ExecuteResult.ERROR;
                        // }
                        exeResult = ExecuteResult.ERROR;
                    } catch (RuntimeException ex) {
                        error = new LoadException(ExceptionUtils.getFullStackTrace(ex),
                                DbLoadDumper.dumpEventDatas(splitDatas));
                        exeResult = ExecuteResult.ERROR;
                    } catch (Throwable ex) {
                        error = new LoadException(ExceptionUtils.getFullStackTrace(ex),
                                DbLoadDumper.dumpEventDatas(splitDatas));
                        exeResult = ExecuteResult.ERROR;
                    }

                    if (ExecuteResult.SUCCESS == exeResult) {
                        allFailedDatas.addAll(failedDatas);// fold into the overall result
                        allProcesedDatas.addAll(processedDatas);
                        failedDatas.clear();// reset for the next slice
                        processedDatas.clear();
                        break; // do next eventData
                    } else if (ExecuteResult.RETRY == exeResult) {
                        retryCount = retryCount + 1;
                        // reset state and retry the same slice
                        processedDatas.clear();
                        failedDatas.clear();
                        failedDatas.addAll(splitDatas);
                        if (retryCount >= retry) {
                            processFailedDatas(index);// give up: record everything from this point on as failed
                            throw new LoadException(String.format("execute [%s] retry %s times failed",
                                    context.getIdentity().toString(), retryCount), error);
                        } else {
                            try {
                                int wait = retryCount * retryWait;
                                wait = (wait < retryWait) ? retryWait : wait;
                                Thread.sleep(wait);
                            } catch (InterruptedException ex) {
                                Thread.interrupted();
                                processFailedDatas(index);// interrupted while waiting: record failures and abort
                                throw new LoadException(ex);
                            }
                        }
                    } else {
                        // non-retryable error: record failures and abort
                        processedDatas.clear();
                        failedDatas.clear();
                        failedDatas.addAll(splitDatas);
                        processFailedDatas(index);// record everything from this point on as failed
                        throw error;
                    }
                }
            }

            // hand back the overall result (records with affect == 0 end up in failedDatas)
            context.getFailedDatas().addAll(allFailedDatas);
            context.getProcessedDatas().addAll(allProcesedDatas);
            return null;
        }

        private void doPreparedStatement(PreparedStatement ps, DbDialect dbDialect, LobCreator lobCreator,
                EventData data) throws SQLException {
            EventType type = data.getEventType();
            // note: for insert/update the generated SQL places the primary-key columns after the ordinary columns
            List<EventColumn> columns = new ArrayList<EventColumn>();
            if (type.isInsert()) {
                columns.addAll(data.getColumns()); // insert
                columns.addAll(data.getKeys());
            } else if (type.isDelete()) {
                columns.addAll(data.getKeys());
            } else if (type.isUpdate()) {
                boolean existOldKeys = !CollectionUtils.isEmpty(data.getOldKeys());
                columns.addAll(data.getUpdatedColumns());// only columns flagged isUpdate=true
                columns.addAll(data.getKeys());
                if (existOldKeys) {
                    columns.addAll(data.getOldKeys());
                }
            }

            // look up which target columns are required (NOT NULL)
            Table table = dbDialect.findTable(data.getSchemaName(), data.getTableName());
            Map<String, Boolean> isRequiredMap = new HashMap<String, Boolean>();
            for (Column tableColumn : table.getColumns()) {
                isRequiredMap.put(StringUtils.lowerCase(tableColumn.getName()), tableColumn.isRequired());
            }

            for (int i = 0; i < columns.size(); i++) {
                int paramIndex = i + 1;
                EventColumn column = columns.get(i);
                int sqlType = column.getColumnType();

                Boolean isRequired = isRequiredMap.get(StringUtils.lowerCase(column.getColumnName()));
                if (isRequired == null) {
                    throw new LoadException(String.format("column name %s is not found in Table[%s]",
                            column.getColumnName(), table.toString()));
                }

                Object param = null;
                if (dbDialect instanceof MysqlDialect
                        && (sqlType == Types.TIME || sqlType == Types.TIMESTAMP || sqlType == Types.DATE)) {
                    // work around mysql's 0000-00-00 00:00:00 values: pass the raw string and let the
                    // mysql driver handle it instead of converting to a Timestamp here
                    param = column.getColumnValue();
                } else {
                    param = SqlUtils.stringToSqlValue(column.getColumnValue(), sqlType, isRequired,
                            dbDialect.isEmptyStringNulled());
                }

                try {
                    switch (sqlType) {
                    case Types.CLOB:
                        lobCreator.setClobAsString(ps, paramIndex, (String) param);
                        break;

                    case Types.BLOB:
                        lobCreator.setBlobAsBytes(ps, paramIndex, (byte[]) param);
                        break;
                    case Types.TIME:
                    case Types.TIMESTAMP:
                    case Types.DATE:
                        // mysql and oracle need different handling for temporal types
                        if (dbDialect instanceof MysqlDialect) {
                            // mysql: bind the raw value so 0000-00-00 00:00:00 survives; the
                            // mysql driver handles it without a Timestamp conversion
                            ps.setObject(paramIndex, param);
                        } else {
                            StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param);
                        }
                        break;
                    case Types.BIT:
                        // mysql BIT columns can be up to 64 bits and arrive as BigInteger;
                        // the mysql driver's setInt path for BIT would lose data, so bind as DECIMAL instead
                        if (dbDialect instanceof MysqlDialect) {
                            StatementCreatorUtils.setParameterValue(ps, paramIndex, Types.DECIMAL, null, param);
                        } else {
                            StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param);
                        }
                        break;
                    default:
                        StatementCreatorUtils.setParameterValue(ps, paramIndex, sqlType, null, param);
                        break;
                    }
                } catch (SQLException ex) {
                    logger.error("## SetParam error , [pairId={}, sqltype={}, value={}]",
                            new Object[] { data.getPairId(), sqlType, param });
                    throw ex;
                }
            }
        }

        private void processStat(EventData data, int affect, boolean batch) {
            if (batch && (affect < 1 && affect != Statement.SUCCESS_NO_INFO)) {
                failedDatas.add(data); // queue for retry: the batch reported no affected row
            } else if (!batch && affect < 1) {
                failedDatas.add(data);// queue for retry: no row was affected
            } else {
                processedDatas.add(data); // record as processed; the commit may still fail, so this may be retried too
                DbLoadAction.this.processStat(data, context);
            }
        }

        // when a slice aborts, record what failed and what had already been processed
        private void processFailedDatas(int index) {
            allFailedDatas.addAll(failedDatas);// fold in the failures of the current slice
            context.getFailedDatas().addAll(allFailedDatas);// expose all accumulated failures
            for (; index < datas.size(); index++) { // everything not yet attempted is failed as well
                context.getFailedDatas().add(datas.get(index));
            }
            // only the current slice was rolled back; data committed by earlier slices (allProcesedDatas)
            // must still be reported as processed (bugfix)
            allProcesedDatas.addAll(processedDatas);
            context.getProcessedDatas().addAll(allProcesedDatas);// expose all accumulated successes
        }

    }

    private void processStat(EventData data, DbLoadContext context) {
        LoadThroughput throughput = loadStatsTracker.getStat(context.getIdentity());
        LoadCounter counter = throughput.getStat(data.getPairId());
        EventType type = data.getEventType();
        if (type.isInsert()) {
            counter.getInsertCount().incrementAndGet();
        } else if (type.isUpdate()) {
            counter.getUpdateCount().incrementAndGet();
        } else if (type.isDelete()) {
            counter.getDeleteCount().incrementAndGet();
        }

        counter.getRowCount().incrementAndGet();
        counter.getRowSize().addAndGet(calculateSize(data));
    }

    // estimate the size of a single row
    private long calculateSize(EventData data) {
        // long size = 0L;
        // size += data.getKeys().toString().getBytes().length - 12 -
        // data.getKeys().size() + 1L;
        // size += data.getColumns().toString().getBytes().length - 12 -
        // data.getKeys().size() + 1L;
        // return size;

        // byte[] bytes = JsonUtils.marshalToByte(data);// serializing here is too expensive
        // return bytes.length;

        return data.getSize();// use the size captured during extract instead of recomputing it here
    }

    // =============== setter / getter ===============

    public void setPoolSize(int poolSize) {
        this.poolSize = poolSize;
    }

    public void setRetry(int retry) {
        this.retry = retry;
    }

    public void setRetryWait(int retryWait) {
        this.retryWait = retryWait;
    }

    public void setInterceptor(LoadInterceptor interceptor) {
        this.interceptor = interceptor;
    }

    public void setDbDialectFactory(DbDialectFactory dbDialectFactory) {
        this.dbDialectFactory = dbDialectFactory;
    }

    public void setConfigClientService(ConfigClientService configClientService) {
        this.configClientService = configClientService;
    }

    public void setLoadStatsTracker(LoadStatsTracker loadStatsTracker) {
        this.loadStatsTracker = loadStatsTracker;
    }

    public void setUseBatch(boolean useBatch) {
        this.useBatch = useBatch;
    }

}
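
A closing note on canBatch(): it compares SQL strings with == rather than equals(), which is only safe because the extract stage interns the generated SQL. The small self-contained demonstration below is not part of the project (the class name InternBatchDemo is made up); it just shows why identity comparison works for interned strings.

public class InternBatchDemo {

    public static void main(String[] args) {
        // two rows whose SQL text is identical end up sharing one interned instance
        String a = new String("insert into t (id, name) values (?, ?)").intern();
        String b = new String("insert into t (id, name) values (?, ?)").intern();
        System.out.println(a == b); // true  -> canBatch() would put these rows in one batch

        // a different statement interns to a different instance
        String c = new String("delete from t where id = ?").intern();
        System.out.println(a == c); // false -> a new batch is started for this row
    }
}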