ei.ne.ke.cassandra.cql3.AstyanaxCql3Repository.java Source code

Introduction

Here is the source code for ei.ne.ke.cassandra.cql3.AstyanaxCql3Repository.java
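
The class implements the Spring Data repository abstraction on top of Netflix Astyanax (see the class Javadoc in the listing). It expects an AstyanaxContext<Keyspace> to be injected through its @Autowired setKeyspaceContext(...) setter, typically as a Spring-managed bean. Below is a minimal sketch of how such a context might be constructed, following the standard Astyanax getting-started pattern; the cluster name, keyspace name, CQL version, port, and seed address are placeholders and are not part of the original source.

import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.NodeDiscoveryType;
import com.netflix.astyanax.connectionpool.impl.ConnectionPoolConfigurationImpl;
import com.netflix.astyanax.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.astyanax.impl.AstyanaxConfigurationImpl;
import com.netflix.astyanax.thrift.ThriftFamilyFactory;

public final class KeyspaceContextSketch {

    /**
     * Builds an AstyanaxContext<Keyspace> of the kind the repository's
     * setKeyspaceContext(...) setter expects. All names, versions, and
     * addresses below are placeholders for illustration only.
     */
    public static AstyanaxContext<Keyspace> buildContext() {
        AstyanaxContext<Keyspace> context = new AstyanaxContext.Builder()
                .forCluster("TestCluster")                      // placeholder cluster name
                .forKeyspace("test_keyspace")                   // placeholder keyspace name
                .withAstyanaxConfiguration(new AstyanaxConfigurationImpl()
                        .setDiscoveryType(NodeDiscoveryType.RING_DESCRIBE)
                        .setCqlVersion("3.0.0"))                // the repository issues CQL3 statements
                .withConnectionPoolConfiguration(new ConnectionPoolConfigurationImpl("MyConnectionPool")
                        .setPort(9160)                          // placeholder Thrift port
                        .setMaxConnsPerHost(1)
                        .setSeeds("127.0.0.1:9160"))            // placeholder seed node
                .withConnectionPoolMonitor(new CountingConnectionPoolMonitor())
                .buildKeyspace(ThriftFamilyFactory.getInstance());
        context.start(); // opens the connection pool
        return context;
    }
}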

Source

/*
 * Copyright 2013 EK3 Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ei.ne.ke.cassandra.cql3;

import java.io.Serializable;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.repository.NoRepositoryBean;

import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.mysema.query.types.OrderSpecifier;
import com.mysema.query.types.Predicate;
import com.netflix.astyanax.AstyanaxContext;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.connectionpool.OperationResult;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.ColumnList;
import com.netflix.astyanax.model.ConsistencyLevel;
import com.netflix.astyanax.model.CqlResult;
import com.netflix.astyanax.model.Row;
import com.netflix.astyanax.model.Rows;
import com.netflix.astyanax.query.CqlQuery;
import com.netflix.astyanax.query.PreparedCqlQuery;

/**
 * Uses Netflix(TM) Astyanax to implement the Spring repository abstraction.
 * @param <T> the entity type
 * @param <ID> the key type
 */
@NoRepositoryBean
public class AstyanaxCql3Repository<T, ID extends Serializable> implements CassandraRepository<T, ID> {

    public static final ConsistencyLevel DEFAULT_READ_CONSISTENCY_LEVEL = ConsistencyLevel.CL_ONE;
    public static final ConsistencyLevel DEFAULT_WRITE_CONSISTENCY_LEVEL = ConsistencyLevel.CL_ONE;
    public static final boolean DEFAULT_USE_COMPRESSION = false;
    public static final int DEFAULT_BATCH_SIZE = 50;
    public static final int DEFAULT_NUM_THREADS = 5;

    protected static final Logger LOGGER = LoggerFactory.getLogger(AstyanaxCql3Repository.class);

    private final EntitySpecification<T, ID> spec;
    private final Cql3StatementGenerator<T, ID> cqlGen;
    private ConsistencyLevel readConsistencyLevel;
    private ConsistencyLevel writeConsistencyLevel;
    private boolean useCompression;
    private Keyspace keyspace;
    private int batchSize;
    private int numThreads;
    private ExecutorService executorService;

    /**
     * Constructor.
     */
    public AstyanaxCql3Repository() {
        Type genericSuperclass = getClass().getGenericSuperclass();
        if (genericSuperclass instanceof ParameterizedType) {
            this.spec = EntitySpecificationFactory.get((ParameterizedType) genericSuperclass);
        } else {
            throw new IllegalStateException("Repository must be subclassed with concrete entity and key type parameters");
        }
        this.cqlGen = new CachingCql3StatementGenerator<T, ID>(this.spec);
        this.readConsistencyLevel = DEFAULT_READ_CONSISTENCY_LEVEL;
        this.writeConsistencyLevel = DEFAULT_WRITE_CONSISTENCY_LEVEL;
        this.useCompression = DEFAULT_USE_COMPRESSION;
        this.batchSize = DEFAULT_BATCH_SIZE;
        this.numThreads = DEFAULT_NUM_THREADS;
        this.executorService = Executors.newFixedThreadPool(this.numThreads);
    }

    /**
     * Consumes the given list of {@link Future}s and waits until they have all
     * completed.
     * @param futureTasks the futures to consume
     */
    private <U> void waitUntilCompletion(List<Future<U>> futureTasks) {
        for (Future<U> futureTask : futureTasks) {
            try {
                futureTask.get();
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore the interrupt flag
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Sets up the keyspace for a regular CQL3 read statement.
     * @param cql the CQL3 query.
     * @return an Astyanax {@link CqlQuery} object.
     */
    protected CqlQuery<String, String> doCqlRead(String cql) {
        Preconditions.checkNotNull(cql);
        LOGGER.debug("CQL3 statement: {}", cql);
        CqlQuery<String, String> query = keyspace.prepareQuery(spec.getColumnFamily())
                .setConsistencyLevel(readConsistencyLevel).withCql(cql);
        if (useCompression) {
            query = query.useCompression();
        }
        return query;
    }

    /**
     * Sets up the keyspace for a regular CQL3 write statement.
     * @param cql the CQL3 query.
     * @return an Astyanax {@link CqlQuery} object.
     */
    protected CqlQuery<String, String> doCqlWrite(String cql) {
        Preconditions.checkNotNull(cql);
        LOGGER.debug("CQL3 statement: {}", cql);
        CqlQuery<String, String> query = keyspace.prepareQuery(spec.getColumnFamily())
                .setConsistencyLevel(writeConsistencyLevel).withCql(cql);
        if (useCompression) {
            query = query.useCompression();
        }
        return query;
    }

    /**
     * Sets up the keyspace for a prepared CQL3 read statement.
     * @param cql the prepared CQL3 query.
     * @return an Astyanax {@link PreparedCqlQuery} object.
     */
    protected PreparedCqlQuery<String, String> doPreparedCqlRead(String cql) {
        Preconditions.checkNotNull(cql);
        LOGGER.debug("CQL3 prepared statement: {}", cql);
        CqlQuery<String, String> query = keyspace.prepareQuery(spec.getColumnFamily())
                .setConsistencyLevel(readConsistencyLevel).withCql(cql);
        if (useCompression) {
            query = query.useCompression();
        }
        return query.asPreparedStatement();
    }

    /**
     * Sets up the keyspace for a prepared CQL3 write statement.
     * @param cql the prepared CQL3 query.
     * @return an Astyanax {@link PreparedCqlQuery} object.
     */
    protected PreparedCqlQuery<String, String> doPreparedCqlWrite(String cql) {
        Preconditions.checkNotNull(cql);
        LOGGER.debug("CQL3 prepared statement: {}", cql);
        CqlQuery<String, String> query = keyspace.prepareQuery(spec.getColumnFamily())
                .setConsistencyLevel(writeConsistencyLevel).withCql(cql);
        if (useCompression) {
            query = query.useCompression();
        }
        return query.asPreparedStatement();
    }

    /**
     * @param keyspaceContext the Astyanax context from which the {@link Keyspace} client is obtained
     */
    @Autowired
    protected void setKeyspaceContext(AstyanaxContext<Keyspace> keyspaceContext) {
        this.keyspace = keyspaceContext.getClient();
    }

    /**
     * @return the spec
     */
    protected EntitySpecification<T, ID> getSpec() {
        return spec;
    }

    protected List<? extends T> doDelete(Iterable<? extends T> entities) {
        try {
            int count = Iterables.size(entities);
            List<T> result = Lists.newArrayListWithExpectedSize(count);
            String cql = cqlGen.buildDeleteStatement(count);
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
            for (T entity : entities) {
                Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(entity);
                for (String column : spec.getKeyColumns()) {
                    preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
                }
                result.add(entity);
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            return result;
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    protected List<T> doFindAll(Iterable<ID> ids) {
        try {
            int count = Iterables.size(ids);
            List<T> result = Lists.newArrayListWithExpectedSize(count);
            for (ID id : ids) {
                Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(id);
                List<String> columnsSet = EntitySpecificationUtils.getKeysSet(serializedKeyValues);
                String cql = cqlGen.buildFindAllStatement(columnsSet);
                PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
                for (String column : columnsSet) {
                    preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
                }
                OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
                LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                        opResult.getLatency(TimeUnit.MILLISECONDS));
                CqlResult<String, String> cqlResult = opResult.getResult();
                result.addAll(spec.map(cqlResult.getRows()));
            }
            return result;
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    protected <S extends T> List<S> doSave(Iterable<S> entities) {
        try {
            int count = Iterables.size(entities);
            List<S> result = Lists.newArrayListWithCapacity(count);
            String cql = cqlGen.buildSaveStatement(count);
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
            for (S entity : entities) {
                List<ByteBuffer> serializedEntity = spec.map(entity);
                for (ByteBuffer buf : serializedEntity) {
                    preparedStatement = preparedStatement.withValue(buf);
                }
                result.add(entity);
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            return result;
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    protected List<T> doFindAll(ID restrict, Sort sort) {
        try {
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(restrict);
            List<String> keysSet = EntitySpecificationUtils.getKeysSet(serializedKeyValues);
            String keysCql = cqlGen.buildLimitedFindAllKeysStatement(keysSet, sort, 0);
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(keysCql);
            for (String column : keysSet) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            OperationResult<CqlResult<String, String>> keysResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", keysResult.getAttemptsCount(),
                    keysResult.getLatency(TimeUnit.MILLISECONDS));
            CqlResult<String, String> cqlKeysResult = keysResult.getResult();
            Rows<String, String> keysSetRows = cqlKeysResult.getRows();
            List<T> keysAsEnts = spec.map(keysSetRows);
            List<ID> keys = spec.getKey(keysAsEnts);
            return findAll(keys);
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    protected Page<T> doFindAll(ID restrict, Pageable pageable) {
        /*
         * Example #1
         *   pageNumber = 0
         *   pageSize = 25
         *   offset = 0
         *   => start row = 0
         *   => end row = 24 (including)
         *
         * Example #2
         *   pageNumber = 1
         *   pageSize = 25
         *   offset = 0
         *   => start row = 25
         *   => end row = 49 (including)
         *
         * Example #3
         *   pageNumber = 1
         *   pageSize = 25
         *   offset = 10
         *   => start row = 35
         *   => end row = 59 (including)
         */
        try {
            int pageNumber = pageable.getPageNumber();
            int pageSize = pageable.getPageSize();
            int offset = pageable.getOffset();
            int firstRow = pageNumber * pageSize + offset;
            int lastRow = (pageNumber + 1) * pageSize + offset;
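            // firstRow is inclusive and lastRow is exclusive; both index into the
            // sorted key rows fetched below (see the worked examples above).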
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(restrict);
            List<String> keysSet = EntitySpecificationUtils.getKeysSet(serializedKeyValues);
            String keysCql = cqlGen.buildLimitedFindAllKeysStatement(keysSet, pageable.getSort(), lastRow);
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(keysCql);
            for (String column : keysSet) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            OperationResult<CqlResult<String, String>> keysResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", keysResult.getAttemptsCount(),
                    keysResult.getLatency(TimeUnit.MILLISECONDS));
            CqlResult<String, String> cqlKeysResult = keysResult.getResult();
            Rows<String, String> keysSetRows = cqlKeysResult.getRows();
            List<T> keysAsEnts = Lists.newArrayListWithExpectedSize(lastRow - firstRow + 1);
            for (int i = firstRow; i < keysSetRows.size() && i < lastRow; i++) {
                keysAsEnts.add(spec.map(keysSetRows.getRowByIndex(i).getColumns()));
            }
            List<ID> keys = spec.getKey(keysAsEnts);
            return new PageImpl<T>((List<T>) findAll(keys), pageable, count(restrict));
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    protected Page<T> doFindAll(Predicate predicate, Pageable pageable) {
        try {
            int pageNumber = pageable.getPageNumber();
            int pageSize = pageable.getPageSize();
            int offset = pageable.getOffset();
            int firstRow = pageNumber * pageSize + offset;
            int lastRow = (pageNumber + 1) * pageSize + offset;
            Map<String, ByteBuffer> serializedValues = Maps.newLinkedHashMap();
            predicate.accept(new PredicateSerializerVisitor(), serializedValues);
            String cql = cqlGen.buildFindAllStatement(Lists.newArrayList(serializedValues.keySet()));
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
            for (Map.Entry<String, ByteBuffer> entry : serializedValues.entrySet()) {
                preparedStatement = preparedStatement.withValue(entry.getValue());
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            CqlResult<String, String> cqlResult = opResult.getResult();
            List<T> elements = Lists.newArrayList(spec.map(cqlResult.getRows()));
            List<T> result = Lists.newArrayListWithExpectedSize(lastRow - firstRow + 1);
            for (int i = firstRow; i < elements.size() && i < lastRow; i++) {
                result.add(elements.get(i));
            }
            return new PageImpl<T>(result, pageable, doCount(predicate));
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    protected long doCount(Predicate predicate) {
        try {
            Map<String, ByteBuffer> serializedValues = Maps.newLinkedHashMap();
            predicate.accept(new PredicateSerializerVisitor(), serializedValues);
            String cql = cqlGen.buildCountStatement(Lists.newArrayList(serializedValues.keySet()));
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
            for (Map.Entry<String, ByteBuffer> entry : serializedValues.entrySet()) {
                preparedStatement = preparedStatement.withValue(entry.getValue());
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            return opResult.getResult().getRows().getRowByIndex(0).getColumns().getColumnByName("count")
                    .getLongValue();
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * @return the readConsistencyLevel
     */
    public synchronized ConsistencyLevel getReadConsistencyLevel() {
        return readConsistencyLevel;
    }

    /**
     * @param readConsistencyLevel the readConsistencyLevel to set
     */
    public synchronized void setReadConsistencyLevel(ConsistencyLevel readConsistencyLevel) {
        if (readConsistencyLevel == null) {
            this.readConsistencyLevel = DEFAULT_READ_CONSISTENCY_LEVEL;
        } else {
            this.readConsistencyLevel = readConsistencyLevel;
        }
    }

    /**
     * @return the writeConsistencyLevel
     */
    public synchronized ConsistencyLevel getWriteConsistencyLevel() {
        return writeConsistencyLevel;
    }

    /**
     * @param writeConsistencyLevel the writeConsistencyLevel to set
     */
    public synchronized void setWriteConsistencyLevel(ConsistencyLevel writeConsistencyLevel) {
        if (writeConsistencyLevel == null) {
            this.writeConsistencyLevel = DEFAULT_WRITE_CONSISTENCY_LEVEL;
        } else {
            this.writeConsistencyLevel = writeConsistencyLevel;
        }
    }

    /**
     * @return the useCompression
     */
    public synchronized boolean isUseCompression() {
        return useCompression;
    }

    /**
     * @param useCompression the useCompression to set
     */
    public synchronized void setUseCompression(boolean useCompression) {
        this.useCompression = useCompression;
    }

    /**
     * @return the batchSize
     */
    public synchronized int getBatchSize() {
        return batchSize;
    }

    /**
     * @param batchSize the batchSize to set
     */
    public synchronized void setBatchSize(int batchSize) {
        this.batchSize = batchSize;
    }

    /**
     * @return the numThreads
     */
    public synchronized int getNumThreads() {
        return numThreads;
    }

    /**
     * @param numThreads the numThreads to set
     */
    public synchronized void setNumThreads(int numThreads) {
        Preconditions.checkArgument(numThreads > 0, "numThreads must be positive (is %s)", numThreads);
        this.executorService.shutdown();
        this.numThreads = numThreads;
        this.executorService = Executors.newFixedThreadPool(this.numThreads);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized long count() {
        try {
            String cql = cqlGen.buildCountStatement();
            OperationResult<CqlResult<String, String>> opResult = doCqlRead(cql).execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            return opResult.getResult().getRows().getRowByIndex(0).getColumns().getColumnByName("count")
                    .getLongValue();
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void delete(ID id) {
        try {
            String cql = cqlGen.buildDeleteStatement();
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(id);
            for (String column : spec.getKeyColumns()) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void delete(T entity) {
        delete(spec.getKey(entity));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void delete(Iterable<? extends T> entities) {
        int count = Iterables.size(entities);
        List<Callable<List<? extends T>>> todo = Lists.newArrayListWithExpectedSize(count / batchSize);
        for (Iterable<? extends T> partition : Iterables.partition(entities, batchSize)) {
            todo.add(new Deleter(partition));
        }
        try {
            List<Future<List<? extends T>>> futureResults = executorService.invokeAll(todo);
            waitUntilCompletion(futureResults);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void deleteAll() {
        try {
            String cql = cqlGen.buildDeleteAllStatement();
            OperationResult<CqlResult<String, String>> opResult = doCqlWrite(cql).execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized boolean exists(ID id) {
        try {
            String cql = cqlGen.buildExistsStatement();
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(id);
            for (String column : spec.getKeyColumns()) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            return opResult.getResult().getRows().size() > 0;
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * {@inheritDoc}
     * @see <a href="https://github.com/Netflix/astyanax/wiki/All-rows-query">Astyanax all-rows query</a>
     */
    @Override
    public synchronized List<T> findAll() {
        try {
            String cql = cqlGen.buildFindAllStatement();
            OperationResult<CqlResult<String, String>> opResult = doCqlRead(cql).execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            CqlResult<String, String> cqlResult = opResult.getResult();
            return spec.map(cqlResult.getRows());
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<T> findAll(Iterable<ID> ids) {
        int count = Iterables.size(ids);
        List<T> entities = Lists.newArrayListWithExpectedSize(count);
        List<Callable<List<T>>> todo = Lists.newArrayListWithExpectedSize(count / batchSize);
        Iterable<List<ID>> partitions = Iterables.partition(ids, batchSize);
        for (List<ID> partition : partitions) {
            todo.add(new Finder(partition));
        }
        try {
            List<Future<List<T>>> futureResults = executorService.invokeAll(todo);
            for (Future<List<T>> futureResult : futureResults) {
                try {
                    entities.addAll(futureResult.get());
                } catch (ExecutionException e) {
                    throw new RuntimeException(e);
                }
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
        }
        return entities;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized <S extends T> S save(S entity) {
        try {
            String cql = cqlGen.buildSaveStatement();
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlWrite(cql);
            List<ByteBuffer> serializedEntity = spec.map(entity);
            for (ByteBuffer buf : serializedEntity) {
                preparedStatement = preparedStatement.withValue(buf);
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            return entity;
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized <S extends T> List<S> save(Iterable<S> entities) {
        List<Callable<List<S>>> todo = Lists.newArrayListWithExpectedSize(Iterables.size(entities) / batchSize);
        for (Iterable<S> partition : Iterables.partition(entities, batchSize)) {
            todo.add(new Saver<S>(partition));
        }
        try {
            List<Future<List<S>>> futureResults = executorService.invokeAll(todo);
            waitUntilCompletion(futureResults);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
        }
        // Copy into a list so that any Iterable, not just a List, can be returned safely.
        return Lists.newArrayList(entities);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized List<T> findAll(Sort sort) {
        return findAll(null, sort);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized Page<T> findAll(Pageable pageable) {
        return doFindAll((ID) null, pageable);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void deleteAllInBatch() {
        deleteAll();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void deleteInBatch(Iterable<T> entities) {
        delete(entities);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized void flush() {
        /* It's a no-op for us. */
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized T saveAndFlush(T entity) {
        return save(entity);
    }

    /* Start implementation of CassandraRepository */

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized long count(ID restrict) {
        try {
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(restrict);
            List<String> keysSet = EntitySpecificationUtils.getKeysSet(serializedKeyValues);
            String cql = cqlGen.buildCountStatement(keysSet);
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
            for (String column : keysSet) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            return opResult.getResult().getRows().getRowByIndex(0).getColumns().getColumnByName("count")
                    .getLongValue();
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized List<T> findAll(ID restrict, Sort sort) {
        return doFindAll(restrict, sort);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized Page<T> findAll(ID restrict, Pageable pageable) {
        return doFindAll(restrict, pageable);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized T findOne(ID id) {
        try {
            String cql = cqlGen.buildFindOneStatement();
            PreparedCqlQuery<String, String> preparedStatement = doPreparedCqlRead(cql);
            Map<String, ByteBuffer> serializedKeyValues = spec.getSerializedKeyValues(id);
            for (String column : spec.getKeyColumns()) {
                preparedStatement = preparedStatement.withValue(serializedKeyValues.get(column));
            }
            OperationResult<CqlResult<String, String>> opResult = preparedStatement.execute();
            LOGGER.debug("attempts: {}, latency: {}ms", opResult.getAttemptsCount(),
                    opResult.getLatency(TimeUnit.MILLISECONDS));
            CqlResult<String, String> resultSet = opResult.getResult();
            Rows<String, String> resultSetRows = resultSet.getRows();
            if (resultSetRows.isEmpty()) {
                return null;
            } else if (resultSetRows.size() > 1) {
                throw new DataRetrievalFailureException("Got several rows for single key");
            } else {
                Row<String, String> row = resultSetRows.getRowByIndex(0);
                ColumnList<String> columns = row.getColumns();
                return spec.map(columns);
            }
        } catch (ConnectionException e) {
            throw new DataRetrievalFailureException("Error while executing CQL3 query", e);
        }
    }

    /* Start implementation of QueryDslPredicateExecutor */

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized T findOne(Predicate predicate) {
        throw new UnsupportedOperationException();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized Page<T> findAll(Predicate predicate) {
        throw new UnsupportedOperationException();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized Page<T> findAll(Predicate predicate, Pageable pageable) {
        if (predicate == null) {
            return findAll(pageable);
        } else {
            return doFindAll(predicate, pageable);
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized Iterable<T> findAll(Predicate predicate, OrderSpecifier<?>... orders) {
        throw new UnsupportedOperationException();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public long count(Predicate predicate) {
        if (predicate == null) {
            return count();
        } else {
            return doCount(predicate);
        }
    }

    /**
     * A unit of work for deleting a series of entities from the enclosed
     * Cassandra CQL3 table.
     */
    private final class Deleter implements Callable<List<? extends T>> {

        /**
         * The series of entities to delete.
         */
        private final Iterable<? extends T> partition;

        /**
         * Constructor.
         * @param partition the series of entities to delete.
         */
        Deleter(Iterable<? extends T> partition) {
            this.partition = partition;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public List<? extends T> call() {
            return doDelete(partition);
        }

    }

    /**
     * A unit of work for finding a series of entities by key in the enclosed
     * Cassandra CQL3 table.
     */
    private final class Finder implements Callable<List<T>> {

        /**
         * The series of keys to find.
         */
        private final Iterable<ID> partition;

        /**
         * Constructor.
         * @param partition the series of keys to find
         */
        Finder(Iterable<ID> partition) {
            this.partition = partition;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public List<T> call() {
            return doFindAll(partition);
        }

    }

    /**
     * A unit of work for saving a series of entities to the enclosed Cassandra
     * CQL3 table.
     * @param <S> the entity type
     */
    private final class Saver<S extends T> implements Callable<List<S>> {

        /**
         * The series of entities to save.
         */
        private final Iterable<S> partition;

        /**
         * Constructor.
         * @param partition the series of entities to save
         */
        Saver(Iterable<S> partition) {
            this.partition = partition;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public List<S> call() {
            return doSave(partition);
        }

    }

}
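
For reference, a minimal sketch of how a concrete repository might be declared on top of this class. Employee and EmployeeId are hypothetical placeholder types (the key type must be Serializable); they stand in for whatever entity mapping the project's EntitySpecification support handles. Because the no-argument constructor resolves the entity and key types from the generic superclass, the concrete class must bind both type parameters explicitly.

import ei.ne.ke.cassandra.cql3.AstyanaxCql3Repository;

/** Hypothetical concrete repository; Employee and EmployeeId are placeholder types. */
public class EmployeeRepository extends AstyanaxCql3Repository<Employee, EmployeeId> {
    // No body is required: the superclass derives the entity metadata from this
    // generic superclass declaration, and Spring injects the
    // AstyanaxContext<Keyspace> through the inherited @Autowired setter.
}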