Example usage for org.springframework.data.redis.support.atomic.RedisAtomicLong incrementAndGet

Introduction

This page collects example usages of org.springframework.data.redis.support.atomic.RedisAtomicLong#incrementAndGet from real-world source files.

Prototype

public long incrementAndGet() 

Document

Atomically increments the current value by one and returns the updated value.
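
As a quick orientation before the collected examples, the following is a minimal sketch of typical usage. It assumes a RedisConnectionFactory is already configured; the class name CounterSketch and the key "example:counter" are illustrative, not part of the API.

import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.support.atomic.RedisAtomicLong;

public class CounterSketch {

    // Returns the next value of a counter shared by every client that uses the same key.
    public static long nextValue(RedisConnectionFactory connectionFactory) {
        // The counter is backed by the Redis key "example:counter"; if the key does not
        // exist yet, this constructor initialises it to 0.
        RedisAtomicLong counter = new RedisAtomicLong("example:counter", connectionFactory);
        // The increment happens atomically on the Redis server, so concurrent callers
        // never observe duplicate values.
        return counter.incrementAndGet();
    }
}

Because the increment is executed by Redis itself, the counter is safe to share across threads and across JVMs, which is why the examples below use it to generate unique user and job ids.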

Usage

From source file:com.miko.demo.mongo.service.RedisInitTest.java

@Test
public void testAtomicCounters() {
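    // Counter bound to the Redis key "miko-spring-mongo:counter-test" and initialised to 0.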
    RedisAtomicLong counter = new RedisAtomicLong("miko-spring-mongo:counter-test", connectionFactory, 0);
    Long l = counter.incrementAndGet();

    assertThat(l, is(greaterThan(0L)));
}

From source file:com.miko.demo.postgresql.service.RedisInitTest.java

@Test
public void testAtomicCounters() {
    RedisAtomicLong counter = new RedisAtomicLong("miko-spring-postgresql:counter-test", connectionFactory, 0);
    Long l = counter.incrementAndGet();

    assertThat(l, is(greaterThan(0L)));
}

From source file:org.shareok.data.redis.UserDaoImpl.java

@Override
@Transactional
public RedisUser addUser(final RedisUser user) {
    try {
        redisTemplate.setConnectionFactory(connectionFactory);
        RedisUser existingUser = findUserByUserEmail(user.getEmail());
        if (null != existingUser) {
            return existingUser;
        }

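        // Draw the next globally unique user id from an atomic Redis counter.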
        RedisAtomicLong userIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalUidSchema(),
                redisTemplate.getConnectionFactory());
        long uidCount = userIdIndex.incrementAndGet();
        final String uid = String.valueOf(uidCount);

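        // Write the user hash and the email -> uid lookup entry inside a single MULTI/EXEC transaction.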
        List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                operations.multi();
                operations.boundHashOps("user:" + uid);
                operations.opsForHash().put("user:" + uid, "userName",
                        (null != user.getUserName() ? user.getUserName() : user.getEmail()));
                operations.opsForHash().put("user:" + uid, "email", user.getEmail());
                operations.opsForHash().put("user:" + uid, "userId", uid);
                operations.opsForHash().put("user:" + uid, "password", user.getPassword());
                operations.opsForHash().put("user:" + uid, "isActive", String.valueOf(true));
                operations.opsForHash().put("user:" + uid, "sessionKey",
                        (null != user.getSessionKey() ? user.getSessionKey() : ""));
                operations.opsForHash().put("user:" + uid, "startTime",
                        (null != user.getStartTime()
                                ? ShareokdataManager.getSimpleDateFormat().format(user.getStartTime())
                                : (ShareokdataManager.getSimpleDateFormat().format(new Date()))));

                operations.boundHashOps("users");
                operations.opsForHash().put("users", user.getEmail(), uid);

                List<Object> userList = operations.exec();
                if (!userList.get(0).equals(true)) {
                    operations.discard();
                }
                return userList;
            }
        });
    } catch (Exception ex) {
        logger.error("Cannot add new user", ex);
    }
    return user;
}

From source file:org.shareok.data.redis.job.JobDaoImpl.java

@Override
@Transactional
public long startJob(long uid, int jobType, int repoType, int serverId, Date startTime) {

    long jobIdCount = -1;

    try {
        redisTemplate.setConnectionFactory(connectionFactory);

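        // Draw the next globally unique job id from the shared Redis counter.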
        RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(),
                redisTemplate.getConnectionFactory());

        jobIdCount = jobIdIndex.incrementAndGet();
        final String jobId = String.valueOf(jobIdCount);
        final String uidStr = String.valueOf(uid);
        final String jobTypeStr = String.valueOf(jobType);
        final String repoTypeStr = String.valueOf(repoType);
        final String serverIdStr = String.valueOf(serverId);
        final Date jobStartTime = startTime;

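        // Write all job fields and add the job to the user's job set inside a single MULTI/EXEC transaction.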
        List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                operations.multi();
                operations.boundHashOps("job:" + jobId);
                operations.opsForHash().put("job:" + jobId, "userId", uidStr);
                operations.opsForHash().put("job:" + jobId, "jobId", jobId);
                operations.opsForHash().put("job:" + jobId, "status", "4");
                operations.opsForHash().put("job:" + jobId, "type", jobTypeStr);
                operations.opsForHash().put("job:" + jobId, "repoType", repoTypeStr);
                operations.opsForHash().put("job:" + jobId, "startTime",
                        (null != startTimeStr ? ShareokdataManager.getSimpleDateFormat().format(startTimeStr)
                                : ShareokdataManager.getSimpleDateFormat().format(new Date())));
                operations.opsForHash().put("job:" + jobId, "endTime", "");
                operations.opsForHash().put("job:" + jobId, "serverId", serverIdStr);

                operations.boundSetOps("user_" + uidStr + "_job_set").add(jobId);

                List<Object> jobList = operations.exec();
                if (!jobList.get(0).equals(true)) {
                    operations.discard();
                }
                return jobList;
            }
        });
    } catch (Exception ex) {
        logger.error("Cannot start a new job.", ex);
    }
    return jobIdCount;
}

From source file:org.shareok.data.redis.job.JobDaoImpl.java

@Override
public RedisJob createJob(final long uid, final int jobType, final Map<String, String> values) {
    long jobIdCount = -1;
    final RedisJob newJob = new RedisJob();

    try {
        redisTemplate.setConnectionFactory(connectionFactory);

        RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(),
                redisTemplate.getConnectionFactory());

        jobIdCount = jobIdIndex.incrementAndGet();
        final String jobId = String.valueOf(jobIdCount);
        final String uidStr = String.valueOf(uid);
        final String jobTypeStr = String.valueOf(jobType);
        final Date startTime = new Date();

        List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                operations.multi();
                operations.boundHashOps("job:" + jobId);
                operations.opsForHash().put("job:" + jobId, "userId", uidStr);
                operations.opsForHash().put("job:" + jobId, "jobId", jobId);
                operations.opsForHash().put("job:" + jobId, "status", "4");
                operations.opsForHash().put("job:" + jobId, "type", jobTypeStr);
                operations.opsForHash().put("job:" + jobId, "startTime",
                        ShareokdataManager.getSimpleDateFormat().format(startTime));
                operations.opsForHash().put("job:" + jobId, "endTime", "");
                if (null != values && !values.isEmpty()) {
                    for (Map.Entry<String, String> entry : values.entrySet()) {
                        String value = (null != entry.getValue()) ? entry.getValue() : "";
                        operations.opsForHash().put("job:" + jobId, entry.getKey(), value);
                    }
                }

                operations.boundSetOps("user_" + uidStr + "_job_set").add(jobId);

                List<Object> jobList = operations.exec();
                if (!jobList.get(0).equals(true)) {
                    operations.discard();
                }
                return jobList;
            }

        });
        newJob.setJobId(jobIdCount);
        newJob.setType(jobType);
        newJob.setStartTime(startTime);
        newJob.setUserId(uid);
        newJob.setData(values);
    } catch (Exception ex) {
        logger.error("Cannot start a new job.", ex);
    }
    return newJob;
}

From source file:org.shareok.data.redis.job.JobDaoImpl.java

@Override
public RedisJob saveJob(final RedisJob job) {

    long jobIdCount = -1;

    try {
        redisTemplate.setConnectionFactory(connectionFactory);

        RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(),
                redisTemplate.getConnectionFactory());

        jobIdCount = jobIdIndex.incrementAndGet();
        final String jobId = String.valueOf(jobIdCount);
        job.setJobId(jobIdCount);

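        // Collect the job's own declared fields plus those of its superclass; both sets are written to the hash via reflection below.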
        final Field[] fields = job.getClass().getDeclaredFields();
        final Field[] parentFields;
        Class<?> parent = job.getClass().getSuperclass();
        if (null != parent) {
            parentFields = parent.getDeclaredFields();
        } else {
            parentFields = null;
        }

        List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {

                operations.multi();
                operations.boundHashOps("job:" + jobId);

                try {
                    for (Field field : fields) {
                        String key = field.getName();
                        field.setAccessible(true);
                        Object val = field.get(job);
                        String value = "";
                        if (null != val) {
                            if (val instanceof Date) {
                                value = ShareokdataManager.getSimpleDateFormat().format((Date) val);
                                operations.opsForHash().put("job:" + jobId, key, value);
                            } else if (val instanceof Map) {
                                continue;
                            } else {
                                value = String.valueOf(val);
                                operations.opsForHash().put("job:" + jobId, key, value);
                            }
                        } else if (!(val instanceof Map)) {
                            operations.opsForHash().put("job:" + jobId, key, value);
                        }
                    }
                    if (null != parentFields) {
                        for (Field parentField : parentFields) {
                            String key = parentField.getName();
                            parentField.setAccessible(true);
                            Object val = parentField.get(job);
                            String value = "";
                            if (null != val) {
                                if (val instanceof Date) {
                                    value = ShareokdataManager.getSimpleDateFormat().format((Date) val);
                                    operations.opsForHash().put("job:" + jobId, key, value);
                                } else if (val instanceof Map) {
                                    continue;
                                } else {
                                    value = String.valueOf(val);
                                    operations.opsForHash().put("job:" + jobId, key, value);
                                }
                            } else if (!(val instanceof Map)) {
                                operations.opsForHash().put("job:" + jobId, key, value);
                            }
                        }
                    }
                } catch (IllegalArgumentException | IllegalAccessException ex) {
                    logger.error("Cannot sace a new job with illegal access to certain job field values.", ex);
                }

                operations.boundSetOps("user_" + String.valueOf(job.getUserId()) + "_job_set").add(jobId);

                List<Object> jobList = operations.exec();
                if (!jobList.get(0).equals(true)) {
                    operations.discard();
                }
                return jobList;
            }
        });
    } catch (Exception ex) {
        logger.error("Cannot save a new job.", ex);
    }
    return job;
}