Example usage for org.springframework.data.redis.support.atomic RedisAtomicLong RedisAtomicLong

List of usage examples for org.springframework.data.redis.support.atomic RedisAtomicLong RedisAtomicLong

Introduction

In this page you can find the example usage for org.springframework.data.redis.support.atomic RedisAtomicLong RedisAtomicLong.

Prototype

public RedisAtomicLong(String redisCounter, RedisOperations<String, Long> template)

Note: most of the examples below actually use the overloaded constructor RedisAtomicLong(String redisCounter, RedisConnectionFactory factory), passing template.getConnectionFactory() rather than the RedisOperations instance shown in this prototype.

Source Link

Document

Constructs a new RedisAtomicLong instance.

Usage

From source file:com.bazzar.base.dao.redis.ItemRepository.java

@Inject
public ItemRepository(RedisTemplate<String, Object> template) {
    this.template = template;
    // Tolerate a null template (e.g. in tests): only wire the global id
    // counter when a Redis connection factory is actually available.
    jobIdCounter = (template != null)
            ? new RedisAtomicLong(KeyUtils.globalUid(), template.getConnectionFactory())
            : null;
}

From source file:net.nikey.redis.UserRepository.java

/**
 * Creates the repository, wiring the Redis-backed user list and the
 * global user-id counter from the supplied template.
 *
 * @param template the String-keyed Redis template used for all operations
 */
@Inject
public UserRepository(StringRedisTemplate template) {
    this.template = template;
    // Shared, Redis-backed sequence used to mint new user ids.
    userIdCounter = new RedisAtomicLong(KeyUtils.globalUid(), template.getConnectionFactory());
    // Redis-backed list of all registered user names.
    users = new DefaultRedisList<String>(KeyUtils.users(), template);
    valueOps = template.opsForValue();
}

From source file:org.shareok.data.redis.UserDaoImpl.java

/**
 * Persists a new user as a Redis hash keyed "user:{uid}" and records an
 * email -> uid entry in the "users" hash. If a user with the same email
 * already exists, that existing user is returned and nothing is written.
 *
 * NOTE(review): the uid is taken from the RedisAtomicLong BEFORE the
 * MULTI/EXEC block, so ids are consumed even if the transaction fails.
 * Errors are logged, not rethrown; the input user is returned regardless.
 */
@Override
@Transactional
public RedisUser addUser(final RedisUser user) {
    try {
        redisTemplate.setConnectionFactory(connectionFactory);
        RedisUser existedUser = findUserByUserEmail(user.getEmail());
        if (null != existedUser) {
            // Email already registered; do not create a duplicate.
            return existedUser;
        }

        // Allocate the next global user id from a shared Redis counter.
        RedisAtomicLong userIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalUidSchema(),
                redisTemplate.getConnectionFactory());
        long uidCount = userIdIndex.incrementAndGet();
        final String uid = String.valueOf(uidCount);

        // Write all user fields inside a MULTI/EXEC transaction.
        List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                operations.multi();
                operations.boundHashOps("user:" + uid);
                operations.opsForHash().put("user:" + uid, "userName",
                        (null != user.getUserName() ? user.getUserName() : user.getEmail()));
                operations.opsForHash().put("user:" + uid, "email", user.getEmail());
                operations.opsForHash().put("user:" + uid, "userId", uid);
                operations.opsForHash().put("user:" + uid, "password", user.getPassword());
                operations.opsForHash().put("user:" + uid, "isActive", String.valueOf(true));
                operations.opsForHash().put("user:" + uid, "sessionKey",
                        (null != user.getSessionKey() ? user.getSessionKey() : ""));
                // Default the start time to "now" when none was supplied.
                operations.opsForHash().put("user:" + uid, "startTime",
                        (null != user.getStartTime()
                                ? ShareokdataManager.getSimpleDateFormat().format(user.getStartTime())
                                : (ShareokdataManager.getSimpleDateFormat().format(new Date()))));

                // Secondary index: email -> uid.
                operations.boundHashOps("users");
                operations.opsForHash().put("users", user.getEmail(), uid);

                List<Object> userList = operations.exec();
                // NOTE(review): DISCARD after EXEC is invalid in Redis — the
                // transaction has already completed here; confirm intent.
                if (!userList.get(0).equals(true)) {
                    operations.discard();
                }
                return userList;
            }
        });
    } catch (Exception ex) {
        logger.error("Cannot add new user", ex);
    }
    // Returned even when persistence failed (failures are only logged).
    return user;
}

From source file:org.shareok.data.redis.job.JobDaoImpl.java

/**
 * Creates a new job hash "job:{jobId}" in Redis with the supplied
 * attributes, adds the job id to the owning user's job set, and returns
 * the newly allocated job id.
 *
 * @param uid       id of the user owning the job
 * @param jobType   numeric job type code
 * @param repoType  numeric repository type code
 * @param serverId  id of the server the job runs on
 * @param startTime job start time; when null, "now" is stored instead
 * @return the new job id, or -1 when the job could not be started
 */
@Override
@Transactional
public long startJob(long uid, int jobType, int repoType, int serverId, Date startTime) {

    long jobIdCount = -1;

    try {
        redisTemplate.setConnectionFactory(connectionFactory);

        // Allocate the next global job id from a shared Redis counter.
        // NOTE: this happens before MULTI, so the id is consumed even if
        // the transaction below fails.
        RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(),
                redisTemplate.getConnectionFactory());

        jobIdCount = jobIdIndex.incrementAndGet();
        final String jobId = String.valueOf(jobIdCount);
        final String uidStr = String.valueOf(uid);
        final String jobTypeStr = String.valueOf(jobType);
        final String repoTypeStr = String.valueOf(repoType);
        final String serverIdStr = String.valueOf(serverId);
        final Date jobStartTime = startTime;

        // Write all job fields atomically; the execute(...) result is not used.
        redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                operations.multi();
                operations.boundHashOps("job:" + jobId);
                operations.opsForHash().put("job:" + jobId, "userId", uidStr);
                operations.opsForHash().put("job:" + jobId, "jobId", jobId);
                // "4" appears to be the initial/pending status code — confirm
                // against the project's status enum.
                operations.opsForHash().put("job:" + jobId, "status", "4");
                operations.opsForHash().put("job:" + jobId, "type", jobTypeStr);
                operations.opsForHash().put("job:" + jobId, "repoType", repoTypeStr);
                operations.opsForHash().put("job:" + jobId, "startTime",
                        (null != jobStartTime ? ShareokdataManager.getSimpleDateFormat().format(jobStartTime)
                                : ShareokdataManager.getSimpleDateFormat().format(new Date())));
                operations.opsForHash().put("job:" + jobId, "endTime", "");
                operations.opsForHash().put("job:" + jobId, "serverId", serverIdStr);

                // Track the job under the owning user's job set.
                operations.boundSetOps("user_" + uidStr + "_job_set").add(jobId);

                List<Object> jobList = operations.exec();
                // NOTE(review): DISCARD after EXEC is invalid in Redis — the
                // transaction has already completed here; confirm intent.
                if (!jobList.get(0).equals(true)) {
                    operations.discard();
                }
                return jobList;
            }
        });
    } catch (Exception ex) {
        logger.error("Cannot start a new job.", ex);
    }
    return jobIdCount;
}

From source file:com.afousan.service.RetwisRepository.java

/**
 * Creates the repository, wiring the Redis-backed timeline, user list,
 * and the global user/post id counters from the supplied template.
 *
 * @param template the String-keyed Redis template used for all operations
 */
@Inject
public RetwisRepository(StringRedisTemplate template) {
    this.template = template;
    valueOps = template.opsForValue();

    // Shared sequences used to mint new user and post ids.
    userIdCounter = new RedisAtomicLong(KeyUtils.globalUid(), template.getConnectionFactory());
    postIdCounter = new RedisAtomicLong(KeyUtils.globalPid(), template.getConnectionFactory());

    // Redis-backed collections.
    timeline = new DefaultRedisList<String>(KeyUtils.timeline(), template);
    users = new DefaultRedisList<String>(KeyUtils.users(), template);
}

From source file:org.shareok.data.redis.job.JobDaoImpl.java

/**
 * Creates a new job hash "job:{jobId}" populated with the standard job
 * fields plus any extra key/value pairs from {@code values}, adds the job
 * id to the owning user's job set, and returns a RedisJob describing it.
 *
 * NOTE(review): the job id is taken from the RedisAtomicLong BEFORE the
 * MULTI/EXEC block, so ids are consumed even if the transaction fails.
 * Errors are logged, not rethrown; on failure the returned RedisJob may
 * be only partially populated (jobId stays -1 until allocation succeeds).
 */
@Override
public RedisJob createJob(final long uid, final int jobType, final Map<String, String> values) {
    long jobIdCount = -1;
    final RedisJob newJob = new RedisJob();

    try {
        redisTemplate.setConnectionFactory(connectionFactory);

        // Allocate the next global job id from a shared Redis counter.
        RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(),
                redisTemplate.getConnectionFactory());

        jobIdCount = jobIdIndex.incrementAndGet();
        final String jobId = String.valueOf(jobIdCount);
        final String uidStr = String.valueOf(uid);
        final String jobTypeStr = String.valueOf(jobType);
        final Date startTime = new Date();

        // Write all job fields atomically inside a MULTI/EXEC transaction.
        List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                operations.multi();
                operations.boundHashOps("job:" + jobId);
                operations.opsForHash().put("job:" + jobId, "userId", uidStr);
                operations.opsForHash().put("job:" + jobId, "jobId", jobId);
                // "4" appears to be the initial/pending status code — confirm
                // against the project's status enum.
                operations.opsForHash().put("job:" + jobId, "status", "4");
                operations.opsForHash().put("job:" + jobId, "type", jobTypeStr);
                operations.opsForHash().put("job:" + jobId, "startTime",
                        ShareokdataManager.getSimpleDateFormat().format(startTime));
                operations.opsForHash().put("job:" + jobId, "endTime", "");
                // Copy caller-supplied extra fields, mapping null values to "".
                if (null != values && values.size() > 0) {
                    for (String key : values.keySet()) {
                        String value = (null != values.get(key)) ? (String) values.get(key) : "";
                        operations.opsForHash().put("job:" + jobId, key, value);
                    }
                }

                // Track the job under the owning user's job set.
                operations.boundSetOps("user_" + uidStr + "_job_set").add(jobId);

                List<Object> jobList = operations.exec();
                // NOTE(review): DISCARD after EXEC is invalid in Redis — the
                // transaction has already completed here; confirm intent.
                if (!jobList.get(0).equals(true)) {
                    operations.discard();
                }
                return jobList;
            }

        });
        // Mirror what was written to Redis onto the returned object.
        newJob.setJobId(jobIdCount);
        newJob.setType(jobType);
        newJob.setStartTime(startTime);
        newJob.setUserId(uid);
        newJob.setData(values);
    } catch (Exception ex) {
        logger.error("Cannot start a new job.", ex);
    }
    return newJob;
}

From source file:org.shareok.data.redis.job.JobDaoImpl.java

/**
 * Persists a RedisJob to the hash "job:{jobId}" by reflecting over the
 * job's declared fields (and its direct superclass's fields), allocates a
 * fresh job id for it, and adds the id to the owning user's job set.
 *
 * Field serialization rules: Date values are formatted with the shared
 * date format; Map-valued fields are skipped; null values are stored as
 * ""; everything else is stored via String.valueOf.
 *
 * @param job the job to persist; its jobId is overwritten with the newly
 *            allocated id as a side effect
 * @return the same job instance (errors are logged, not rethrown)
 */
@Override
public RedisJob saveJob(final RedisJob job) {

    long jobIdCount = -1;

    try {
        redisTemplate.setConnectionFactory(connectionFactory);

        // Allocate the next global job id from a shared Redis counter.
        // NOTE: this happens before MULTI, so the id is consumed even if
        // the transaction below fails.
        RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(),
                redisTemplate.getConnectionFactory());

        jobIdCount = jobIdIndex.incrementAndGet();
        final String jobId = String.valueOf(jobIdCount);
        job.setJobId(jobIdCount);

        // Collect the job's own fields and those of its direct superclass.
        final Field[] fields = job.getClass().getDeclaredFields();
        final Field[] parentFields;
        Class parent = job.getClass().getSuperclass();
        if (null != parent) {
            parentFields = parent.getDeclaredFields();
        } else {
            parentFields = null;
        }

        // Write all serialized fields atomically; the result is not used.
        redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {

                operations.multi();
                operations.boundHashOps("job:" + jobId);

                try {
                    for (Field field : fields) {
                        String key = field.getName();
                        field.setAccessible(true);
                        Object val = field.get(job);
                        String value = "";
                        if (null != val) {
                            if (val instanceof Date) {
                                value = ShareokdataManager.getSimpleDateFormat().format((Date) val);
                                operations.opsForHash().put("job:" + jobId, key, value);
                            } else if (val instanceof Map) {
                                // Map-valued fields are not flattened into the hash.
                                continue;
                            } else {
                                value = String.valueOf(val);
                                operations.opsForHash().put("job:" + jobId, key, value);
                            }
                        } else if (!(val instanceof Map)) {
                            // val == null here, so this branch always runs:
                            // null fields are stored as "".
                            operations.opsForHash().put("job:" + jobId, key, value);
                        }
                    }
                    if (null != parentFields) {
                        for (Field parentField : parentFields) {
                            String key = parentField.getName();
                            parentField.setAccessible(true);
                            Object val = parentField.get(job);
                            String value = "";
                            if (null != val) {
                                if (val instanceof Date) {
                                    value = ShareokdataManager.getSimpleDateFormat().format((Date) val);
                                    operations.opsForHash().put("job:" + jobId, key, value);
                                } else if (val instanceof Map) {
                                    continue;
                                } else {
                                    value = String.valueOf(val);
                                    operations.opsForHash().put("job:" + jobId, key, value);
                                }
                            } else if (!(val instanceof Map)) {
                                operations.opsForHash().put("job:" + jobId, key, value);
                            }
                        }
                    }
                } catch (IllegalArgumentException | IllegalAccessException ex) {
                    logger.error("Cannot save a new job with illegal access to certain job field values.", ex);
                }

                // Track the job under the owning user's job set.
                operations.boundSetOps("user_" + String.valueOf(job.getUserId()) + "_job_set").add(jobId);

                List<Object> jobList = operations.exec();
                // NOTE(review): DISCARD after EXEC is invalid in Redis — the
                // transaction has already completed here; confirm intent.
                if (!jobList.get(0).equals(true)) {
                    operations.discard();
                }
                return jobList;
            }
        });
    } catch (Exception ex) {
        logger.error("Cannot save a new job.", ex);
    }
    return job;
}

From source file:org.springframework.xd.dirt.launcher.RedisContainerLauncher.java

/**
 * Launches a new container, assigning it the next value from the shared
 * Redis-backed "idsequence" counter, and publishes a started event.
 *
 * @param options container launch options, forwarded for logging
 * @return the started container
 */
@Override
public Container launch(ContainerOptions options) {
    // Lazily initialize the shared id sequence exactly once.
    synchronized (this) {
        if (this.ids == null) {
            this.ids = new RedisAtomicLong("idsequence", this.connectionFactory);
        }
    }
    final long id = ids.incrementAndGet();
    final DefaultContainer container = new DefaultContainer(String.valueOf(id));
    container.start();
    logRedisInfo(container, options);
    // Ensure the container shuts down cleanly and announce the start.
    container.addListener(new ShutdownListener(container));
    this.eventPublisher.publishEvent(new ContainerStartedEvent(container));
    return container;
}