List of usage examples for org.springframework.data.redis.core RedisOperations boundHashOps
<HK, HV> BoundHashOperations<K, HK, HV> boundHashOps(K key);
From source file:org.shareok.data.redis.UserDaoImpl.java
@Override @Transactional//from w w w . j a v a 2 s .c om public RedisUser addUser(final RedisUser user) { try { redisTemplate.setConnectionFactory(connectionFactory); RedisUser existedUser = findUserByUserEmail(user.getEmail()); if (null != existedUser) { return existedUser; } RedisAtomicLong userIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalUidSchema(), redisTemplate.getConnectionFactory()); long uidCount = userIdIndex.incrementAndGet(); final String uid = String.valueOf(uidCount); List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() { @Override public List<Object> execute(RedisOperations operations) throws DataAccessException { operations.multi(); operations.boundHashOps("user:" + uid); operations.opsForHash().put("user:" + uid, "userName", (null != user.getUserName() ? user.getUserName() : user.getEmail())); operations.opsForHash().put("user:" + uid, "email", user.getEmail()); operations.opsForHash().put("user:" + uid, "userId", uid); operations.opsForHash().put("user:" + uid, "password", user.getPassword()); operations.opsForHash().put("user:" + uid, "isActive", String.valueOf(true)); operations.opsForHash().put("user:" + uid, "sessionKey", (null != user.getSessionKey() ? user.getSessionKey() : "")); operations.opsForHash().put("user:" + uid, "startTime", (null != user.getStartTime() ? ShareokdataManager.getSimpleDateFormat().format(user.getStartTime()) : (ShareokdataManager.getSimpleDateFormat().format(new Date())))); operations.boundHashOps("users"); operations.opsForHash().put("users", user.getEmail(), uid); List<Object> userList = operations.exec(); if (!userList.get(0).equals(true)) { operations.discard(); } return userList; } }); } catch (Exception ex) { logger.error("Cannot add new user", ex); } return user; }
From source file:org.shareok.data.redis.job.JobDaoImpl.java
@Override @Transactional// ww w . ja va 2s. c om public long startJob(long uid, int jobType, int repoType, int serverId, Date startTime) { long jobIdCount = -1; try { redisTemplate.setConnectionFactory(connectionFactory); RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(), redisTemplate.getConnectionFactory()); jobIdCount = jobIdIndex.incrementAndGet(); final String jobId = String.valueOf(jobIdCount); final String uidStr = String.valueOf(uid); final String jobTypeStr = String.valueOf(jobType); final String repoTypeStr = String.valueOf(repoType); final String serverIdStr = String.valueOf(serverId); final Date startTimeStr = startTime; List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() { @Override public List<Object> execute(RedisOperations operations) throws DataAccessException { operations.multi(); operations.boundHashOps("job:" + jobId); operations.opsForHash().put("job:" + jobId, "userId", uidStr); operations.opsForHash().put("job:" + jobId, "jobId", jobId); operations.opsForHash().put("job:" + jobId, "status", "4"); operations.opsForHash().put("job:" + jobId, "type", jobTypeStr); operations.opsForHash().put("job:" + jobId, "repoType", repoTypeStr); operations.opsForHash().put("job:" + jobId, "startTime", (null != startTimeStr ? ShareokdataManager.getSimpleDateFormat().format(startTimeStr) : ShareokdataManager.getSimpleDateFormat().format(new Date()))); operations.opsForHash().put("job:" + jobId, "endTime", ""); operations.opsForHash().put("job:" + jobId, "serverId", serverIdStr); operations.boundSetOps("user_" + uidStr + "_job_set").add(jobId); List<Object> jobList = operations.exec(); if (!jobList.get(0).equals(true)) { operations.discard(); } return jobList; } }); } catch (Exception ex) { logger.error("Cannot start a new job.", ex); } return jobIdCount; }
From source file:org.shareok.data.redis.job.JobDaoImpl.java
@Override public RedisJob createJob(final long uid, final int jobType, final Map<String, String> values) { long jobIdCount = -1; final RedisJob newJob = new RedisJob(); try {//from www . j a v a 2s .c o m redisTemplate.setConnectionFactory(connectionFactory); RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(), redisTemplate.getConnectionFactory()); jobIdCount = jobIdIndex.incrementAndGet(); final String jobId = String.valueOf(jobIdCount); final String uidStr = String.valueOf(uid); final String jobTypeStr = String.valueOf(jobType); final Date startTime = new Date(); List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() { @Override public List<Object> execute(RedisOperations operations) throws DataAccessException { operations.multi(); operations.boundHashOps("job:" + jobId); operations.opsForHash().put("job:" + jobId, "userId", uidStr); operations.opsForHash().put("job:" + jobId, "jobId", jobId); operations.opsForHash().put("job:" + jobId, "status", "4"); operations.opsForHash().put("job:" + jobId, "type", jobTypeStr); operations.opsForHash().put("job:" + jobId, "startTime", ShareokdataManager.getSimpleDateFormat().format(startTime)); operations.opsForHash().put("job:" + jobId, "endTime", ""); if (null != values && values.size() > 0) { for (String key : values.keySet()) { String value = (null != values.get(key)) ? (String) values.get(key) : ""; operations.opsForHash().put("job:" + jobId, key, value); } } operations.boundSetOps("user_" + uidStr + "_job_set").add(jobId); List<Object> jobList = operations.exec(); if (!jobList.get(0).equals(true)) { operations.discard(); } return jobList; } }); newJob.setJobId(jobIdCount); newJob.setType(jobType); newJob.setStartTime(startTime); newJob.setUserId(uid); newJob.setData(values); } catch (Exception ex) { logger.error("Cannot start a new job.", ex); } return newJob; }
From source file:org.shareok.data.redis.job.JobDaoImpl.java
/**
 * Persists a RedisJob by reflecting over its declared fields (and its direct
 * superclass's fields) and writing each as a string entry of "job:{jobId}",
 * inside a MULTI/EXEC transaction. Map-valued fields are skipped; null
 * fields are stored as "". The job id is also added to the owning user's
 * job set. A fresh job id is always allocated and set on the passed job.
 *
 * @param job the job to persist; its jobId is overwritten with the new id
 * @return the same job instance (also returned on failure; errors are logged)
 */
@Override
public RedisJob saveJob(final RedisJob job) {
    long jobIdCount = -1;
    try {
        redisTemplate.setConnectionFactory(connectionFactory);
        RedisAtomicLong jobIdIndex = new RedisAtomicLong(ShareokdataManager.getRedisGlobalJobIdSchema(),
                redisTemplate.getConnectionFactory());
        jobIdCount = jobIdIndex.incrementAndGet();
        final String jobId = String.valueOf(jobIdCount);
        job.setJobId(jobIdCount);
        final Field[] fields = job.getClass().getDeclaredFields();
        Class parent = job.getClass().getSuperclass();
        final Field[] parentFields = (null != parent) ? parent.getDeclaredFields() : null;
        redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                operations.multi();
                try {
                    putJobFields(operations, jobId, fields, job);
                    if (null != parentFields) {
                        putJobFields(operations, jobId, parentFields, job);
                    }
                } catch (IllegalArgumentException | IllegalAccessException ex) {
                    // typo fixed: was "Cannot sace a new job..."
                    logger.error("Cannot save a new job with illegal access to certain job field values.", ex);
                }
                operations.boundSetOps("user_" + String.valueOf(job.getUserId()) + "_job_set").add(jobId);
                // discard() after exec() (as the old code did) is invalid —
                // DISCARD is only legal before EXEC; log instead.
                List<Object> jobList = operations.exec();
                if (!jobList.isEmpty() && !Boolean.TRUE.equals(jobList.get(0))) {
                    logger.warn("Job transaction for job:" + jobId + " reported a non-true first reply");
                }
                return jobList;
            }
        });
    } catch (Exception ex) {
        logger.error("Cannot save a new job.", ex);
    }
    return job;
}

/**
 * Queues one HSET per reflected field of {@code job} onto "job:{jobId}".
 * Dates are formatted with the shared date format, Map-valued fields are
 * skipped entirely, null fields are stored as the empty string, and every
 * other value is stored via String.valueOf. Must be called between multi()
 * and exec().
 */
private void putJobFields(RedisOperations operations, String jobId, Field[] fields, RedisJob job)
        throws IllegalAccessException {
    for (Field field : fields) {
        String key = field.getName();
        field.setAccessible(true);
        Object val = field.get(job);
        if (val instanceof Map) {
            continue; // map payloads (e.g. job data) are not flattened into the hash
        }
        String value;
        if (val instanceof Date) {
            value = ShareokdataManager.getSimpleDateFormat().format((Date) val);
        } else if (null != val) {
            value = String.valueOf(val);
        } else {
            value = "";
        }
        operations.opsForHash().put("job:" + jobId, key, value);
    }
}
From source file:org.shareok.data.redis.server.RepoServerDaoImpl.java
@Override public RepoServer updateServer(RepoServer server) { try {/*from ww w . ja va 2s . c om*/ redisTemplate.setConnectionFactory(connectionFactory); final String serverId = String.valueOf(server.getServerId()); final String serverName = server.getServerName(); RepoServer existingServer = findServerById(server.getServerId()); if (null == existingServer) { throw new NonExistingServerException("The server to be updated does not exist!"); } final String oldServerName = existingServer.getServerName(); final String portStr = String.valueOf(server.getPort()); final String proxyPortStr = String.valueOf(server.getProxyPort()); final String repoTypeStr = String.valueOf(server.getRepoType()); final String timeoutStr = String.valueOf(server.getTimeout()); final String host = server.getHost(); final String proxyHost = server.getProxyHost(); final String userName = server.getUserName(); final String proxyUserName = server.getProxyUserName(); final String password = server.getPassword(); final String proxyPassword = server.getProxyPassword(); final String passPhrase = server.getPassPhrase(); final String rsaKey = server.getRsaKey(); final String address = server.getAddress(); List<Object> results = redisTemplate.execute(new SessionCallback<List<Object>>() { @Override public List<Object> execute(RedisOperations operations) throws DataAccessException { operations.multi(); operations.boundHashOps("server:" + serverId); // operations.opsForHash().put("server:"+serverId, "serverId", serverId); operations.opsForHash().put("server:" + serverId, "serverName", serverName); operations.opsForHash().put("server:" + serverId, "port", portStr); operations.opsForHash().put("server:" + serverId, "proxyPort", proxyPortStr); operations.opsForHash().put("server:" + serverId, "timeout", timeoutStr); operations.opsForHash().put("server:" + serverId, "host", host); operations.opsForHash().put("server:" + serverId, "proxyHost", proxyHost); operations.opsForHash().put("server:" + serverId, 
"userName", userName); operations.opsForHash().put("server:" + serverId, "proxyUserName", proxyUserName); operations.opsForHash().put("server:" + serverId, "password", password); operations.opsForHash().put("server:" + serverId, "host", host); operations.opsForHash().put("server:" + serverId, "proxyPassword", proxyPassword); operations.opsForHash().put("server:" + serverId, "passPhrase", passPhrase); operations.opsForHash().put("server:" + serverId, "rsaKey", rsaKey); operations.opsForHash().put("server:" + serverId, "repoType", repoTypeStr); operations.opsForHash().put("server:" + serverId, "address", address); operations.boundHashOps(ShareokdataManager.getRedisServerNameIdMatchingTable()); if (!oldServerName.equals(serverName)) { operations.opsForHash().delete(ShareokdataManager.getRedisServerNameIdMatchingTable(), oldServerName); } operations.opsForHash().put(ShareokdataManager.getRedisServerNameIdMatchingTable(), serverName, serverId); List<Object> serverList = operations.exec(); if (serverList.get(0).equals("null")) { operations.discard(); } return serverList; } }); } catch (Exception ex) { logger.error("Cannot update the server information", ex); } return server; }
From source file:org.shareok.data.redis.server.RepoServerDaoImpl.java
/**
 * Creates the hash "server:{serverId}" for a new repository server inside a
 * MULTI/EXEC transaction and records its name in the serverName-to-id
 * matching table. If a server with the same name already exists, it is
 * returned as-is and nothing is written.
 *
 * Fixes over the previous version: the duplicate second put of "host" was
 * removed, and the invalid discard()-after-exec() was replaced with a log
 * (DISCARD is only legal before EXEC).
 *
 * @param server the server to persist; its serverId is assigned here
 * @return the existing server when the name is taken, the input server with
 *         its new id on success, or null when an exception was logged
 */
@Override
public RepoServer addServer(RepoServer server) {
    int serverIdCount = -1;
    try {
        redisTemplate.setConnectionFactory(connectionFactory);
        RedisAtomicInteger serverIdIndex = new RedisAtomicInteger(
                ShareokdataManager.getRedisServerQueryPrefix(), redisTemplate.getConnectionFactory());
        serverIdCount = serverIdIndex.incrementAndGet();
        final String serverId = String.valueOf(serverIdCount);
        final String serverName = server.getServerName();
        final String portStr = String.valueOf(server.getPort());
        final String proxyPortStr = String.valueOf(server.getProxyPort());
        final String repoTypeStr = String.valueOf(server.getRepoType());
        final String timeoutStr = String.valueOf(server.getTimeout());
        final String host = server.getHost();
        final String proxyHost = server.getProxyHost();
        final String userName = server.getUserName();
        final String proxyUserName = server.getProxyUserName();
        final String password = server.getPassword();
        final String proxyPassword = server.getProxyPassword();
        final String passphrase = server.getPassPhrase();
        final String rsaKey = server.getRsaKey();
        final String address = server.getAddress();
        RepoServer existingServer = findServerByName(serverName);
        if (null != existingServer) {
            return existingServer;
        }
        redisTemplate.execute(new SessionCallback<List<Object>>() {
            @Override
            public List<Object> execute(RedisOperations operations) throws DataAccessException {
                String serverKey = "server:" + serverId;
                operations.multi();
                operations.opsForHash().put(serverKey, "serverId", serverId);
                operations.opsForHash().put(serverKey, "serverName", serverName);
                operations.opsForHash().put(serverKey, "port", portStr);
                operations.opsForHash().put(serverKey, "proxyPort", proxyPortStr);
                operations.opsForHash().put(serverKey, "timeout", timeoutStr);
                operations.opsForHash().put(serverKey, "host", host);
                operations.opsForHash().put(serverKey, "proxyHost", proxyHost);
                operations.opsForHash().put(serverKey, "userName", userName);
                operations.opsForHash().put(serverKey, "proxyUserName", proxyUserName);
                operations.opsForHash().put(serverKey, "password", password);
                operations.opsForHash().put(serverKey, "proxyPassword", proxyPassword);
                // NOTE(review): this key is "passphrase" but updateServer writes
                // "passPhrase" — updating a server leaves both fields in the
                // hash; confirm the expected casing and unify.
                operations.opsForHash().put(serverKey, "passphrase", passphrase);
                operations.opsForHash().put(serverKey, "rsaKey", rsaKey);
                operations.opsForHash().put(serverKey, "repoType", repoTypeStr);
                operations.opsForHash().put(serverKey, "address", address);
                operations.opsForHash().put(ShareokdataManager.getRedisServerNameIdMatchingTable(),
                        serverName, serverId);
                List<Object> serverList = operations.exec();
                if (!serverList.isEmpty() && !Boolean.TRUE.equals(serverList.get(0))) {
                    logger.warn("Server transaction for " + serverKey + " reported a non-true first reply");
                }
                return serverList;
            }
        });
        server.setServerId(serverIdCount);
        return server;
    } catch (Exception ex) {
        logger.error("Cannot create a new server.", ex);
        return null;
    }
}