List of usage examples for org.apache.commons.lang SerializationUtils deserialize
public static Object deserialize(byte[] objectData)
Deserializes a single Object from an array of bytes.
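Before the project examples below, here is a minimal round-trip sketch (the class and values are illustrative, not taken from any of the projects): serialize a Serializable value to a byte[] with SerializationUtils.serialize, then restore it with deserialize and cast the result.

import java.io.Serializable;
import org.apache.commons.lang.SerializationUtils;

public class RoundTripExample {
    public static void main(String[] args) {
        // Any Serializable value can be turned into a byte[] ...
        Serializable original = "hello, serialization";
        byte[] bytes = SerializationUtils.serialize(original);

        // ... and restored later; deserialize returns Object, so the caller casts.
        String copy = (String) SerializationUtils.deserialize(bytes);
        System.out.println(copy.equals(original)); // true
    }
}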
From source file:com.nflabs.shiro.cache.zookeeper.ZookeeperCache.java
@SuppressWarnings("unchecked")
private V getData(K key) {
    try {
        byte[] result = zookeeperClient.getData(getDataPath(key), false, new Stat());
        if (result == null) {
            return null;
        }
        return (V) SerializationUtils.deserialize(result);
    } catch (KeeperException | InterruptedException e) {
        //throw new CacheException(e);
        LOG.error("Error: {}", e.getMessage());
        return null;
    }
}
From source file:com.impetus.ankush.common.domain.NodeMonitoring.java
/**
 * Gets the technology data.
 *
 * @return the technologyData
 */
@Transient
public Map<String, TechnologyData> getTechnologyData() {
    if (getTechnologyDataBytes() == null) {
        return null;
    }
    return (Map<String, TechnologyData>) SerializationUtils.deserialize(getTechnologyDataBytes());
}
From source file:es.uvigo.ei.sing.gc.model.entities.ExpertResult.java
@Transient
public Throwable getAbortCause() {
    if (this.abortCause == null) {
        return null;
    } else {
        return (Throwable) SerializationUtils.deserialize(this.abortCause);
    }
}
From source file:co.cask.cdap.internal.app.runtime.schedule.DataSetBasedScheduleStore.java
private TriggerStatus readTrigger(TriggerKey key) throws Exception {
    byte[][] col = new byte[1][];
    col[0] = Bytes.toBytes(key.getName());
    Map<byte[], byte[]> result = table.get(TRIGGER_KEY, col);
    byte[] bytes = null;
    if (!result.isEmpty()) {
        bytes = result.get(col[0]);
    }
    if (bytes != null) {
        return (TriggerStatus) SerializationUtils.deserialize(bytes);
    } else {
        return null;
    }
}
From source file:co.cask.cdap.internal.app.runtime.schedule.store.DatasetBasedTimeScheduleStore.java
private TriggerStatus readTrigger(TriggerKey key) throws Exception {
    byte[][] col = new byte[1][];
    col[0] = Bytes.toBytes(key.getName());
    Row result = table.get(TRIGGER_KEY, col);
    byte[] bytes = null;
    if (!result.isEmpty()) {
        bytes = result.get(col[0]);
    }
    if (bytes != null) {
        return (TriggerStatus) SerializationUtils.deserialize(bytes);
    } else {
        return null;
    }
}
From source file:io.tilt.minka.spectator.PublishSubscribeQueue.java
private void bytesToMessage(final Stat childStat, final List<MessageMetadata> datas, final String childNode,
        final byte[] in) {
    try {
        Object o = SerializationUtils.deserialize(in);
        if (o != null) {
            datas.add((MessageMetadata) o);
        }
    } catch (Exception e) {
        logger.warn(
                "{}: ({}) Deserializing error while reading on queue: {} for "
                        + "new message: {}, size: {}, created: {}",
                getClass().getSimpleName(), logId, name, childNode, childStat.getDataLength(),
                childStat.getCtime());
    }
}
From source file:com.janrain.backplane2.server.dao.redis.RedisBackplaneMessageDAO.java
@Override
public List<BackplaneMessage> retrieveMessagesNoScope(@Nullable String sinceIso8601timestamp)
        throws BackplaneServerException {
    Jedis jedis = null;
    try {
        jedis = Redis.getInstance().getReadJedis();

        double sinceInMs = 0;
        if (StringUtils.isNotBlank(sinceIso8601timestamp)) {
            sinceInMs = BackplaneMessage.getDateFromId(sinceIso8601timestamp).getTime();
        }

        // every message has a unique timestamp - which serves as a key for indexing
        Set<byte[]> messageIdBytes = jedis.zrangeByScore(V2_MESSAGES.getBytes(), sinceInMs + 1,
                Double.POSITIVE_INFINITY);

        List<BackplaneMessage> messages = new ArrayList<BackplaneMessage>();

        Pipeline pipeline = jedis.pipelined();
        List<Response<byte[]>> responses = new ArrayList<Response<byte[]>>();

        if (messageIdBytes != null) {
            for (byte[] b : messageIdBytes) {
                String[] args = new String(b).split(" ");
                byte[] key = getKey(args[2]);
                responses.add(pipeline.get(key));
            }
            pipeline.sync();
            for (Response<byte[]> response : responses) {
                if (response.get() != null) {
                    BackplaneMessage backplaneMessage = (BackplaneMessage) SerializationUtils
                            .deserialize(response.get());
                    messages.add(backplaneMessage);
                } else {
                    logger.warn("failed to retrieve a message");
                }
            }
        }

        Collections.sort(messages, new Comparator<BackplaneMessage>() {
            @Override
            public int compare(BackplaneMessage backplaneMessage, BackplaneMessage backplaneMessage1) {
                return backplaneMessage.getIdValue().compareTo(backplaneMessage1.getIdValue());
            }
        });

        return messages;

    } finally {
        Redis.getInstance().releaseToPool(jedis);
    }
}
From source file:com.ephesoft.gxt.admin.server.ImportBatchClassUploadServlet.java
private void attachFile(HttpServletRequest req, HttpServletResponse resp, BatchSchemaService batchSchemaService,
        BatchClassService bcService, ImportBatchService imService) throws IOException {
    PrintWriter printWriter = resp.getWriter();

    File tempZipFile = null;
    InputStream instream = null;
    OutputStream out = null;
    String zipWorkFlowName = "", tempOutputUnZipDir = "", zipWorkflowDesc = "", zipWorkflowPriority = "";
    BatchClass importBatchClass = null;
    if (ServletFileUpload.isMultipartContent(req)) {
        FileItemFactory factory = new DiskFileItemFactory();
        ServletFileUpload upload = new ServletFileUpload(factory);
        String exportSerailizationFolderPath = batchSchemaService.getBatchExportFolderLocation();

        File exportSerailizationFolder = new File(exportSerailizationFolderPath);
        if (!exportSerailizationFolder.exists()) {
            exportSerailizationFolder.mkdir();
        }

        String zipFileName = "";
        String zipPathname = "";
        List<FileItem> items;
        try {
            items = upload.parseRequest(req);
            for (FileItem item : items) {
                if (!item.isFormField()) { //&& "importFile".equals(item.getFieldName())) {
                    zipFileName = item.getName();
                    if (zipFileName != null) {
                        zipFileName = zipFileName.substring(zipFileName.lastIndexOf(File.separator) + 1);
                    }
                    zipPathname = exportSerailizationFolderPath + File.separator + zipFileName;
                    // get only the file name not whole path
                    if (zipFileName != null) {
                        zipFileName = FilenameUtils.getName(zipFileName);
                    }
                    try {
                        instream = item.getInputStream();
                        tempZipFile = new File(zipPathname);
                        if (tempZipFile.exists()) {
                            tempZipFile.delete();
                        }
                        out = new FileOutputStream(tempZipFile);
                        byte buf[] = new byte[1024];
                        int len;
                        while ((len = instream.read(buf)) > 0) {
                            out.write(buf, 0, len);
                        }
                    } catch (FileNotFoundException e) {
                        log.error("Unable to create the export folder." + e, e);
                        printWriter.write("Unable to create the export folder.Please try again.");
                    } catch (IOException e) {
                        log.error("Unable to read the file." + e, e);
                        printWriter.write("Unable to read the file.Please try again.");
                    } finally {
                        if (out != null) {
                            try {
                                out.close();
                            } catch (IOException ioe) {
                                log.info("Could not close stream for file." + tempZipFile);
                            }
                        }
                        if (instream != null) {
                            try {
                                instream.close();
                            } catch (IOException ioe) {
                                log.info("Could not close stream for file." + zipFileName);
                            }
                        }
                    }
                }
            }
        } catch (FileUploadException e) {
            log.error("Unable to read the form contents." + e, e);
            printWriter.write("Unable to read the form contents.Please try again.");
        }

        tempOutputUnZipDir = exportSerailizationFolderPath + File.separator
                + zipFileName.substring(0, zipFileName.lastIndexOf('.')) + System.nanoTime();
        try {
            FileUtils.unzip(tempZipFile, tempOutputUnZipDir);
        } catch (Exception e) {
            log.error("Unable to unzip the file." + e, e);
            printWriter.write("Unable to unzip the file.Please try again.");
            tempZipFile.delete();
        }

        String serializableFilePath = FileUtils.getFileNameOfTypeFromFolder(tempOutputUnZipDir, SERIALIZATION_EXT);
        InputStream serializableFileStream = null;
        try {
            serializableFileStream = new FileInputStream(serializableFilePath);
            importBatchClass = (BatchClass) SerializationUtils.deserialize(serializableFileStream);
            zipWorkFlowName = importBatchClass.getName();
            zipWorkflowDesc = importBatchClass.getDescription();
            zipWorkflowPriority = "" + importBatchClass.getPriority();
        } catch (Exception e) {
            tempZipFile.delete();
            log.error("Error while importing" + e, e);
            printWriter.write("Error while importing.Please try again.");
        } finally {
            if (serializableFileStream != null) {
                try {
                    serializableFileStream.close();
                } catch (IOException ioe) {
                    log.info("Could not close stream for file." + serializableFilePath);
                }
            }
        }
    } else {
        log.error("Request contents type is not supported.");
        printWriter.write("Request contents type is not supported.");
    }
    if (tempZipFile != null) {
        tempZipFile.delete();
    }
    List<String> uncList = bcService.getAssociatedUNCList(zipWorkFlowName);
    DeploymentService deploymentService = this.getSingleBeanOfType(DeploymentService.class);
    boolean isWorkflowDeployed = deploymentService.isDeployed(zipWorkFlowName);
    if (null != importBatchClass) {
        boolean isWorkflowEqual = imService.isImportWorkflowEqualDeployedWorkflow(importBatchClass,
                importBatchClass.getName());
        printWriter.write(AdminSharedConstants.WORK_FLOW_NAME + zipWorkFlowName);
        printWriter.append("|");
        printWriter.write(AdminSharedConstants.WORK_FLOW_DESC + zipWorkflowDesc);
        printWriter.append("|");
        printWriter.write(AdminSharedConstants.WORK_FLOW_PRIORITY + zipWorkflowPriority);
        printWriter.append("|");
        printWriter.append(AdminSharedConstants.FILE_PATH).append(tempOutputUnZipDir);
        printWriter.append("|");
        printWriter.write(AdminSharedConstants.WORKFLOW_DEPLOYED + isWorkflowDeployed);
        printWriter.append("|");
        printWriter.write(AdminSharedConstants.WORKFLOW_EQUAL + isWorkflowEqual);
        printWriter.append("|");
        printWriter.write(AdminSharedConstants.WORKFLOW_EXIST_IN_BATCH_CLASS
                + ((uncList == null || uncList.size() == 0) ? false : true));
        printWriter.append("|");
    }
    printWriter.flush();
}
From source file:com.impetus.ankush.common.domain.Cluster.java
/**
 * Gets the alerts conf.
 *
 * @return the object
 */
@Transient
@JsonIgnore
public AlertsConf getAlertsConf() {
    if (getAnConfBytes() == null) {
        return null;
    }
    return (AlertsConf) SerializationUtils.deserialize(getAnConfBytes());
}
From source file:com.ephesoft.dcma.gwt.admin.bm.server.ImportBatchClassUploadServlet.java
private void attachFile(HttpServletRequest req, HttpServletResponse resp, BatchSchemaService batchSchemaService,
        DeploymentService deploymentService, BatchClassService bcService, ImportBatchService imService)
        throws IOException {
    PrintWriter printWriter = resp.getWriter();

    File tempZipFile = null;
    InputStream instream = null;
    OutputStream out = null;
    String zipWorkFlowName = BatchClassManagementConstants.EMPTY_STRING,
            tempOutputUnZipDir = BatchClassManagementConstants.EMPTY_STRING,
            systemFolderPath = BatchClassManagementConstants.EMPTY_STRING;
    BatchClass importBatchClass = null;
    if (ServletFileUpload.isMultipartContent(req)) {
        FileItemFactory factory = new DiskFileItemFactory();
        ServletFileUpload upload = new ServletFileUpload(factory);
        String exportSerailizationFolderPath = batchSchemaService.getBatchExportFolderLocation();

        File exportSerailizationFolder = new File(exportSerailizationFolderPath);
        if (!exportSerailizationFolder.exists()) {
            exportSerailizationFolder.mkdir();
        }

        String zipFileName = BatchClassManagementConstants.EMPTY_STRING;
        String zipPathname = BatchClassManagementConstants.EMPTY_STRING;
        List<FileItem> items;
        try {
            items = (List<FileItem>) upload.parseRequest(req);
            for (FileItem item : items) {
                if (!item.isFormField() && "importFile".equals(item.getFieldName())) {
                    zipFileName = item.getName();
                    if (zipFileName != null) {
                        zipFileName = zipFileName.substring(zipFileName.lastIndexOf(File.separator) + 1);
                    }
                    zipPathname = exportSerailizationFolderPath + File.separator + zipFileName;
                    if (zipFileName != null) {
                        zipFileName = FilenameUtils.getName(zipFileName);
                    }
                    try {
                        instream = item.getInputStream();
                        tempZipFile = new File(zipPathname);
                        if (tempZipFile.exists()) {
                            tempZipFile.delete();
                        }
                        out = new FileOutputStream(tempZipFile);
                        byte buf[] = new byte[BatchClassManagementConstants.BUFFER_SIZE];
                        int len = instream.read(buf);
                        while ((len) > 0) {
                            out.write(buf, 0, len);
                            len = instream.read(buf);
                        }
                    } catch (FileNotFoundException e) {
                        LOG.error("Unable to create the export folder." + e, e);
                        printWriter.write("Unable to create the export folder.Please try again.");
                    } catch (IOException e) {
                        LOG.error("Unable to read the file." + e, e);
                        printWriter.write("Unable to read the file.Please try again.");
                    } finally {
                        IOUtils.closeQuietly(out);
                        IOUtils.closeQuietly(instream);
                    }
                }
            }
        } catch (FileUploadException e) {
            LOG.error("Unable to read the form contents." + e, e);
            printWriter.write("Unable to read the form contents.Please try again.");
        }

        tempOutputUnZipDir = exportSerailizationFolderPath + File.separator
                + zipFileName.substring(0, zipFileName.lastIndexOf('.'));
        try {
            FileUtils.unzip(tempZipFile, tempOutputUnZipDir);
        } catch (Exception e) {
            LOG.error("Unable to unzip the file." + e, e);
            printWriter.write("Unable to unzip the file.Please try again.");
            tempZipFile.delete();
        }

        String serializableFilePath = FileUtils.getFileNameOfTypeFromFolder(tempOutputUnZipDir, SERIALIZATION_EXT);
        InputStream serializableFileStream = null;
        try {
            serializableFileStream = new FileInputStream(serializableFilePath);
            importBatchClass = (BatchClass) SerializationUtils.deserialize(serializableFileStream);
            zipWorkFlowName = importBatchClass.getName();
            systemFolderPath = importBatchClass.getSystemFolder();
            if (systemFolderPath == null) {
                systemFolderPath = BatchClassManagementConstants.EMPTY_STRING;
            }
        } catch (Exception e) {
            tempZipFile.delete();
            LOG.error("Error while importing" + e, e);
            printWriter.write("Error while importing.Please try again.");
        } finally {
            IOUtils.closeQuietly(serializableFileStream);
        }
    } else {
        LOG.error("Request contents type is not supported.");
        printWriter.write("Request contents type is not supported.");
    }
    if (tempZipFile != null) {
        tempZipFile.delete();
    }
    List<String> uncList = bcService.getAssociatedUNCList(zipWorkFlowName);
    boolean isWorkflowDeployed = deploymentService.isDeployed(zipWorkFlowName);
    boolean isWorkflowEqual = imService.isImportWorkflowEqualDeployedWorkflow(importBatchClass,
            importBatchClass.getName());
    printWriterMethod(printWriter, zipWorkFlowName, tempOutputUnZipDir, systemFolderPath, uncList,
            isWorkflowDeployed, isWorkflowEqual);
}
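Note that the two Ephesoft ImportBatchClassUploadServlet examples above pass an InputStream rather than a byte[]; commons-lang also provides a SerializationUtils.deserialize(InputStream) overload for that case. A minimal sketch, assuming a placeholder file name "batchclass.ser" that was written earlier with SerializationUtils.serialize(obj, outputStream):

import java.io.FileInputStream;
import java.io.InputStream;
import org.apache.commons.lang.SerializationUtils;

public class StreamDeserializeExample {
    public static void main(String[] args) throws Exception {
        // "batchclass.ser" is a placeholder path, not a file from the projects above.
        InputStream in = new FileInputStream("batchclass.ser");
        try {
            // deserialize(InputStream) reads one serialized object from the stream.
            Object restored = SerializationUtils.deserialize(in);
            System.out.println(restored);
        } finally {
            in.close();
        }
    }
}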