Usage examples for org.apache.commons.lang.SerializationUtils#deserialize

public static Object deserialize(byte[] objectData)

Deserializes a single Object from an array of bytes.
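A minimal, self-contained round trip (not taken from any of the projects below): serialize any Serializable to a byte[] and hand that array back to deserialize, casting the result to the expected type.

import java.io.Serializable;
import org.apache.commons.lang.SerializationUtils;

public class RoundTripExample {
    public static void main(String[] args) {
        // Any Serializable value works; a String is used here for brevity.
        Serializable original = "hello";
        byte[] bytes = SerializationUtils.serialize(original);
        // deserialize returns Object, so the caller casts to the expected type.
        String copy = (String) SerializationUtils.deserialize(bytes);
        System.out.println(copy.equals(original)); // true
    }
}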
From source file:org.apache.ojb.broker.metadata.FieldDescriptor.java
public Object clone() {
    return SerializationUtils.deserialize(SerializationUtils.serialize(this));
}
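The same deep-copy idiom is available as a one-liner in commons-lang itself; a sketch, assuming the class (here FieldDescriptor stands in for any class used this way) implements Serializable.

public Object clone() {
    // SerializationUtils.clone performs the serialize-then-deserialize
    // round trip shown above in a single call.
    return SerializationUtils.clone(this);
}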
From source file:org.apache.ojb.broker.metadata.PersistentFieldTest.java
private void runFieldTestsFor(Class targetClass, boolean supportJavaBeanNames) throws Exception {
    ((OjbConfiguration) OjbConfigurator.getInstance().getConfigurationFor(null))
            .setPersistentFieldClass(targetClass);
    PersistentField pfNM_Name = newInstance(targetClass, NestedMain.class, NESTED_MAIN_NAME);
    PersistentField pfNDD_RD = newInstance(targetClass, NestedMain.class, NESTED_DETAIL_DETAIL_REAL_DETAIL);
    PersistentField pfNDD_RDD = newInstance(targetClass, NestedMain.class, NESTED_DETAIL_DETAIL_REAL_DESCRIPTION);
    PersistentField pfND_MJB = null;
    PersistentField pfNE_Name = null;
    if (supportJavaBeanNames) {
        pfND_MJB = newInstance(targetClass, NestedMain.class, NESTED_DETAIL_MORE_JAVA_BEAN);
        pfNE_Name = newInstance(targetClass, NestedMain.class, NESTED_ENTRY_NAME);
    }

    // test getter
    NestedMain nm = createNestedObject();
    Object result = pfNM_Name.get(nm);
    assertEquals(NESTED_MAIN_NAME_VALUE, result);
    result = pfNDD_RD.get(nm);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DETAIL_VALUE, result);
    result = pfNDD_RDD.get(nm);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DESCRIPTION_VALUE, result);
    if (supportJavaBeanNames) {
        result = pfND_MJB.get(nm);
        assertEquals(NESTED_DETAIL_MORE_JAVA_BEAN_VALUE, result);
        result = pfNE_Name.get(nm);
        assertEquals(NESTED_ENTRY_NAME_VALUE, result);
    }

    NestedMain newNM = new NestedMain();
    // test setter
    pfNM_Name.set(newNM, NESTED_MAIN_NAME_VALUE);
    pfNDD_RD.set(newNM, NESTED_DETAIL_DETAIL_REAL_DETAIL_VALUE);
    result = pfNDD_RDD.get(newNM);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DESCRIPTION_VALUE, result);
    result = pfNM_Name.get(newNM);
    assertEquals(NESTED_MAIN_NAME_VALUE, result);
    result = pfNDD_RD.get(newNM);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DETAIL_VALUE, result);
    if (supportJavaBeanNames) {
        pfND_MJB.set(newNM, NESTED_DETAIL_MORE_JAVA_BEAN_VALUE);
        pfNE_Name.set(newNM, NESTED_ENTRY_NAME_VALUE);
        result = pfND_MJB.get(newNM);
        assertEquals(NESTED_DETAIL_MORE_JAVA_BEAN_VALUE, result);
        result = pfNE_Name.get(newNM);
        assertEquals(NESTED_ENTRY_NAME_VALUE, result);
    }

    // serialize fields and test again
    pfNM_Name = (PersistentField) SerializationUtils.deserialize(SerializationUtils.serialize(pfNM_Name));
    pfNDD_RD = (PersistentField) SerializationUtils.deserialize(SerializationUtils.serialize(pfNDD_RD));
    pfNDD_RDD = (PersistentField) SerializationUtils.deserialize(SerializationUtils.serialize(pfNDD_RDD));
    if (supportJavaBeanNames) {
        pfND_MJB = (PersistentField) SerializationUtils.deserialize(SerializationUtils.serialize(pfND_MJB));
        pfNE_Name = (PersistentField) SerializationUtils.deserialize(SerializationUtils.serialize(pfNE_Name));
    }

    // test getter
    nm = createNestedObject();
    result = pfNM_Name.get(nm);
    assertEquals(NESTED_MAIN_NAME_VALUE, result);
    result = pfNDD_RD.get(nm);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DETAIL_VALUE, result);
    result = pfNDD_RDD.get(nm);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DESCRIPTION_VALUE, result);
    if (supportJavaBeanNames) {
        result = pfND_MJB.get(nm);
        assertEquals(NESTED_DETAIL_MORE_JAVA_BEAN_VALUE, result);
        result = pfNE_Name.get(nm);
        assertEquals(NESTED_ENTRY_NAME_VALUE, result);
    }

    newNM = new NestedMain();
    // test setter
    pfNM_Name.set(newNM, NESTED_MAIN_NAME_VALUE);
    pfNDD_RD.set(newNM, NESTED_DETAIL_DETAIL_REAL_DETAIL_VALUE);
    result = pfNDD_RDD.get(newNM);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DESCRIPTION_VALUE, result);
    result = pfNM_Name.get(newNM);
    assertEquals(NESTED_MAIN_NAME_VALUE, result);
    result = pfNDD_RD.get(newNM);
    assertEquals(NESTED_DETAIL_DETAIL_REAL_DETAIL_VALUE, result);
    if (supportJavaBeanNames) {
        pfND_MJB.set(newNM, NESTED_DETAIL_MORE_JAVA_BEAN_VALUE);
        pfNE_Name.set(newNM, NESTED_ENTRY_NAME_VALUE);
        result = pfND_MJB.get(newNM);
        assertEquals(NESTED_DETAIL_MORE_JAVA_BEAN_VALUE, result);
        result = pfNE_Name.get(newNM);
        assertEquals(NESTED_ENTRY_NAME_VALUE, result);
    }
}
From source file:org.apache.ojb.broker.metadata.RepositoryPersistor.java
protected DescriptorRepository deserialize(File serFile) {
    DescriptorRepository result = null;
    try {
        FileInputStream fis = new FileInputStream(serFile);
        // deserialize repository
        result = (DescriptorRepository) SerializationUtils.deserialize(fis);
    } catch (Exception e) {
        log.error("Deserialisation failed, using input path: " + serFile.getAbsolutePath(), e);
    }
    return result;
}
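A variant sketch under the assumption that the file fits comfortably in memory: read all bytes first (commons-io IOUtils) and use the byte[] overload, so the stream's lifetime is handled explicitly rather than depending on what deserialize(InputStream) does with it.

static DescriptorRepository readRepository(File serFile) throws IOException {
    FileInputStream fis = new FileInputStream(serFile);
    try {
        // Load the whole file, then deserialize from the byte array.
        byte[] bytes = IOUtils.toByteArray(fis);
        return (DescriptorRepository) SerializationUtils.deserialize(bytes);
    } finally {
        IOUtils.closeQuietly(fis);
    }
}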
From source file:org.apache.oodt.cas.resource.util.MesosUtilities.java
/**
 * Build a JobSpec from a ByteString off the wire.
 * @param data - ByteString to deserialize
 * @return newly minted JobSpec
 * @throws IllegalAccessException
 * @throws InstantiationException
 * @throws ClassNotFoundException
 */
public static JobSpec byteStringToJobSpec(ByteString data)
        throws ClassNotFoundException, InstantiationException, IllegalAccessException {
    return ((JobSpecSerializer) SerializationUtils.deserialize(data.toByteArray())).getJobSpec();
}
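A generic sketch of the same hop in both directions, without assuming anything about JobSpecSerializer beyond what the snippet shows; ByteString.copyFrom and toByteArray are standard protobuf calls.

// Helpers for converting between a protobuf ByteString and any Serializable payload.
static ByteString toByteString(Serializable payload) {
    return ByteString.copyFrom(SerializationUtils.serialize(payload));
}

static Object fromByteString(ByteString data) {
    return SerializationUtils.deserialize(data.toByteArray());
}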
From source file:org.apache.reef.runtime.common.utils.DefaultExceptionCodec.java
@Override
public Optional<Throwable> fromBytes(final byte[] bytes) {
    try {
        return Optional.<Throwable>of((Throwable) SerializationUtils.deserialize(bytes));
    } catch (SerializationException | IllegalArgumentException e) {
        LOG.log(Level.FINE, "Unable to deserialize a Throwable.", e);
        return Optional.empty();
    }
}
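A standalone sketch of the same defensive pattern: commons-lang reports deserialization failures (corrupt bytes, missing classes) as SerializationException, and the byte[] overload rejects a null argument with IllegalArgumentException, so both are caught here.

static Throwable tryDeserializeThrowable(byte[] bytes) {
    try {
        return (Throwable) SerializationUtils.deserialize(bytes);
    } catch (SerializationException | IllegalArgumentException e) {
        // Caller treats null as "could not decode".
        return null;
    }
}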
From source file:org.apache.reef.vortex.common.VortexAvroUtils.java
/**
 * Deserialize byte array to VortexRequest.
 * @param bytes Byte array to deserialize.
 * @return De-serialized VortexRequest.
 */
public VortexRequest toVortexRequest(final byte[] bytes) {
    final AvroVortexRequest avroVortexRequest = toAvroObject(bytes, AvroVortexRequest.class);

    final VortexRequest vortexRequest;
    switch (avroVortexRequest.getRequestType()) {
    case AggregateExecute:
        final AvroTaskletAggregateExecutionRequest taskletAggregateExecutionRequest =
                (AvroTaskletAggregateExecutionRequest) avroVortexRequest.getTaskletRequest();
        vortexRequest = new TaskletAggregateExecutionRequest<>(
                taskletAggregateExecutionRequest.getTaskletId(),
                taskletAggregateExecutionRequest.getAggregateFunctionId(),
                aggregateFunctionRepository
                        .getFunction(taskletAggregateExecutionRequest.getAggregateFunctionId())
                        .getInputCodec()
                        .decode(taskletAggregateExecutionRequest.getSerializedInput().array()));
        break;
    case Aggregate:
        final AvroTaskletAggregationRequest taskletAggregationRequest =
                (AvroTaskletAggregationRequest) avroVortexRequest.getTaskletRequest();
        final VortexAggregateFunction aggregateFunction = (VortexAggregateFunction) SerializationUtils
                .deserialize(taskletAggregationRequest.getSerializedAggregateFunction().array());
        final VortexFunction functionForAggregation = (VortexFunction) SerializationUtils
                .deserialize(taskletAggregationRequest.getSerializedUserFunction().array());
        final VortexAggregatePolicy policy = (VortexAggregatePolicy) SerializationUtils
                .deserialize(taskletAggregationRequest.getSerializedPolicy().array());
        vortexRequest = new TaskletAggregationRequest<>(
                taskletAggregationRequest.getAggregateFunctionId(),
                aggregateFunction, functionForAggregation, policy);
        break;
    case ExecuteTasklet:
        final AvroTaskletExecutionRequest taskletExecutionRequest =
                (AvroTaskletExecutionRequest) avroVortexRequest.getTaskletRequest();
        // TODO[REEF-1003]: Use reflection instead of serialization when launching VortexFunction
        final VortexFunction function = (VortexFunction) SerializationUtils
                .deserialize(taskletExecutionRequest.getSerializedUserFunction().array());
        // TODO[REEF-1113]: Handle serialization failure separately in Vortex
        vortexRequest = new TaskletExecutionRequest(
                taskletExecutionRequest.getTaskletId(), function,
                function.getInputCodec().decode(taskletExecutionRequest.getSerializedInput().array()));
        break;
    case CancelTasklet:
        final AvroTaskletCancellationRequest taskletCancellationRequest =
                (AvroTaskletCancellationRequest) avroVortexRequest.getTaskletRequest();
        vortexRequest = new TaskletCancellationRequest(taskletCancellationRequest.getTaskletId());
        break;
    default:
        throw new RuntimeException("Undefined VortexRequest type");
    }
    return vortexRequest;
}
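The Avro-generated fields above hand out their payloads as ByteBuffers and rely on ByteBuffer.array(), which assumes a heap-backed buffer. A hedged, copy-based variant that also works for direct or read-only buffers might look like this; it is a sketch, not the Vortex implementation.

static Object deserializeFromBuffer(ByteBuffer buffer) {
    // Copy the remaining bytes via a duplicate so the caller's position is untouched.
    byte[] bytes = new byte[buffer.remaining()];
    buffer.duplicate().get(bytes);
    return SerializationUtils.deserialize(bytes);
}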
From source file:org.apache.reef.vortex.common.VortexAvroUtils.java
/**
 * Deserialize byte array to WorkerReport.
 * @param bytes Byte array to deserialize.
 * @return De-serialized WorkerReport.
 */
public WorkerReport toWorkerReport(final byte[] bytes) {
    final AvroWorkerReport avroWorkerReport = toAvroObject(bytes, AvroWorkerReport.class);
    final List<TaskletReport> workerTaskletReports = new ArrayList<>();

    for (final AvroTaskletReport avroTaskletReport : avroWorkerReport.getTaskletReports()) {
        final TaskletReport taskletReport;
        switch (avroTaskletReport.getReportType()) {
        case TaskletResult:
            final AvroTaskletResultReport taskletResultReport =
                    (AvroTaskletResultReport) avroTaskletReport.getTaskletReport();
            taskletReport = new TaskletResultReport(taskletResultReport.getTaskletId(),
                    taskletResultReport.getSerializedOutput().array());
            break;
        case TaskletAggregationResult:
            final AvroTaskletAggregationResultReport taskletAggregationResultReport =
                    (AvroTaskletAggregationResultReport) avroTaskletReport.getTaskletReport();
            taskletReport = new TaskletAggregationResultReport(
                    taskletAggregationResultReport.getTaskletIds(),
                    taskletAggregationResultReport.getSerializedOutput().array());
            break;
        case TaskletCancelled:
            final AvroTaskletCancelledReport taskletCancelledReport =
                    (AvroTaskletCancelledReport) avroTaskletReport.getTaskletReport();
            taskletReport = new TaskletCancelledReport(taskletCancelledReport.getTaskletId());
            break;
        case TaskletFailure:
            final AvroTaskletFailureReport taskletFailureReport =
                    (AvroTaskletFailureReport) avroTaskletReport.getTaskletReport();
            final Exception exception = (Exception) SerializationUtils
                    .deserialize(taskletFailureReport.getSerializedException().array());
            taskletReport = new TaskletFailureReport(taskletFailureReport.getTaskletId(), exception);
            break;
        case TaskletAggregationFailure:
            final AvroTaskletAggregationFailureReport taskletAggregationFailureReport =
                    (AvroTaskletAggregationFailureReport) avroTaskletReport.getTaskletReport();
            final Exception aggregationException = (Exception) SerializationUtils
                    .deserialize(taskletAggregationFailureReport.getSerializedException().array());
            taskletReport = new TaskletAggregationFailureReport(
                    taskletAggregationFailureReport.getTaskletIds(), aggregationException);
            break;
        default:
            throw new RuntimeException("Undefined TaskletReport type");
        }
        workerTaskletReports.add(taskletReport);
    }
    return new WorkerReport(workerTaskletReports);
}
From source file:org.apache.reef.vortex.evaluator.VortexWorker.java
/**
 * Starts the scheduler & executor and waits until termination.
 */
@Override
public byte[] call(final byte[] memento) throws Exception {
    final ExecutorService schedulerThread = Executors.newSingleThreadExecutor();
    final ExecutorService commandExecutor = Executors.newFixedThreadPool(numOfThreads);

    // Scheduling thread starts
    schedulerThread.execute(new Runnable() {
        @Override
        public void run() {
            while (true) {
                // Scheduler Thread: Pick a command to execute (For now, simple FIFO order)
                final byte[] message;
                try {
                    message = pendingRequests.takeFirst();
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }

                // Scheduler Thread: Pass the command to the worker thread pool to be executed
                commandExecutor.execute(new Runnable() {
                    @Override
                    public void run() {
                        // Command Executor: Deserialize the command
                        final VortexRequest vortexRequest =
                                (VortexRequest) SerializationUtils.deserialize(message);
                        switch (vortexRequest.getType()) {
                        case ExecuteTasklet:
                            final TaskletExecutionRequest taskletExecutionRequest =
                                    (TaskletExecutionRequest) vortexRequest;
                            try {
                                // Command Executor: Execute the command
                                final Serializable result = taskletExecutionRequest.execute();
                                // Command Executor: Tasklet successfully returns result
                                final WorkerReport report = new TaskletResultReport<>(
                                        taskletExecutionRequest.getTaskletId(), result);
                                workerReports.addLast(SerializationUtils.serialize(report));
                            } catch (Exception e) {
                                // Command Executor: Tasklet throws an exception
                                final WorkerReport report = new TaskletFailureReport(
                                        taskletExecutionRequest.getTaskletId(), e);
                                workerReports.addLast(SerializationUtils.serialize(report));
                            }
                            heartBeatTriggerManager.triggerHeartBeat();
                            break;
                        default:
                            throw new RuntimeException("Unknown Command");
                        }
                    }
                });
            }
        }
    });

    terminated.await();
    return null;
}
From source file:org.archive.modules.recrawl.PersistProcessor.java
/**
 * Populates an environment db from a persist log. If historyMap is
 * not provided, only logs the entries that would have been populated.
 *
 * @param persistLogReader persist log
 * @param historyMap new environment db (or null for a dry run)
 * @return number of records
 * @throws UnsupportedEncodingException
 * @throws DatabaseException
 */
@SuppressWarnings({ "resource", "rawtypes" })
private static int populatePersistEnvFromLog(BufferedReader persistLogReader,
        StoredSortedMap<String, Map> historyMap) throws UnsupportedEncodingException, DatabaseException {
    int count = 0;
    Iterator<String> iter = new LineReadingIterator(persistLogReader);
    while (iter.hasNext()) {
        String line = iter.next();
        if (line.length() == 0) {
            continue;
        }
        String[] splits = line.split(" ");
        if (splits.length != 2) {
            logger.severe("bad line has " + splits.length + " fields (should be 2): " + line);
            continue;
        }
        Map alist;
        try {
            alist = (Map) SerializationUtils.deserialize(Base64.decodeBase64(splits[1].getBytes("UTF-8")));
        } catch (Exception e) {
            logger.severe("caught exception " + e + " deserializing line: " + line);
            continue;
        }
        if (logger.isLoggable(Level.FINE)) {
            logger.fine(splits[0] + " " + ArchiveUtils.prettyString(alist));
        }

        if (historyMap != null) {
            try {
                historyMap.put(splits[0], alist);
            } catch (Exception e) {
                logger.log(Level.SEVERE, "caught exception after loading " + count
                        + " urls from the persist log (perhaps crawl was stopped by user?)", e);
                IOUtils.closeQuietly(persistLogReader);
                // seems to finish most cleanly when we return rather than throw something
                return count;
            }
        }
        count++;
    }
    IOUtils.closeQuietly(persistLogReader);
    return count;
}
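A minimal sketch of the encode/decode pair used above, assuming a Serializable value and commons-codec's Base64; the "key<space>payload" persist-log layout itself is Heritrix-specific and not reproduced here.

static String encodeRecord(Serializable value) throws UnsupportedEncodingException {
    // Serialize, then Base64-encode so the record can live on one text line.
    return new String(Base64.encodeBase64(SerializationUtils.serialize(value)), "UTF-8");
}

static Object decodeRecord(String encoded) throws UnsupportedEncodingException {
    // Reverse of encodeRecord: Base64-decode, then deserialize.
    return SerializationUtils.deserialize(Base64.decodeBase64(encoded.getBytes("UTF-8")));
}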
From source file:org.archive.state.ModuleTestBase.java
/**
 * Tests that the module can be serialized. The value returned by
 * {@link #makeModule} is serialized to a byte array, and then
 * deserialized, and then serialized to a second byte array. The results
 * are passed to {@link #verifySerialization}, which will simply compare
 * the two byte arrays for equality. (That won't always work; see
 * that method for details.)
 *
 * <p>If nothing else, this test is useful for catching NotSerializable
 * exceptions for your module or classes it depends on.
 *
 * @throws Exception if the module cannot be serialized
 */
public void testSerializationIfAppropriate() throws Exception {
    Object first = makeModule();
    if (!(first instanceof Serializable)) {
        return;
    }
    byte[] firstBytes = SerializationUtils.serialize((Serializable) first);

    Object second = SerializationUtils.deserialize(firstBytes);
    byte[] secondBytes = SerializationUtils.serialize((Serializable) second);

    Object third = SerializationUtils.deserialize(secondBytes);
    byte[] thirdBytes = SerializationUtils.serialize((Serializable) third);

    // HashMap serialization reverses order of items in linked buckets
    // each roundtrip -- so don't check one roundtrip, check two.
    //
    // NOTE This is JVM-dependent behaviour, and since <= 1.7.0_u51 this
    // ordering of serialisation cannot be relied upon. However, a TreeMap
    // can be used instead of a HashMap, and this appears to have
    // predictable serialisation behaviour.
    //
    // @see
    // http://stackoverflow.com/questions/22392258/serialization-round-trip-of-hash-map-does-not-preserve-order
    //
    // verifySerialization(first, firstBytes, second, secondBytes);
    verifySerialization(first, firstBytes, third, thirdBytes);
}
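For a class with a reliable equals(), a minimal variant of the same round-trip check can compare the objects themselves rather than the serialized bytes; MyModule below is a hypothetical Serializable class, not part of the test base above.

MyModule original = new MyModule();
byte[] bytes = SerializationUtils.serialize(original);
MyModule copy = (MyModule) SerializationUtils.deserialize(bytes);
// Relies on MyModule.equals(); the test above compares byte arrays instead
// so it works even when equals() is not defined.
assertEquals(original, copy);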