List of usage examples for org.apache.commons.lang SerializationUtils serialize
public static byte[] serialize(Serializable obj)
Serializes an Object to a byte array for storage or transmission.
From source file:org.apache.hama.ml.perception.SmallMultiLayerPerceptron.java
/**
 * Writes this MLP's meta-data, weight matrices, and feature-transformer class
 * to {@code output}. The field order here defines the on-disk model format and
 * must match the corresponding read path — do not reorder.
 *
 * @param output the sink to serialize the model into
 * @throws IOException if any write fails
 */
@Override
public void write(DataOutput output) throws IOException {
  WritableUtils.writeString(output, MLPType);
  output.writeDouble(learningRate);
  output.writeDouble(regularization);
  output.writeDouble(momentum);
  output.writeInt(numberOfLayers);
  WritableUtils.writeString(output, squashingFunctionName);
  WritableUtils.writeString(output, costFunctionName);
  // write the number of neurons for each layer
  for (int i = 0; i < this.numberOfLayers; ++i) {
    output.writeInt(this.layerSizeArray[i]);
  }
  // N layers are connected by N-1 weight matrices, one per adjacent pair
  for (int i = 0; i < numberOfLayers - 1; ++i) {
    MatrixWritable matrixWritable = new MatrixWritable(this.weightMatrice[i]);
    matrixWritable.write(output);
  }
  // serialize the feature transformer's Class object as a length-prefixed
  // byte block (java serialization of a Class records its name for reloading)
  Class<? extends FeatureTransformer> featureTransformerCls = this.featureTransformer.getClass();
  byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
  output.writeInt(featureTransformerBytes.length);
  output.write(featureTransformerBytes);
}
From source file:org.apache.hama.ml.perception.TestSmallMultiLayerPerceptron.java
/**
 * Tests the output of an example MLP: hand-writes a model file in the exact
 * binary layout {@code SmallMultiLayerPerceptron#write} produces, loads an MLP
 * from it, and checks the forward-pass output against precomputed values.
 */
@Test
public void testOutput() {
  // write the MLP meta-data manually
  String modelPath = "/tmp/sampleModel-testOutput.data";
  Configuration conf = new Configuration();
  try {
    FileSystem fs = FileSystem.get(conf);
    FSDataOutputStream output = fs.create(new Path(modelPath), true);
    String MLPType = SmallMultiLayerPerceptron.class.getName();
    double learningRate = 0.5;
    double regularization = 0.0;
    double momentum = 0.1;
    String squashingFunctionName = "Sigmoid";
    String costFunctionName = "SquaredError";
    int[] layerSizeArray = new int[] { 3, 2, 3, 3 };
    int numberOfLayers = layerSizeArray.length;
    // field order below mirrors SmallMultiLayerPerceptron.write — keep in sync
    WritableUtils.writeString(output, MLPType);
    output.writeDouble(learningRate);
    output.writeDouble(regularization);
    output.writeDouble(momentum);
    output.writeInt(numberOfLayers);
    WritableUtils.writeString(output, squashingFunctionName);
    WritableUtils.writeString(output, costFunctionName);
    // write the number of neurons for each layer
    for (int i = 0; i < numberOfLayers; ++i) {
      output.writeInt(layerSizeArray[i]);
    }
    // weight matrices: rows = previous layer size + 1 (bias), cols = next layer size
    double[][] matrix01 = { // 4 by 2
        { 0.5, 0.2 }, { 0.1, 0.1 }, { 0.2, 0.5 }, { 0.1, 0.5 } };
    double[][] matrix12 = { // 3 by 3
        { 0.1, 0.2, 0.5 }, { 0.2, 0.5, 0.2 }, { 0.5, 0.5, 0.1 } };
    double[][] matrix23 = { // 4 by 3
        { 0.2, 0.5, 0.2 }, { 0.5, 0.1, 0.5 }, { 0.1, 0.2, 0.1 }, { 0.1, 0.2, 0.5 } };
    DoubleMatrix[] matrices = { new DenseDoubleMatrix(matrix01), new DenseDoubleMatrix(matrix12),
        new DenseDoubleMatrix(matrix23) };
    for (DoubleMatrix mat : matrices) {
      MatrixWritable.write(mat, output);
    }
    // serialize the feature transformer (length-prefixed Class bytes)
    FeatureTransformer transformer = new DefaultFeatureTransformer();
    Class<? extends FeatureTransformer> featureTransformerCls = transformer.getClass();
    byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
    output.writeInt(featureTransformerBytes.length);
    output.write(featureTransformerBytes);
    output.close();
  } catch (IOException e) {
    e.printStackTrace();
  }
  // initialize the mlp with the existing model meta-data and get the output
  MultiLayerPerceptron mlp = new SmallMultiLayerPerceptron(modelPath);
  DoubleVector input = new DenseDoubleVector(new double[] { 1, 2, 3 });
  try {
    DoubleVector result = mlp.output(input);
    // expected values precomputed for the fixed weights above
    assertArrayEquals(new double[] { 0.6636557, 0.7009963, 0.7213835 }, result.toArray(), 0.0001);
  } catch (Exception e1) {
    e1.printStackTrace();
  }
  // delete meta-data
  try {
    FileSystem fs = FileSystem.get(conf);
    fs.delete(new Path(modelPath), true);
  } catch (IOException e) {
    e.printStackTrace();
  }
}
From source file:org.apache.horn.core.AbstractNeuralNetwork.java
/**
 * Writes this network's type, learning rate, model path, and
 * feature-transformer class to {@code output}. Field order defines the
 * serialized format and must match the corresponding read path.
 *
 * @param output the sink to serialize the model into
 * @throws IOException if any write fails
 */
@Override
public void write(DataOutput output) throws IOException {
  // write model type
  WritableUtils.writeString(output, modelType);
  // write learning rate
  output.writeFloat(learningRate);
  // write model path; the literal string "null" is the absent-path sentinel
  // the reader is expected to recognize
  if (this.modelPath != null) {
    WritableUtils.writeString(output, modelPath);
  } else {
    WritableUtils.writeString(output, "null");
  }
  // serialize the feature transformer's Class object as a
  // length-prefixed byte block
  Class<? extends FloatFeatureTransformer> featureTransformerCls = this.featureTransformer.getClass();
  byte[] featureTransformerBytes = SerializationUtils.serialize(featureTransformerCls);
  output.writeInt(featureTransformerBytes.length);
  output.write(featureTransformerBytes);
}
From source file:org.apache.lens.api.serialize.SerializationTest.java
/**
 * Round-trips the given object through java serialization and asserts that
 * the deserialized copy is equal to the original.
 *
 * @param originalSerializable the object to round-trip; must have a
 *        well-behaved {@code equals} for the assertion to be meaningful
 */
public void verifySerializationAndDeserialization(final Serializable originalSerializable) {
  final byte[] serializedForm = SerializationUtils.serialize(originalSerializable);
  @SuppressWarnings("unchecked")
  final T roundTripped = (T) SerializationUtils.deserialize(serializedForm);
  assertEquals(roundTripped, originalSerializable);
}
From source file:org.apache.metamodel.util.HdfsResourceTest.java
/**
 * Verifies that an HdfsResource survives a java-serialization round-trip
 * and that the reconstructed copy equals the original.
 */
public void testSerialization() throws Exception {
  final HdfsResource original = new HdfsResource("hdfs://localhost:9000/home/metamodel.txt");
  final byte[] serializedForm = SerializationUtils.serialize(original);
  final Object roundTripped = SerializationUtils.deserialize(serializedForm);
  assertEquals(original, roundTripped);
}
From source file:org.apache.metron.parsers.integration.ParserDriver.java
/**
 * Drives the shim parser bolt over the given raw records and returns the
 * collected results.
 *
 * @param in raw message payloads to feed through the bolt
 * @return the bolt's accumulated processing results
 */
public ProcessorResult<List<byte[]>> run(Iterable<byte[]> in) {
  ShimParserBolt bolt = new ShimParserBolt(new ArrayList<>());
  // NOTE(review): the bolt is round-tripped through java serialization but the
  // deserialized copy b2 is never used — presumably this just asserts the bolt
  // is serializable (a Storm deployment requirement). TODO confirm intent.
  byte[] b = SerializationUtils.serialize(bolt);
  ShimParserBolt b2 = (ShimParserBolt) SerializationUtils.deserialize(b);
  OutputCollector collector = mock(OutputCollector.class);
  bolt.prepare(null, null, collector);
  for (byte[] record : in) {
    bolt.execute(toTuple(record));
  }
  return bolt.getResults();
}
From source file:org.apache.metron.stellar.dsl.functions.HashFunctionsTest.java
/**
 * A non-String Serializable value (a singleton list) should be hashed over
 * its java-serialized form; the Stellar HASH result must match a digest
 * computed the same way by hand.
 */
@Test
public void nonStringValueThatIsSerializableHashesSuccessfully() throws Exception {
  final String algorithm = "'md5'";
  final String valueToHash = "'My value to hash'";
  final Serializable toHash = (Serializable) Collections.singletonList(valueToHash);

  // compute the expected digest over the value's serialized bytes
  final MessageDigest digest = MessageDigest.getInstance(algorithm.replace("'", ""));
  digest.update(SerializationUtils.serialize(toHash));

  final Map<String, Object> variables = new HashMap<>();
  variables.put("toHash", toHash);
  assertEquals(expectedHexString(digest), run("HASH(toHash, " + algorithm + ")", variables));
}
From source file:org.apache.metron.stellar.dsl.functions.HashFunctionsTest.java
/**
 * HASH should accept both its value and its algorithm as Stellar variables;
 * the result must match a digest computed by hand over the value's
 * java-serialized form.
 */
@Test
public void callingHashFunctionsWithVariablesAsInputHashesSuccessfully() throws Exception {
  final String algorithm = "md5";
  final String valueToHash = "'My value to hash'";
  final Serializable toHash = (Serializable) Collections.singletonList(valueToHash);

  // compute the expected digest over the value's serialized bytes
  final MessageDigest digest = MessageDigest.getInstance(algorithm);
  digest.update(SerializationUtils.serialize(toHash));

  final Map<String, Object> variables = new HashMap<>();
  variables.put("toHash", toHash);
  variables.put("hashType", algorithm);
  assertEquals(expectedHexString(digest), run("HASH(toHash, hashType)", variables));
}
From source file:org.apache.ojb.broker.accesslayer.conversions.Object2ByteArrUncompressedFieldConversion.java
/**
 * Converts a java object to its uncompressed serialized byte-array form for
 * SQL storage. A null source maps to null. Any failure — including the
 * ClassCastException raised for a non-Serializable source — is wrapped in a
 * ConversionException with the original cause preserved.
 *
 * @param source the value to serialize; may be null
 * @return a byte[] holding the serialized form, or null
 */
public Object javaToSql(Object source) {
  if (source == null) {
    return null;
  }
  try {
    return SerializationUtils.serialize((Serializable) source);
  } catch (Throwable cause) {
    // broad catch is deliberate: every failure mode surfaces uniformly
    // as a ConversionException to the persistence layer
    throw new ConversionException(cause);
  }
}
From source file:org.apache.ojb.broker.metadata.FieldDescriptor.java
/**
 * Returns a deep copy of this descriptor via a java-serialization
 * round-trip. Uses {@code SerializationUtils.clone}, the purpose-built
 * commons-lang API equivalent to
 * {@code deserialize(serialize(this))}, which states the intent directly.
 *
 * @return a deep copy of this FieldDescriptor
 */
public Object clone() {
    return SerializationUtils.clone(this);
}