List of usage examples for org.apache.commons.lang3.SerializationUtils.serialize
public static byte[] serialize(final Serializable obj)
Serializes an Object to a byte array, suitable for storage or transmission. The object (and everything it references) must implement java.io.Serializable; on failure the method throws an unchecked SerializationException.
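Before the real-world examples below, a minimal round-trip sketch showing how serialize is typically paired with SerializationUtils.deserialize (the Payload class and its field are hypothetical, introduced only for illustration):

import java.io.Serializable;
import org.apache.commons.lang3.SerializationUtils;

public class RoundTripExample {
    // Hypothetical payload type; any class implementing Serializable works.
    static class Payload implements Serializable {
        private static final long serialVersionUID = 1L;
        final String name;
        Payload(String name) { this.name = name; }
    }

    public static void main(String[] args) {
        // serialize: object -> byte[]
        byte[] bytes = SerializationUtils.serialize(new Payload("example"));
        // deserialize reverses serialize; the runtime type is restored.
        Payload copy = SerializationUtils.deserialize(bytes);
        System.out.println(copy.name); // prints "example"
    }
}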
From source file:org.apache.beam.sdk.io.hbase.SerializableScanTest.java
@Test
public void testSerializationDeserialization() throws Exception {
    Scan scan = new Scan().setStartRow("1".getBytes("UTF-8"));
    byte[] object = SerializationUtils.serialize(new SerializableScan(scan));
    SerializableScan serScan = SerializationUtils.deserialize(object);
    assertNotNull(serScan);
    assertEquals(new String(serScan.get().getStartRow(), "UTF-8"), "1");
}
From source file:org.apache.eagle.alert.engine.serialization.impl.JavaObjectSerializer.java
@Override
public void serialize(Object value, DataOutput dataOutput) throws IOException {
    byte[] bytes = SerializationUtils.serialize((Serializable) value);
    dataOutput.writeInt(bytes.length);
    dataOutput.write(bytes);
}
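The snippet above length-prefixes the payload before writing it. A hedged sketch of the matching read side (the source file's deserializer is not shown here, so this counterpart is an assumption based on the framing):

// Hypothetical counterpart to the serializer above: read the length
// prefix, then the payload, and let SerializationUtils rebuild the object.
public Object deserialize(DataInput dataInput) throws IOException {
    int length = dataInput.readInt();  // length written by serialize()
    byte[] bytes = new byte[length];
    dataInput.readFully(bytes);        // read exactly 'length' bytes
    return SerializationUtils.deserialize(bytes);
}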
From source file:org.apache.eagle.alert.engine.serialization.JavaSerializationTest.java
@Test
public void testJavaSerialization() {
    PartitionedEvent partitionedEvent = new PartitionedEvent();
    partitionedEvent.setPartitionKey(partitionedEvent.hashCode());
    partitionedEvent.setPartition(createSampleStreamGroupbyPartition("sampleStream", Arrays.asList("name", "host")));
    StreamEvent event = new StreamEvent();
    event.setStreamId("sampleStream");
    event.setTimestamp(System.currentTimeMillis());
    event.setData(new Object[] { "CPU", "LOCALHOST", true, Long.MAX_VALUE, 60.0 });
    partitionedEvent.setEvent(event);

    int javaSerializationLength = SerializationUtils.serialize(partitionedEvent).length;
    LOG.info("Java serialization length: {}, event: {}", javaSerializationLength, partitionedEvent);

    int compactLength = 0;
    compactLength += "sampleStream".getBytes().length;
    compactLength += ByteUtils.intToBytes(partitionedEvent.getPartition().hashCode()).length;
    compactLength += ByteUtils.longToBytes(partitionedEvent.getTimestamp()).length;
    compactLength += "CPU".getBytes().length;
    compactLength += "LOCALHOST".getBytes().length;
    compactLength += 1;
    compactLength += ByteUtils.longToBytes(Long.MAX_VALUE).length;
    compactLength += ByteUtils.doubleToBytes(60.0).length;
    LOG.info("Compact serialization length: {}, event: {}", compactLength, partitionedEvent);

    Assert.assertTrue(compactLength * 20 < javaSerializationLength);
}
From source file:org.apache.flink.api.common.accumulators.ListAccumulator.java
@Override
public void add(T value) {
    byte[] byteArray = SerializationUtils.serialize((Serializable) value);
    localValue.add(byteArray);
}
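Reading the accumulated values back follows the same pattern in reverse. A sketch assuming localValue is a List<byte[]>, as the add method implies (this helper is hypothetical, not part of the source file):

// Hypothetical read-back for the accumulator above: each stored
// byte[] is an independent serialized copy of one added value.
public List<T> getLocalValuesDeserialized() {
    List<T> result = new ArrayList<>();
    for (byte[] bytes : localValue) {
        result.add(SerializationUtils.<T>deserialize(bytes));
    }
    return result;
}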
From source file:org.apache.flink.streaming.api.datastream.ConnectedDataStream.java
protected <OUT> SingleOutputStreamOperator<OUT, ?> addCoFunction(String functionName, final Function function,
        TypeWrapper<IN1> in1TypeWrapper, TypeWrapper<IN2> in2TypeWrapper, TypeWrapper<OUT> outTypeWrapper,
        CoInvokable<IN1, IN2, OUT> functionInvokable) {

    @SuppressWarnings({ "unchecked", "rawtypes" })
    SingleOutputStreamOperator<OUT, ?> returnStream = new SingleOutputStreamOperator(environment, functionName,
            outTypeWrapper);

    try {
        dataStream1.jobGraphBuilder.addCoTask(returnStream.getId(), functionInvokable, in1TypeWrapper,
                in2TypeWrapper, outTypeWrapper, functionName,
                SerializationUtils.serialize((Serializable) function), environment.getDegreeOfParallelism());
    } catch (SerializationException e) {
        throw new RuntimeException("Cannot serialize user defined function");
    }

    dataStream1.connectGraph(dataStream1, returnStream.getId(), 1);
    dataStream1.connectGraph(dataStream2, returnStream.getId(), 2);

    return returnStream;
}
From source file:org.apache.flink.streaming.api.datastream.DataStream.java
/**
 * Internal function for passing the user defined functions to the JobGraph
 * of the job.
 *
 * @param functionName
 *            name of the function
 * @param function
 *            the user defined function
 * @param functionInvokable
 *            the wrapping JobVertex instance
 * @param <R>
 *            type of the return stream
 * @return the data stream constructed
 */
protected <R> SingleOutputStreamOperator<R, ?> addFunction(String functionName, final Function function,
        TypeWrapper<OUT> inTypeWrapper, TypeWrapper<R> outTypeWrapper, StreamInvokable<OUT, R> functionInvokable) {
    DataStream<OUT> inputStream = this.copy();
    @SuppressWarnings({ "unchecked", "rawtypes" })
    SingleOutputStreamOperator<R, ?> returnStream = new SingleOutputStreamOperator(environment, functionName,
            outTypeWrapper);

    try {
        jobGraphBuilder.addStreamVertex(returnStream.getId(), functionInvokable, inTypeWrapper, outTypeWrapper,
                functionName, SerializationUtils.serialize((Serializable) function), degreeOfParallelism);
    } catch (SerializationException e) {
        throw new RuntimeException("Cannot serialize user defined function");
    }

    connectGraph(inputStream, returnStream.getId(), 0);

    if (inputStream instanceof IterativeDataStream) {
        IterativeDataStream<OUT> iterativeStream = (IterativeDataStream<OUT>) inputStream;
        returnStream.addIterationSource(iterativeStream.iterationID.toString(), iterativeStream.waitTime);
    }

    return returnStream;
}
From source file:org.apache.flink.streaming.api.datastream.DataStream.java
private DataStreamSink<OUT> addSink(DataStream<OUT> inputStream, SinkFunction<OUT> sinkFunction,
        TypeWrapper<OUT> inTypeWrapper) {
    DataStreamSink<OUT> returnStream = new DataStreamSink<OUT>(environment, "sink", outTypeWrapper);

    try {
        jobGraphBuilder.addStreamVertex(returnStream.getId(), new SinkInvokable<OUT>(sinkFunction),
                inTypeWrapper, null, "sink", SerializationUtils.serialize(sinkFunction), degreeOfParallelism);
    } catch (SerializationException e) {
        throw new RuntimeException("Cannot serialize SinkFunction");
    }

    inputStream.connectGraph(inputStream.copy(), returnStream.getId(), 0);

    return returnStream;
}
From source file:org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator.java
/**
 * Operator used for directing tuples to specific named outputs using an
 * {@link OutputSelector}. Calling this method on an operator creates a new
 * {@link SplitDataStream}.
 *
 * @param outputSelector
 *            The user defined {@link OutputSelector} for directing the
 *            tuples.
 * @return The {@link SplitDataStream}
 */
public SplitDataStream<OUT> split(OutputSelector<OUT> outputSelector) {
    try {
        jobGraphBuilder.setOutputSelector(id, SerializationUtils.serialize(outputSelector));
    } catch (SerializationException e) {
        throw new RuntimeException("Cannot serialize OutputSelector");
    }

    return new SplitDataStream<OUT>(this);
}
From source file:org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.java
/**
 * Creates a new DataStream that contains the given elements. The elements
 * must all be of the same type, for example, all String or all Integer.
 * The sequence of elements must not be empty. Furthermore, the elements
 * must be serializable (as defined in java.io.Serializable), because the
 * execution environment may ship the elements into the cluster.
 *
 * @param data
 *            The collection of elements to create the DataStream from.
 * @param <OUT>
 *            type of the returned stream
 * @return The DataStream representing the elements.
 */
public <OUT extends Serializable> DataStreamSource<OUT> fromElements(OUT... data) {
    if (data.length == 0) {
        throw new IllegalArgumentException("fromElements needs at least one element as argument");
    }

    TypeWrapper<OUT> outTypeWrapper = new ObjectTypeWrapper<OUT>(data[0]);
    DataStreamSource<OUT> returnStream = new DataStreamSource<OUT>(this, "elements", outTypeWrapper);

    try {
        SourceFunction<OUT> function = new FromElementsFunction<OUT>(data);
        jobGraphBuilder.addStreamVertex(returnStream.getId(), new SourceInvokable<OUT>(function), null,
                outTypeWrapper, "source", SerializationUtils.serialize(function), 1);
    } catch (SerializationException e) {
        throw new RuntimeException("Cannot serialize elements");
    }

    return returnStream;
}
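From the user's side this plumbing is invisible: the source function is serialized with SerializationUtils behind the scenes. A sketch of the call that exercises it, assuming this snapshot of the Flink streaming API (fromCollection in the next example is the Collection-based analogue):

// Hypothetical driver: fromElements builds a FromElementsFunction and
// ships its SerializationUtils.serialize(...) bytes into the job graph.
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStreamSource<String> source = env.fromElements("a", "b", "c");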
From source file:org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.java
/**
 * Creates a DataStream from the given non-empty collection. The type of
 * the DataStream is that of the elements in the collection. The elements
 * need to be serializable (as defined by java.io.Serializable), because
 * the framework may move the elements into the cluster if needed.
 *
 * @param data
 *            The collection of elements to create the DataStream from.
 * @param <OUT>
 *            type of the returned stream
 * @return The DataStream representing the elements.
 */
public <OUT extends Serializable> DataStreamSource<OUT> fromCollection(Collection<OUT> data) {
    if (data == null) {
        throw new NullPointerException("Collection must not be null");
    }
    if (data.isEmpty()) {
        throw new IllegalArgumentException("Collection must not be empty");
    }

    TypeWrapper<OUT> outTypeWrapper = new ObjectTypeWrapper<OUT>(data.iterator().next());
    DataStreamSource<OUT> returnStream = new DataStreamSource<OUT>(this, "elements", outTypeWrapper);

    try {
        SourceFunction<OUT> function = new FromElementsFunction<OUT>(data);
        jobGraphBuilder.addStreamVertex(returnStream.getId(), new SourceInvokable<OUT>(function), null,
                outTypeWrapper, "source", SerializationUtils.serialize(function), 1);
    } catch (SerializationException e) {
        throw new RuntimeException("Cannot serialize collection");
    }

    return returnStream;
}