List of usage examples for org.apache.hadoop.io.serializer Deserializer close
void close() throws IOException;
Close the underlying input stream and clear up any resources.
From source file:cascading.tuple.hadoop.SerializationElementReader.java
License:Open Source License
public void close() { if (deserializers.size() == 0) return;/*from w w w. jav a 2s . c om*/ Collection<Deserializer> clone = new ArrayList<Deserializer>(deserializers.values()); deserializers.clear(); for (Deserializer deserializer : clone) { try { deserializer.close(); } catch (IOException exception) { // do nothing } } }
From source file:com.datasalt.pangool.serialization.HadoopSerialization.java
License:Apache License
/** * Deseerializes into the given object using the Hadoop serialization system. * Object cannot be null./*from ww w .ja va 2 s . c o m*/ */ public <T> T deser(Object obj, InputStream in) throws IOException { Map<Class, Deserializer> deserializers = cachedDeserializers.get(); Deserializer deSer = deserializers.get(obj.getClass()); if (deSer == null) { deSer = serialization.getDeserializer(obj.getClass()); deserializers.put(obj.getClass(), deSer); } deSer.open(in); obj = deSer.deserialize(obj); deSer.close(); return (T) obj; }
From source file:com.datasalt.pangool.serialization.HadoopSerialization.java
License:Apache License
/**
 * Returns a new instance of the given class with the deserialized data from
 * the input stream.
 *
 * @param clazz the class to instantiate and populate
 * @param in    the stream to read the serialized form from
 * @return a freshly created, deserialized instance
 * @throws IOException if the underlying deserializer fails
 */
public <T> T deser(Class clazz, InputStream in) throws IOException {
    // Per-thread, per-class deserializer cache: factory lookup happens once.
    Map<Class, Deserializer> deserializers = cachedDeserializers.get();
    Deserializer deSer = deserializers.get(clazz);
    if (deSer == null) {
        deSer = serialization.getDeserializer(clazz);
        deserializers.put(clazz, deSer);
    }
    deSer.open(in);
    Object obj;
    try {
        // null target: the deserializer allocates a new instance.
        obj = deSer.deserialize(null);
    } finally {
        // BUGFIX: close even on failure so the cached deserializer is not
        // left attached to a stale stream for the next caller.
        deSer.close();
    }
    return (T) obj;
}
From source file:com.datasalt.pangool.serialization.HadoopSerialization.java
License:Apache License
/**
 * Deserializes an object using Hadoop serialization from a byte array. The
 * object cannot be null.
 *
 * @param obj    the instance to deserialize into (selects the deserializer by class)
 * @param array  the buffer holding the serialized form
 * @param offset first byte of the serialized form within {@code array}
 * @param length number of serialized bytes
 * @return the deserialized object (may be the same instance as {@code obj})
 * @throws IOException if the underlying deserializer fails
 */
public <T> T deser(Object obj, byte[] array, int offset, int length) throws IOException {
    // Per-thread, per-class deserializer cache.
    Map<Class, Deserializer> deserializers = cachedDeserializers.get();
    Deserializer deSer = deserializers.get(obj.getClass());
    if (deSer == null) {
        deSer = serialization.getDeserializer(obj.getClass());
        deserializers.put(obj.getClass(), deSer);
    }
    // Reuse the thread-local buffer; reset() points it at the caller's bytes
    // without copying.
    DataInputBuffer baIs = cachedInputStream.get();
    baIs.reset(array, offset, length);
    deSer.open(baIs);
    try {
        obj = deSer.deserialize(obj);
    } finally {
        // BUGFIX: release both the cached deserializer and the buffer even
        // when deserialize() throws, so they are reusable by the next call.
        deSer.close();
        baIs.close();
    }
    return (T) obj;
}
From source file:com.datasalt.pangool.tuplemr.serialization.SimpleTupleDeserializer.java
License:Apache License
/**
 * Reads one custom-serialized field value from {@code input} and stores it
 * into the tuple at {@code index}.
 *
 * The wire format starts with a VInt length header. A negative header is
 * treated as corruption and raised as an IOException.
 *
 * @param input        the stream positioned at the field's length header
 * @param tuple        the tuple to update in place
 * @param expectedType type instantiated when the tuple slot is empty
 * @param index        the tuple slot being filled
 * @param customDeser  optional field-specific deserializer; when null, the
 *                     generic Hadoop serialization path is used
 * @throws IOException on read failure or a negative length header
 */
protected void readCustomObject(DataInputStream input, ITuple tuple, Class<?> expectedType, int index,
        Deserializer customDeser) throws IOException {
    int size = WritableUtils.readVInt(input);
    if (size >= 0) {
        // Reuse the object already in the slot as the deserialization target
        // when possible (avoids allocation).
        Object object = tuple.get(index);
        if (customDeser != null) {
            // Custom path: the deserializer consumes its bytes directly from
            // the stream; the length header is not used to bound the read here.
            customDeser.open(input);
            object = customDeser.deserialize(object);
            customDeser.close();
            tuple.set(index, object);
        } else {
            if (object == null) {
                tuple.set(index, ReflectionUtils.newInstance(expectedType, conf));
            }
            // Generic path: copy exactly `size` bytes into the scratch buffer,
            // then deserialize from that buffer.
            // NOTE(review): assumes tmpInputBuffer.setSize(size) grows the
            // backing array to at least `size` bytes — TODO confirm.
            tmpInputBuffer.setSize(size);
            input.readFully(tmpInputBuffer.getBytes(), 0, size);
            Object ob = ser.deser(tuple.get(index), tmpInputBuffer.getBytes(), 0, size);
            tuple.set(index, ob);
        }
    } else {
        throw new IOException("Error deserializing, custom object serialized with negative length : " + size);
    }
}
From source file:com.datasalt.utils.io.Serialization.java
License:Apache License
/**
 * Deserializes into the given object from {@code datum[offset .. offset+length)}
 * using Hadoop serialization. The object cannot be null.
 *
 * @param obj    the instance to deserialize into (selects the deserializer by class)
 * @param datum  the buffer holding the serialized form
 * @param offset first byte of the serialized form within {@code datum}
 * @param length number of serialized bytes
 * @return the deserialized object (may be the same instance as {@code obj})
 * @throws IOException if the underlying deserializer fails
 */
public <T> T deser(Object obj, byte[] datum, int offset, int length) throws IOException {
    Deserializer deSer = serialization.getDeserializer(obj.getClass());
    DataInputBuffer baIs = cachedInputStream.get();
    // BUGFIX: the original called baIs.reset(datum, length), which starts
    // reading at position 0 and silently ignores `offset`. The three-arg
    // overload honors the requested window.
    baIs.reset(datum, offset, length);
    deSer.open(baIs);
    try {
        obj = deSer.deserialize(obj);
    } finally {
        // Release resources even when deserialize() throws.
        deSer.close();
        baIs.close();
    }
    return (T) obj;
}
From source file:com.datatorrent.demos.mroperator.OutputCollectorImpl.java
License:Open Source License
private <T> T cloneObj(T t) throws IOException { Serializer<T> keySerializer;// w ww. j a v a 2 s .c o m Class<T> keyClass; PipedInputStream pis = new PipedInputStream(); PipedOutputStream pos = new PipedOutputStream(pis); keyClass = (Class<T>) t.getClass(); keySerializer = serializationFactory.getSerializer(keyClass); keySerializer.open(pos); keySerializer.serialize(t); Deserializer<T> keyDesiralizer = serializationFactory.getDeserializer(keyClass); keyDesiralizer.open(pis); T clonedArg0 = keyDesiralizer.deserialize(null); pos.close(); pis.close(); keySerializer.close(); keyDesiralizer.close(); return clonedArg0; }
From source file:com.google.cloud.bigtable.beam.sequencefiles.HadoopSerializationCoder.java
License:Open Source License
/** {@inheritDoc} */ @Override//from w ww .j a v a 2s.c o m public T decode(InputStream inStream) throws CoderException, IOException { Deserializer<T> deserializer = serialization.getDeserializer(type); deserializer.open(new UncloseableInputStream(inStream)); try { return deserializer.deserialize(null); } finally { deserializer.close(); } }
From source file:cz.seznam.euphoria.hadoop.utils.Cloner.java
License:Apache License
/** * Help method retrieving a cloner for given class type from the * given configuration./*w w w. j a va 2s . c o m*/ * * @param <T> the type of objects the resulting cloner will be able to handle * * @param what the class for which to retrieve a cloner * @param conf the hadoop configuration defining the serializer/deserializer * to utilize for cloning * * @return a cloner instance able to clone objects of the specified type */ static <T> Cloner<T> get(Class<T> what, Configuration conf) { SerializationFactory factory = new SerializationFactory(conf); Serialization<T> serialization = factory.getSerialization(what); if (serialization == null) { // FIXME: if we cannot (de)serialize just do not clone return t -> t; } Deserializer<T> deserializer = serialization.getDeserializer(what); Serializer<T> serializer = serialization.getSerializer(what); return (T elem) -> { try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); serializer.open(baos); serializer.serialize(elem); serializer.close(); byte[] serialized = baos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(serialized); deserializer.open(bais); T deserialized = deserializer.deserialize(null); deserializer.close(); return deserialized; } catch (IOException ex) { throw new RuntimeException(ex); } }; }
From source file:org.apache.avro.hadoop.io.TestAvroSerialization.java
License:Apache License
private <T, O> O roundTrip(Schema schema, T data, Class<? extends GenericData> modelClass) throws IOException { Job job = new Job(); AvroJob.setMapOutputKeySchema(job, schema); if (modelClass != null) AvroJob.setDataModelClass(job, modelClass); AvroSerialization serialization = ReflectionUtils.newInstance(AvroSerialization.class, job.getConfiguration());// ww w . j av a 2s . c o m Serializer<AvroKey<T>> serializer = serialization.getSerializer(AvroKey.class); Deserializer<AvroKey<O>> deserializer = serialization.getDeserializer(AvroKey.class); ByteArrayOutputStream baos = new ByteArrayOutputStream(); serializer.open(baos); serializer.serialize(new AvroKey<T>(data)); serializer.close(); ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray()); deserializer.open(bais); AvroKey<O> result = null; result = deserializer.deserialize(result); deserializer.close(); return result.datum(); }