List of usage examples for org.apache.hadoop.io.serializer.Serialization#getDeserializer
Deserializer<T> getDeserializer(Class<T> c);
From source file: com.datasalt.pangool.tuplemr.SerializationInfo.java
License: Apache License
/**
 * Builds one {@link Deserializer} for every field of the given schema that
 * declares a custom object serialization. Positions belonging to fields
 * without a custom serialization are left {@code null}.
 *
 * @param schema schema whose fields are inspected
 * @param conf Hadoop configuration used to instantiate serialization classes
 * @return array of deserializers indexed by field position (entries may be null)
 */
public static Deserializer[] getDeserializers(Schema schema, Configuration conf) {
    int fieldCount = schema.getFields().size();
    Deserializer[] deserializers = new Deserializer[fieldCount];

    for (int pos = 0; pos < fieldCount; pos++) {
        Field field = schema.getField(pos);

        if (field.getObjectSerialization() == null) {
            continue; // Field uses default serialization; no custom deserializer needed.
        }

        Serialization serialization = ReflectionUtils.newInstance(field.getObjectSerialization(), conf);

        if (serialization instanceof FieldConfigurable) {
            // Forward per-field properties to serializations that accept them.
            ((FieldConfigurable) serialization).setFieldProperties(field.getProps());
        }

        deserializers[pos] = serialization.getDeserializer(field.getObjectClass());
    }

    return deserializers;
}
From source file: cz.seznam.euphoria.hadoop.utils.Cloner.java
License: Apache License
/** * Help method retrieving a cloner for given class type from the * given configuration./* w ww . j av a 2 s.c o m*/ * * @param <T> the type of objects the resulting cloner will be able to handle * * @param what the class for which to retrieve a cloner * @param conf the hadoop configuration defining the serializer/deserializer * to utilize for cloning * * @return a cloner instance able to clone objects of the specified type */ static <T> Cloner<T> get(Class<T> what, Configuration conf) { SerializationFactory factory = new SerializationFactory(conf); Serialization<T> serialization = factory.getSerialization(what); if (serialization == null) { // FIXME: if we cannot (de)serialize just do not clone return t -> t; } Deserializer<T> deserializer = serialization.getDeserializer(what); Serializer<T> serializer = serialization.getSerializer(what); return (T elem) -> { try { ByteArrayOutputStream baos = new ByteArrayOutputStream(); serializer.open(baos); serializer.serialize(elem); serializer.close(); byte[] serialized = baos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(serialized); deserializer.open(bais); T deserialized = deserializer.deserialize(null); deserializer.close(); return deserialized; } catch (IOException ex) { throw new RuntimeException(ex); } }; }
From source file: org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopSerializationWrapper.java
License: Apache License
/**
 * Wraps an external Hadoop serialization for a single class and opens the
 * serializer/deserializer over this wrapper's internal streams.
 *
 * @param serialization External serialization to wrap.
 * @param cls The class whose instances will be (de)serialized; must not be null.
 * @throws IgniteCheckedException If opening either underlying stream fails.
 */
public HadoopSerializationWrapper(Serialization<T> serialization, Class<T> cls) throws IgniteCheckedException {
    assert cls != null;

    deserializer = serialization.getDeserializer(cls);
    serializer = serialization.getSerializer(cls);

    try {
        serializer.open(outStream);
        deserializer.open(inStream);
    }
    catch (IOException e) {
        // Wrap the I/O failure in the platform's checked exception type.
        throw new IgniteCheckedException(e);
    }
}
From source file: org.apache.ignite.internal.processors.hadoop.impl.v2.HadoopV2TaskContext.java
License: Apache License
/** * @param split External split./*from www .j av a2 s . c o m*/ * @return Native input split. * @throws IgniteCheckedException If failed. */ @SuppressWarnings("unchecked") private Object readExternalSplit(HadoopExternalSplit split) throws IgniteCheckedException { Path jobDir = new Path(jobConf().get(MRJobConfig.MAPREDUCE_JOB_DIR)); FileSystem fs; try { fs = fileSystemForMrUserWithCaching(jobDir.toUri(), jobConf(), fsMap); } catch (IOException e) { throw new IgniteCheckedException(e); } try (FSDataInputStream in = fs.open(JobSubmissionFiles.getJobSplitFile(jobDir))) { in.seek(split.offset()); String clsName = Text.readString(in); Class<?> cls = jobConf().getClassByName(clsName); assert cls != null; Serialization serialization = new SerializationFactory(jobConf()).getSerialization(cls); Deserializer deserializer = serialization.getDeserializer(cls); deserializer.open(in); Object res = deserializer.deserialize(null); deserializer.close(); assert res != null; return res; } catch (IOException | ClassNotFoundException e) { throw new IgniteCheckedException(e); } }
From source file: org.apache.ignite.internal.processors.hadoop.v2.GridHadoopSerializationWrapper.java
License: Apache License
/**
 * Wraps an external Hadoop serialization for a single class and opens the
 * serializer/deserializer over this wrapper's internal streams.
 *
 * @param serialization External serialization to wrap.
 * @param cls The class whose instances will be (de)serialized; must not be null.
 * @throws IgniteCheckedException If opening either underlying stream fails.
 */
public GridHadoopSerializationWrapper(Serialization<T> serialization, Class<T> cls) throws IgniteCheckedException {
    assert cls != null;

    deserializer = serialization.getDeserializer(cls);
    serializer = serialization.getSerializer(cls);

    try {
        serializer.open(outStream);
        deserializer.open(inStream);
    }
    catch (IOException e) {
        // Wrap the I/O failure in the platform's checked exception type.
        throw new IgniteCheckedException(e);
    }
}
From source file: org.apache.ignite.internal.processors.hadoop.v2.GridHadoopV2TaskContext.java
License: Apache License
/**
 * Reads and deserializes the native Hadoop input split referenced by an
 * external split descriptor. The split class name and payload are read from
 * the job's split file at the offset carried by the descriptor.
 *
 * @param split External split pointing into the job's split file.
 * @return Native input split object (never null).
 * @throws IgniteCheckedException If the split file cannot be read or the
 *         split class cannot be resolved.
 */
@SuppressWarnings("unchecked")
private Object readExternalSplit(GridHadoopExternalSplit split) throws IgniteCheckedException {
    Path jobDir = new Path(jobConf().get(MRJobConfig.MAPREDUCE_JOB_DIR));

    try (FileSystem fs = FileSystem.get(jobDir.toUri(), jobConf());
        FSDataInputStream in = fs.open(JobSubmissionFiles.getJobSplitFile(jobDir))) {
        in.seek(split.offset());

        String clsName = Text.readString(in);

        Class<?> cls = jobConf().getClassByName(clsName);

        assert cls != null;

        Serialization serialization = new SerializationFactory(jobConf()).getSerialization(cls);

        Deserializer deserializer = serialization.getDeserializer(cls);

        deserializer.open(in);

        try {
            Object res = deserializer.deserialize(null);

            assert res != null;

            return res;
        }
        finally {
            // Close even when deserialization fails, so the deserializer's
            // resources are not leaked on the error path.
            deserializer.close();
        }
    }
    catch (IOException | ClassNotFoundException e) {
        throw new IgniteCheckedException(e);
    }
}
From source file: org.apache.ignite.internal.processors.hadoop.v2.HadoopV2TaskContext.java
License: Apache License
/** * @param split External split./*from w w w.j a v a 2s .c o m*/ * @return Native input split. * @throws IgniteCheckedException If failed. */ @SuppressWarnings("unchecked") private Object readExternalSplit(HadoopExternalSplit split) throws IgniteCheckedException { Path jobDir = new Path(jobConf().get(MRJobConfig.MAPREDUCE_JOB_DIR)); try (FileSystem fs = FileSystem.get(jobDir.toUri(), jobConf()); FSDataInputStream in = fs.open(JobSubmissionFiles.getJobSplitFile(jobDir))) { in.seek(split.offset()); String clsName = Text.readString(in); Class<?> cls = jobConf().getClassByName(clsName); assert cls != null; Serialization serialization = new SerializationFactory(jobConf()).getSerialization(cls); Deserializer deserializer = serialization.getDeserializer(cls); deserializer.open(in); Object res = deserializer.deserialize(null); deserializer.close(); assert res != null; return res; } catch (IOException | ClassNotFoundException e) { throw new IgniteCheckedException(e); } }
From source file: org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopSerializationWrapper.java
License: Open Source License
/**
 * Wraps an external Hadoop serialization for a single class and opens the
 * serializer/deserializer over this wrapper's internal streams.
 *
 * @param serialization External serialization to wrap.
 * @param cls The class whose instances will be (de)serialized; must not be null.
 * @throws GridException If opening either underlying stream fails.
 */
public GridHadoopSerializationWrapper(Serialization<T> serialization, Class<T> cls) throws GridException {
    assert cls != null;

    deserializer = serialization.getDeserializer(cls);
    serializer = serialization.getSerializer(cls);

    try {
        serializer.open(outStream);
        deserializer.open(inStream);
    }
    catch (IOException e) {
        // Wrap the I/O failure in the platform's checked exception type.
        throw new GridException(e);
    }
}
From source file: org.gridgain.grid.kernal.processors.hadoop.v2.GridHadoopV2TaskContext.java
License: Open Source License
/**
 * Reads and deserializes the native Hadoop input split referenced by an
 * external split descriptor. The split class name and payload are read from
 * the job's split file at the offset carried by the descriptor.
 *
 * @param split External split pointing into the job's split file.
 * @return Native input split object (never null).
 * @throws GridException If the split file cannot be read or the split class
 *         cannot be resolved.
 */
@SuppressWarnings("unchecked")
private Object readExternalSplit(GridHadoopExternalSplit split) throws GridException {
    Path jobDir = new Path(jobConf().get(MRJobConfig.MAPREDUCE_JOB_DIR));

    try (FileSystem fs = FileSystem.get(jobDir.toUri(), jobConf());
        FSDataInputStream in = fs.open(JobSubmissionFiles.getJobSplitFile(jobDir))) {
        in.seek(split.offset());

        String clsName = Text.readString(in);

        Class<?> cls = jobConf().getClassByName(clsName);

        assert cls != null;

        Serialization serialization = new SerializationFactory(jobConf()).getSerialization(cls);

        Deserializer deserializer = serialization.getDeserializer(cls);

        deserializer.open(in);

        try {
            Object res = deserializer.deserialize(null);

            assert res != null;

            return res;
        }
        finally {
            // Close even when deserialization fails, so the deserializer's
            // resources are not leaked on the error path.
            deserializer.close();
        }
    }
    catch (IOException | ClassNotFoundException e) {
        throw new GridException(e);
    }
}