List of usage examples for org.apache.hadoop.io.serializer Serialization interface-usage
From source file cascading.avro.serialization.AvroSpecificRecordSerialization.java
/** * The {@link org.apache.hadoop.io.serializer.Serialization} used by jobs configured with {@link org.apache.avro.mapred.AvroJob}. */ public class AvroSpecificRecordSerialization<T> extends Configured implements Serialization<T> { public boolean accept(Class<?> c) {
From source file cascading.tuple.hadoop.BigDecimalSerialization.java
/**
* Class BigDecimalSerialization is an implementation of Hadoop's {@link org.apache.hadoop.io.serializer.Serialization} interface for use
* by {@link BigDecimal} instances.
* <p/>
* To use, call<br/>
* {@code TupleSerializationProps.addSerialization(properties, BigDecimalSerialization.class.getName());}
From source file cascading.tuple.hadoop.BytesSerialization.java
/**
* Class BytesSerialization is an implementation of Hadoop's {@link Serialization} interface for use
* by {@code byte} arrays ({@code byte[]}).
* <p/>
* To use, call<br/>
 * {@code TupleSerialization.addSerialization(properties, BytesSerialization.class.getName());}
From source file cascading.tuple.hadoop.TestSerialization.java
/** * */ @SerializationToken(tokens = { 222 }, classNames = { "cascading.tuple.hadoop.TestText" }) public class TestSerialization extends Configured implements Comparison<TestText>, Serialization<TestText> {
From source file cascading.tuple.hadoop.TupleSerialization.java
/**
* Class TupleSerialization is an implementation of Hadoop's {@link Serialization} interface.
* <p/>
* Typically developers will not use this implementation directly as it is automatically added
* to any relevant MapReduce jobs via the {@link JobConf}.
* <p/>
From source file colossal.pipe.ColAvroSerialization.java
/** * Avro serialization format used by Colossal pipe. Derived from the Avro map/reduce framework format. * The {@link Serialization} used by jobs configured with {@link ColPhase}. */ public class ColAvroSerialization<T> extends Configured implements Serialization<AvroWrapper<T>> {
From source file com.ci.backports.avro.mapred.AvroSerialization.java
/** The {@link Serialization} used by jobs configured with {@link AvroJob}. */ public class AvroSerialization<T> extends Configured implements Serialization<AvroWrapper<T>> { public boolean accept(Class<?> c) { return AvroWrapper.class.isAssignableFrom(c); }
From source file com.cloudera.crunch.type.avro.SafeAvroSerialization.java
/** The {@link Serialization} used by jobs configured with {@link AvroJob}. */ public class SafeAvroSerialization<T> extends Configured implements Serialization<AvroWrapper<T>> { public boolean accept(Class<?> c) { return AvroWrapper.class.isAssignableFrom(c); }
From source file com.conversantmedia.mapreduce.io.avro.MultiSchemaAvroSerialization.java
/** * @param <T> Avro record type */ public class MultiSchemaAvroSerialization<T> extends Configured implements Serialization<AvroMultiWrapper<T>> {
From source file com.conversantmedia.mapreduce.io.CompositeSortKeySerialization.java
/**
* Encapsulates the serialization functionality for a job
* using the {@link CompositeSortKey} key.
*
* @param <G> Grouping/Partitioning key type
* @param <S> Sorting key type