List of usage examples for the org.apache.spark.api.java.function.PairFlatMapFunction interface
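The entries below show how several projects implement this interface. For orientation, here is a minimal, self-contained sketch (not taken from any of the files below) that implements the Spark 2.x form of the interface, where call returns an Iterator, and applies it with JavaRDD.flatMapToPair; the class name WordPairSketch is purely illustrative.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.spark.api.java.function.PairFlatMapFunction;

import scala.Tuple2;

// Splits each input line into (word, 1) pairs.
public class WordPairSketch implements PairFlatMapFunction<String, String, Integer> {
    private static final long serialVersionUID = 1L;

    @Override
    public Iterator<Tuple2<String, Integer>> call(String line) throws Exception {
        List<Tuple2<String, Integer>> pairs = new ArrayList<>();
        for (String word : line.split("\\s+")) {
            pairs.add(new Tuple2<>(word, 1));
        }
        return pairs.iterator();
    }
}

// Usage (lines is a JavaRDD<String>):
// JavaPairRDD<String, Integer> counts =
//     lines.flatMapToPair(new WordPairSketch()).reduceByKey((a, b) -> a + b);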
From source file org.apache.eagle.alert.engine.spark.function.CorrelationSpoutSparkFunction.java
public class CorrelationSpoutSparkFunction implements PairFlatMapFunction<Tuple2<String, String>, Integer, PartitionedEvent> {
    private static final long serialVersionUID = -5281723341236671580L;
    private static final Logger LOG = LoggerFactory.getLogger(CorrelationSpoutSparkFunction.class);
From source file org.apache.eagle.alert.engine.spark.function.StreamRouteBoltFunction.java
public class StreamRouteBoltFunction implements PairFlatMapFunction<Iterator<Tuple2<Integer, Iterable<PartitionedEvent>>>, Integer, PartitionedEvent> {
    private static final Logger LOG = LoggerFactory.getLogger(StreamRouteBoltFunction.class);
    private static final long serialVersionUID = -7211470889316430372L;
    private AtomicReference<Map<String, StreamDefinition>> sdsRef;
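Because the input type here is an Iterator over the partition's tuples, a function of this shape is applied with mapPartitionsToPair rather than flatMapToPair, so per-partition state (such as the stream definitions held in sdsRef) can be set up once per partition. A minimal, self-contained sketch of that pattern with simplified, hypothetical types (not Eagle's actual wiring):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.spark.api.java.function.PairFlatMapFunction;

import scala.Tuple2;

// Hypothetical partition-level function: flattens (key, Iterable<value>) groups back into (key, value) pairs.
public class FlattenGroupsSketch
        implements PairFlatMapFunction<Iterator<Tuple2<Integer, Iterable<String>>>, Integer, String> {
    private static final long serialVersionUID = 1L;

    @Override
    public Iterator<Tuple2<Integer, String>> call(Iterator<Tuple2<Integer, Iterable<String>>> partition)
            throws Exception {
        // Any per-partition setup (lookup tables, connections) would happen once, here.
        List<Tuple2<Integer, String>> out = new ArrayList<>();
        while (partition.hasNext()) {
            Tuple2<Integer, Iterable<String>> group = partition.next();
            for (String value : group._2()) {
                out.add(new Tuple2<>(group._1(), value));
            }
        }
        return out.iterator();
    }
}

// Usage (grouped is a JavaPairRDD<Integer, Iterable<String>>, e.g. the output of groupByKey):
// JavaPairRDD<Integer, String> flattened = grouped.mapPartitionsToPair(new FlattenGroupsSketch());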
From source file org.apache.hadoop.hive.ql.exec.spark.HivePairFlatMapFunction.java
public abstract class HivePairFlatMapFunction<T, K, V> implements PairFlatMapFunction<T, K, V> {
    private final NumberFormat taskIdFormat = NumberFormat.getInstance();
    private final NumberFormat stageIdFormat = NumberFormat.getInstance();
    {
        taskIdFormat.setGroupingUsed(false);
        taskIdFormat.setMinimumIntegerDigits(6);
From source file org.apache.mrql.FmFunction.java
abstract class PairFmFunction<T, K, V> implements PairFlatMapFunction<T, K, V> {
    abstract Iterator<Tuple2<K, V>> eval(T t) throws Exception;

    public Iterable<Tuple2<K, V>> call(final T t) throws Exception {
        return new Iterable<Tuple2<K, V>>() {
            public Iterator<Tuple2<K, V>> iterator() {
From source file org.apache.mrql.FmFunction.java
abstract class PairFmFunction<T, K, V> implements PairFlatMapFunction<T, K, V> {
    abstract Iterator<Tuple2<K, V>> eval(T t) throws Exception;

    public Iterator<Tuple2<K, V>> call(final T t) throws Exception {
        return eval(t);
    }
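The two PairFmFunction excerpts above differ only in the return type of call: the first follows the Spark 1.x contract, where PairFlatMapFunction.call returns Iterable<Tuple2<K, V>>, while the second follows the Spark 2.x contract, where it returns Iterator<Tuple2<K, V>>. The two variants presumably track the different Spark versions MRQL supports.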
From source file org.apache.pirk.responder.wideskies.spark.EncRowCalc.java
/**
* Function to calculate the encrypted rows of the encrypted query
* <p>
* For each row (as indicated by key = hash(selector)), iterates over each dataElement and calculates the column values.
* <p>
* Emits {@code <colNum, colVal>}
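A minimal sketch of the <colNum, colVal> emission pattern that comment describes, with simplified types and placeholder values (RowToColumnsSketch and its signature are illustrative; the real EncRowCalc performs the encrypted column computation, which is omitted here):

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.spark.api.java.function.PairFlatMapFunction;

import scala.Tuple2;

// For each row (keyed by a selector hash), walk its data elements and emit one (colNum, colVal) pair per part.
public class RowToColumnsSketch
        implements PairFlatMapFunction<Tuple2<Integer, Iterable<List<BigInteger>>>, Long, BigInteger> {
    private static final long serialVersionUID = 1L;

    @Override
    public Iterator<Tuple2<Long, BigInteger>> call(Tuple2<Integer, Iterable<List<BigInteger>>> row)
            throws Exception {
        List<Tuple2<Long, BigInteger>> columns = new ArrayList<>();
        long colNum = 0;
        for (List<BigInteger> dataElement : row._2()) {
            for (BigInteger part : dataElement) {
                // Placeholder: the real implementation computes an encrypted column value here.
                columns.add(new Tuple2<>(colNum++, part));
            }
        }
        return columns.iterator();
    }
}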
From source file org.apache.pirk.responder.wideskies.spark.EncRowCalcPrecomputedCache.java
/**
 * Functionality for computing the encrypted rows using a pre-computed, passed in modular exponentiation lookup table
 */
public class EncRowCalcPrecomputedCache implements
        PairFlatMapFunction<Tuple2<Integer, Tuple2<Iterable<Tuple2<Integer, BigInteger>>, Iterable<List<BigInteger>>>>, Long, BigInteger> {
    private static final long serialVersionUID = 1L;
From source file org.apache.pirk.responder.wideskies.spark.ExpKeyFilenameMap.java
/**
 * Class to map the query hash to its modular exponentiation lookup file in HDFS
 */
public class ExpKeyFilenameMap implements
        PairFlatMapFunction<Iterator<Tuple2<Integer, Iterable<Tuple2<Integer, BigInteger>>>>, Integer, String> {
    private static final long serialVersionUID = 1L;
From source file org.apache.pirk.responder.wideskies.spark.ExpTableGenerator.java
/**
 * Class to generate the query element modular exponentiations
 */
public class ExpTableGenerator implements PairFlatMapFunction<Integer, Integer, Tuple2<Integer, BigInteger>> {
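A hypothetical generator with the same shape as ExpTableGenerator, showing how per-element modular exponentiation tables can be emitted as (element, (power, element^power mod modulus)) pairs; the class name, constructor, modulus, and exponent range are placeholders, not Pirk's actual parameters.

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.spark.api.java.function.PairFlatMapFunction;

import scala.Tuple2;

// Emits a small modular exponentiation table for each input element.
public class ModExpTableSketch implements PairFlatMapFunction<Integer, Integer, Tuple2<Integer, BigInteger>> {
    private static final long serialVersionUID = 1L;

    private final BigInteger modulus;
    private final int maxPower;

    public ModExpTableSketch(BigInteger modulus, int maxPower) {
        this.modulus = modulus;
        this.maxPower = maxPower;
    }

    @Override
    public Iterator<Tuple2<Integer, Tuple2<Integer, BigInteger>>> call(Integer element) throws Exception {
        List<Tuple2<Integer, Tuple2<Integer, BigInteger>>> table = new ArrayList<>();
        BigInteger base = BigInteger.valueOf(element);
        for (int power = 0; power <= maxPower; power++) {
            table.add(new Tuple2<>(element, new Tuple2<>(power, base.modPow(BigInteger.valueOf(power), modulus))));
        }
        return table.iterator();
    }
}

// Usage (queryElements is a JavaRDD<Integer>):
// JavaPairRDD<Integer, Tuple2<Integer, BigInteger>> expTable =
//     queryElements.flatMapToPair(new ModExpTableSketch(modulus, maxPower));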
From source file org.apache.sysml.runtime.controlprogram.paramserv.dp.DataPartitionerSparkMapper.java
public class DataPartitionerSparkMapper implements
        PairFlatMapFunction<Tuple2<Long, Tuple2<MatrixBlock, MatrixBlock>>, Integer, Tuple2<Long, Tuple2<MatrixBlock, MatrixBlock>>>, Serializable {
    private static final long serialVersionUID = 1710721606050403296L;
    private int _workersNum;