Referring to the implementation of MapPartitionsRDD:
package org.apache.spark.rdd

import scala.reflect.ClassTag
import org.apache.spark.{Partition, TaskContext}

private[spark] class MapPartitionsRDD[U: ClassTag, T: ClassTag](
    var prev: RDD[T],
    f: (TaskContext, Int, Iterator[T]) => Iterator[U],  // (TaskContext, partition index, iterator)
    preservesPartitioning: Boolean = false)
  extends RDD[U](prev) {

  // Keep the parent's partitioner only when f preserves the key layout.
  override val partitioner = if (preservesPartitioning) firstParent[T].partitioner else None

  // One-to-one dependency: reuse the parent RDD's partitions directly.
  override def getPartitions: Array[Partition] = firstParent[T].partitions

  // Compute this partition by applying f to the parent's iterator for the same split.
  override def compute(split: Partition, context: TaskContext): Iterator[U] =
    f(context, split.index, firstParent[T].iterator(split, context))

  // Drop the reference to the parent so the old lineage can be garbage collected.
  override def clearDependencies() {
    super.clearDependencies()
    prev = null
  }
}
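
A custom RDD can follow the same template. The sketch below is only an illustration, not Spark source code: DropFirstRDD is a made-up class that drops the first element of every parent partition, but its overridden members mirror the ones in MapPartitionsRDD above: reuse the parent's partitions and partitioner, compute from the parent's iterator, and null out the parent reference in clearDependencies().

import scala.reflect.ClassTag
import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.rdd.RDD

// Hypothetical example: drop the first element of every parent partition.
class DropFirstRDD[T: ClassTag](var prev: RDD[T]) extends RDD[T](prev) {

  // Elements are only removed, never moved across partitions,
  // so the parent's partitioner still applies.
  override val partitioner = firstParent[T].partitioner

  // One-to-one dependency: same partitions as the parent.
  override def getPartitions: Array[Partition] = firstParent[T].partitions

  // Pull the parent's iterator for this split and skip its first element.
  override def compute(split: Partition, context: TaskContext): Iterator[T] =
    firstParent[T].iterator(split, context).drop(1)

  // Release the parent reference once the dependency is no longer needed.
  override def clearDependencies() {
    super.clearDependencies()
    prev = null
  }
}

Such an RDD is instantiated directly against an existing RDD, for example new DropFirstRDD(sc.parallelize(1 to 10, 2)).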