class RDDBarrier[T] extends AnyRef
:: Experimental :: Wraps an RDD in a barrier stage, which forces Spark to launch tasks of this stage together. org.apache.spark.rdd.RDDBarrier instances are created by org.apache.spark.rdd.RDD#barrier.
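Example: a minimal sketch of creating and running a barrier stage. This is illustrative, not part of the original Scaladoc; it assumes a local SparkContext named sc with at least 4 cores, since barrier execution requires enough free slots to launch every task of the stage at once.

```scala
import org.apache.spark.{SparkConf, SparkContext}

val sc = new SparkContext(
  new SparkConf().setAppName("barrier-demo").setMaster("local[4]"))

// RDD#barrier wraps the RDD in an RDDBarrier; the transformation applied
// to it runs as a barrier stage, so all 4 tasks are launched together.
val doubled = sc.parallelize(1 to 100, numSlices = 4)
  .barrier()
  .mapPartitions(iter => iter.map(_ * 2))

doubled.collect()
```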
- Annotations: @Experimental() @Since( "2.4.0" )
- Source: RDDBarrier.scala
Inheritance: RDDBarrier → AnyRef → Any
Value Members
- final def !=(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- final def ##(): Int
  Definition Classes: AnyRef → Any
- final def ==(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- final def asInstanceOf[T0]: T0
  Definition Classes: Any
- def clone(): AnyRef
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()
- final def eq(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- def equals(arg0: Any): Boolean
  Definition Classes: AnyRef → Any
- def finalize(): Unit
  Attributes: protected[lang]
  Definition Classes: AnyRef
  Annotations: @throws( classOf[java.lang.Throwable] )
- final def getClass(): Class[_]
  Definition Classes: AnyRef → Any
  Annotations: @native()
- def hashCode(): Int
  Definition Classes: AnyRef → Any
  Annotations: @native()
- final def isInstanceOf[T0]: Boolean
  Definition Classes: Any
- def mapPartitions[S](f: (Iterator[T]) ⇒ Iterator[S], preservesPartitioning: Boolean = false)(implicit arg0: ClassTag[S]): RDD[S]
  :: Experimental :: Returns a new RDD by applying a function to each partition of the wrapped RDD, where tasks are launched together in a barrier stage. The interface is the same as org.apache.spark.rdd.RDD#mapPartitions; see the API doc there.
  Annotations: @Experimental() @Since( "2.4.0" )
  See also: org.apache.spark.rdd.RDD#mapPartitions
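Example: a sketch of coordinating tasks inside a barrier mapPartitions, reusing the sc from the sketch above. The BarrierTaskContext calls (get, barrier, partitionId) are the real Spark API; the two-phase computation itself is made up for illustration.

```scala
import org.apache.spark.BarrierTaskContext

val sums = sc.parallelize(1 to 100, 4)
  .barrier()
  .mapPartitions { iter =>
    val ctx = BarrierTaskContext.get()
    val localSum = iter.sum // local phase, runs independently in each task
    ctx.barrier()           // global sync: blocks until every task arrives here
    Iterator.single((ctx.partitionId(), localSum))
  }
```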
- def mapPartitionsWithEvaluator[U](evaluatorFactory: PartitionEvaluatorFactory[T, U])(implicit arg0: ClassTag[U]): RDD[U]
  Returns a new RDD by applying an evaluator to each partition of the wrapped RDD. The given evaluator factory is serialized and sent to the executors; each task creates an evaluator with the factory and uses it to transform the data of its input partition.
  Annotations: @DeveloperApi() @Since( "3.5.0" )
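Example: a sketch of an evaluator factory, assuming the PartitionEvaluator and PartitionEvaluatorFactory traits from the org.apache.spark package as of 3.5.0; the doubling logic is arbitrary.

```scala
import org.apache.spark.{PartitionEvaluator, PartitionEvaluatorFactory}

// The factory is serialized to executors; each task calls createEvaluator()
// to obtain a fresh evaluator for its own partition.
class DoublingEvaluatorFactory extends PartitionEvaluatorFactory[Int, Int] {
  override def createEvaluator(): PartitionEvaluator[Int, Int] =
    new PartitionEvaluator[Int, Int] {
      override def eval(partitionIndex: Int, inputs: Iterator[Int]*): Iterator[Int] =
        inputs.head.map(_ * 2)
    }
}

val viaEvaluator = sc.parallelize(1 to 100, 4)
  .barrier()
  .mapPartitionsWithEvaluator(new DoublingEvaluatorFactory)
```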
- def mapPartitionsWithIndex[S](f: (Int, Iterator[T]) ⇒ Iterator[S], preservesPartitioning: Boolean = false)(implicit arg0: ClassTag[S]): RDD[S]
  :: Experimental :: Returns a new RDD by applying a function to each partition of the wrapped RDD, while tracking the index of the original partition. All tasks are launched together in a barrier stage. The interface is the same as org.apache.spark.rdd.RDD#mapPartitionsWithIndex; see the API doc there.
  Annotations: @Experimental() @Since( "3.0.0" )
  See also: org.apache.spark.rdd.RDD#mapPartitionsWithIndex
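Example: a sketch that tags each record with the index of the partition it came from; illustrative only, again assuming the sc defined earlier.

```scala
val tagged = sc.parallelize(Seq("a", "b", "c", "d"), 2)
  .barrier()
  .mapPartitionsWithIndex { (idx, iter) =>
    iter.map(value => s"partition-$idx: $value") // idx is the original partition's index
  }
```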
- final def ne(arg0: AnyRef): Boolean
  Definition Classes: AnyRef
- final def notify(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- final def notifyAll(): Unit
  Definition Classes: AnyRef
  Annotations: @native()
- final def synchronized[T0](arg0: ⇒ T0): T0
  Definition Classes: AnyRef
- def toString(): String
  Definition Classes: AnyRef → Any
- final def wait(): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long, arg1: Int): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... )
- final def wait(arg0: Long): Unit
  Definition Classes: AnyRef
  Annotations: @throws( ... ) @native()