package scheduler
Spark's scheduling components. This includes the org.apache.spark.scheduler.DAGScheduler and the lower-level org.apache.spark.scheduler.TaskScheduler.
- Source: package.scala
Type Members
- case class AccumulableInfo extends Product with Serializable
:: DeveloperApi :: Information about an org.apache.spark.util.AccumulatorV2 modified during a task or stage.
- Annotations
- @DeveloperApi()
- Note
Once this is JSON serialized, the types of update and value will be lost and be cast to strings. This is because the user can define an accumulator of any type and it will be difficult to preserve the type in consumers of the event log. This does not apply to internal accumulators that represent task level metrics.
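To make the note above concrete, here is a minimal sketch of a listener that reads the AccumulableInfo entries attached to a completed stage. The class name and the printed output are illustrative, not part of the API; only SparkListener, SparkListenerStageCompleted, StageInfo.accumulables and the AccumulableInfo fields come from Spark.
```scala
import org.apache.spark.scheduler.{SparkListener, SparkListenerStageCompleted}

// Sketch: log each accumulator reported for a completed stage.
class AccumulableLoggingListener extends SparkListener {
  override def onStageCompleted(event: SparkListenerStageCompleted): Unit = {
    event.stageInfo.accumulables.values.foreach { acc =>
      // update and value are Option[Any] here; after JSON serialization in the
      // event log both are rendered as strings, as described in the note above.
      val name = acc.name.getOrElse("<unnamed>")
      println(s"accumulator $name: update=${acc.update}, value=${acc.value}")
    }
  }
}
```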
- class InputFormatInfo extends Logging
:: DeveloperApi :: Parses and holds information about inputFormat (and files) specified as a parameter.
- Annotations
- @DeveloperApi()
- sealed trait JobResult extends AnyRef
:: DeveloperApi :: A result of a job in the DAGScheduler.
- Annotations
- @DeveloperApi()
- class MiscellaneousProcessDetails extends Serializable
:: DeveloperApi :: Stores information about a Miscellaneous Process to pass from the scheduler to SparkListeners.
- Annotations
- @DeveloperApi() @Since("3.2.0")
- abstract class SparkListener extends SparkListenerInterface
:: DeveloperApi :: A default implementation for SparkListenerInterface that has no-op implementations for all callbacks. Note that this is an internal interface which might change in different Spark releases.
- Annotations
- @DeveloperApi()
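Because SparkListener supplies no-op defaults, a custom listener only overrides the callbacks it needs. The sketch below is illustrative (the class name and log lines are made up); it is registered on an existing SparkContext, assumed to be in scope as sc, via addSparkListener.
```scala
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd, SparkListenerTaskEnd}

// Sketch: override only the callbacks of interest; all others stay no-ops.
class JobTimingListener extends SparkListener {
  override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
    println(s"task ${taskEnd.taskInfo.taskId} ended in stage ${taskEnd.stageId} (${taskEnd.reason})")
  }

  override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
    println(s"job ${jobEnd.jobId} finished at ${jobEnd.time} with result ${jobEnd.jobResult}")
  }
}

// Registration on an existing SparkContext (assumed in scope as `sc`):
// sc.addSparkListener(new JobTimingListener)
```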
- case class SparkListenerApplicationEnd(time: Long, exitCode: Option[Int] = None) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerApplicationStart(appName: String, appId: Option[String], time: Long, sparkUser: String, appAttemptId: Option[String], driverLogs: Option[Map[String, String]] = None, driverAttributes: Option[Map[String, String]] = None) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerBlockManagerAdded(time: Long, blockManagerId: BlockManagerId, maxMem: Long, maxOnHeapMem: Option[Long] = None, maxOffHeapMem: Option[Long] = None) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerBlockManagerRemoved(time: Long, blockManagerId: BlockManagerId) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerBlockUpdated(blockUpdatedInfo: BlockUpdatedInfo) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerEnvironmentUpdate(environmentDetails: Map[String, Seq[(String, String)]]) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- trait SparkListenerEvent extends AnyRef
- Annotations
- @DeveloperApi() @JsonTypeInfo()
- case class SparkListenerExecutorAdded(time: Long, executorId: String, executorInfo: ExecutorInfo) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerExecutorExcluded(time: Long, executorId: String, taskFailures: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- case class SparkListenerExecutorExcludedForStage(time: Long, executorId: String, taskFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- case class SparkListenerExecutorMetricsUpdate(execId: String, accumUpdates: Seq[(Long, Int, Int, Seq[AccumulableInfo])], executorUpdates: Map[(Int, Int), ExecutorMetrics] = Map.empty) extends SparkListenerEvent with Product with Serializable
Periodic updates from executors.
- execId: executor id
- accumUpdates: sequence of (taskId, stageId, stageAttemptId, accumUpdates)
- executorUpdates: executor level per-stage metrics updates
- Annotations
- @DeveloperApi()
- Since
3.1.0
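A minimal sketch of consuming these periodic updates; the listener name and output format are illustrative, not part of the API.
```scala
import org.apache.spark.scheduler.{SparkListener, SparkListenerExecutorMetricsUpdate}

// Sketch: unpack the (taskId, stageId, stageAttemptId, accumUpdates) tuples
// delivered with each periodic executor update.
class HeartbeatListener extends SparkListener {
  override def onExecutorMetricsUpdate(update: SparkListenerExecutorMetricsUpdate): Unit = {
    update.accumUpdates.foreach { case (taskId, stageId, stageAttemptId, accums) =>
      println(s"executor ${update.execId}: task $taskId (stage $stageId attempt $stageAttemptId) " +
        s"reported ${accums.size} accumulator updates")
    }
  }
}
```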
- case class SparkListenerExecutorRemoved(time: Long, executorId: String, reason: String) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerExecutorUnexcluded(time: Long, executorId: String) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerJobEnd(jobId: Int, time: Long, jobResult: JobResult) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerJobStart(jobId: Int, time: Long, stageInfos: Seq[StageInfo], properties: Properties = null) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerLogStart(sparkVersion: String) extends SparkListenerEvent with Product with Serializable
An internal class that describes the metadata of an event log.
- Annotations
- @DeveloperApi()
- case class SparkListenerMiscellaneousProcessAdded(time: Long, processId: String, info: MiscellaneousProcessDetails) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.2.0")
- case class SparkListenerNodeExcluded(time: Long, hostId: String, executorFailures: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- case class SparkListenerNodeExcludedForStage(time: Long, hostId: String, executorFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- case class SparkListenerNodeUnexcluded(time: Long, hostId: String) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- case class SparkListenerResourceProfileAdded(resourceProfile: ResourceProfile) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- case class SparkListenerSpeculativeTaskSubmitted(stageId: Int, stageAttemptId: Int = 0) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerStageCompleted(stageInfo: StageInfo) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerStageExecutorMetrics(execId: String, stageId: Int, stageAttemptId: Int, executorMetrics: ExecutorMetrics) extends SparkListenerEvent with Product with Serializable
Peak metric values for the executor for the stage, written to the history log at stage completion.
- execId: executor id
- stageId: stage id
- stageAttemptId: stage attempt
- executorMetrics: executor level metrics peak values
- Annotations
- @DeveloperApi()
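A sketch of reading these peak values in a listener; "JVMHeapMemory" is one of the built-in executor metric names, while the class name and output format are illustrative.
```scala
import org.apache.spark.scheduler.{SparkListener, SparkListenerStageExecutorMetrics}

// Sketch: log one peak metric per executor when a stage's peaks are reported.
class PeakMemoryListener extends SparkListener {
  override def onStageExecutorMetrics(event: SparkListenerStageExecutorMetrics): Unit = {
    val peakHeap = event.executorMetrics.getMetricValue("JVMHeapMemory")
    println(s"stage ${event.stageId} attempt ${event.stageAttemptId}, executor ${event.execId}: " +
      s"peak JVM heap = $peakHeap bytes")
  }
}
```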
- case class SparkListenerStageSubmitted(stageInfo: StageInfo, properties: Properties = null) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerTaskEnd(stageId: Int, stageAttemptId: Int, taskType: String, reason: TaskEndReason, taskInfo: TaskInfo, taskExecutorMetrics: ExecutorMetrics, taskMetrics: TaskMetrics) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerTaskGettingResult(taskInfo: TaskInfo) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerTaskStart(stageId: Int, stageAttemptId: Int, taskInfo: TaskInfo) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerUnpersistRDD(rddId: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi()
- case class SparkListenerUnschedulableTaskSetAdded(stageId: Int, stageAttemptId: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- case class SparkListenerUnschedulableTaskSetRemoved(stageId: Int, stageAttemptId: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @Since("3.1.0")
- class SplitInfo extends AnyRef
- Annotations
- @DeveloperApi()
- class StageInfo extends AnyRef
:: DeveloperApi :: Stores information about a stage to pass from the scheduler to SparkListeners.
- Annotations
- @DeveloperApi()
- class StatsReportListener extends SparkListener with Logging
:: DeveloperApi :: Simple SparkListener that logs a few summary statistics when each stage completes.
- Annotations
- @DeveloperApi()
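StatsReportListener has a zero-argument constructor, so it can be attached either programmatically or through configuration. A sketch of both follows; the application name is arbitrary.
```scala
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.scheduler.StatsReportListener

// Programmatic registration (application name is arbitrary):
val conf = new SparkConf().setAppName("stats-report-example")
val sc = new SparkContext(conf)
sc.addSparkListener(new StatsReportListener)

// Or via configuration, e.g. on spark-submit:
//   --conf spark.extraListeners=org.apache.spark.scheduler.StatsReportListener
```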
- class TaskInfo extends Cloneable
:: DeveloperApi :: Information about a running task attempt inside a TaskSet.
- Annotations
- @DeveloperApi()
Deprecated Type Members
- case class SparkListenerExecutorBlacklisted(time: Long, executorId: String, taskFailures: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @DeveloperApi() @deprecated
- Deprecated
(Since version 3.1.0) use SparkListenerExecutorExcluded instead
- case class SparkListenerExecutorBlacklistedForStage(time: Long, executorId: String, taskFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @deprecated @DeveloperApi()
- Deprecated
(Since version 3.1.0) use SparkListenerExecutorExcludedForStage instead
- case class SparkListenerExecutorUnblacklisted(time: Long, executorId: String) extends SparkListenerEvent with Product with Serializable
- Annotations
- @deprecated @DeveloperApi()
- Deprecated
(Since version 3.1.0) use SparkListenerExecutorUnexcluded instead
- case class SparkListenerNodeBlacklisted(time: Long, hostId: String, executorFailures: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @deprecated @DeveloperApi()
- Deprecated
(Since version 3.1.0) use SparkListenerNodeExcluded instead
- case class SparkListenerNodeBlacklistedForStage(time: Long, hostId: String, executorFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent with Product with Serializable
- Annotations
- @deprecated @DeveloperApi()
- Deprecated
(Since version 3.1.0) use SparkListenerNodeExcludedForStage instead
- case class SparkListenerNodeUnblacklisted(time: Long, hostId: String) extends SparkListenerEvent with Product with Serializable
- Annotations
- @deprecated @DeveloperApi()
- Deprecated
(Since version 3.1.0) use SparkListenerNodeUnexcluded instead
Value Members
- object InputFormatInfo
- case object JobSucceeded extends JobResult with Product with Serializable
- Annotations
- @DeveloperApi()
- object SchedulingMode extends Enumeration
"FAIR" and "FIFO" determines which policy is used to order tasks amongst a Schedulable's sub-queues "NONE" is used when the a Schedulable has no sub-queues.
- object SplitInfo
- object TaskLocality extends Enumeration
- Annotations
- @DeveloperApi()