public class JobProgressListener extends Object implements SparkListener, Logging
All access to the data structures in this class must be synchronized on the class, since the UI thread and the EventBus loop may otherwise be reading and updating the internal data structures concurrently.
| Constructor and Description |
| --- |
JobProgressListener(SparkConf conf) |
| Modifier and Type | Method and Description |
| --- | --- |
scala.collection.mutable.HashMap<Object,StageInfo> |
activeStages() |
scala.collection.Seq<BlockManagerId> |
blockManagerIds() |
scala.collection.mutable.ListBuffer<StageInfo> |
completedStages() |
static String |
DEFAULT_POOL_NAME() |
static int |
DEFAULT_RETAINED_STAGES() |
scala.collection.mutable.HashMap<String,BlockManagerId> |
executorIdToBlockManagerId() |
scala.collection.mutable.ListBuffer<StageInfo> |
failedStages() |
void |
onBlockManagerAdded(SparkListenerBlockManagerAdded blockManagerAdded)
Called when a new block manager has joined.
|
void |
onBlockManagerRemoved(SparkListenerBlockManagerRemoved blockManagerRemoved)
Called when an existing block manager has been removed.
|
void |
onEnvironmentUpdate(SparkListenerEnvironmentUpdate environmentUpdate)
Called when environment properties have been updated
|
void |
onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate executorMetricsUpdate)
Called when the driver receives task metrics from an executor in a heartbeat.
|
void |
onStageCompleted(SparkListenerStageCompleted stageCompleted)
Called when a stage completes successfully or fails, with information on the completed stage.
|
void |
onStageSubmitted(SparkListenerStageSubmitted stageSubmitted)
For FIFO scheduling, all stages are contained in the "default" pool, but the "default" pool here is meaningless.
|
void |
onTaskEnd(SparkListenerTaskEnd taskEnd)
Called when a task ends.
|
void |
onTaskGettingResult(SparkListenerTaskGettingResult taskGettingResult)
Called when a task begins remotely fetching its result (will not be called for tasks that do
not need to fetch the result remotely).
|
void |
onTaskStart(SparkListenerTaskStart taskStart)
Called when a task starts.
|
scala.collection.mutable.HashMap<String,scala.collection.mutable.HashMap<Object,StageInfo>> |
poolToActiveStages() |
int |
retainedStages() |
scala.Option<scala.Enumeration.Value> |
schedulingMode() |
scala.collection.mutable.HashMap<scala.Tuple2<Object,Object>,org.apache.spark.ui.jobs.UIData.StageUIData> |
stageIdToData() |
void |
updateAggregateMetrics(org.apache.spark.ui.jobs.UIData.StageUIData stageData,
String execId,
org.apache.spark.executor.TaskMetrics taskMetrics,
scala.Option<org.apache.spark.executor.TaskMetrics> oldMetrics)
Upon receiving new metrics for a task, updates the per-stage and per-executor-per-stage
aggregate metrics by calculating deltas between the currently recorded metrics and the new
metrics.
|
equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
onApplicationEnd, onApplicationStart, onJobEnd, onJobStart, onUnpersistRDD
initialized, initializeIfNecessary, initializeLogging, initLock, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public JobProgressListener(SparkConf conf)
public static String DEFAULT_POOL_NAME()
public static int DEFAULT_RETAINED_STAGES()
public int retainedStages()
public scala.collection.mutable.HashMap<Object,StageInfo> activeStages()
public scala.collection.mutable.HashMap<scala.Tuple2<Object,Object>,org.apache.spark.ui.jobs.UIData.StageUIData> stageIdToData()
public scala.collection.mutable.ListBuffer<StageInfo> completedStages()
public scala.collection.mutable.ListBuffer<StageInfo> failedStages()
public scala.collection.mutable.HashMap<String,scala.collection.mutable.HashMap<Object,StageInfo>> poolToActiveStages()
public scala.collection.mutable.HashMap<String,BlockManagerId> executorIdToBlockManagerId()
public scala.Option<scala.Enumeration.Value> schedulingMode()
public scala.collection.Seq<BlockManagerId> blockManagerIds()
public void onStageCompleted(SparkListenerStageCompleted stageCompleted)
SparkListener
onStageCompleted
in interface SparkListener
public void onStageSubmitted(SparkListenerStageSubmitted stageSubmitted)
onStageSubmitted
in interface SparkListener
public void onTaskStart(SparkListenerTaskStart taskStart)
SparkListener
onTaskStart
in interface SparkListener
public void onTaskGettingResult(SparkListenerTaskGettingResult taskGettingResult)
SparkListener
onTaskGettingResult
in interface SparkListener
public void onTaskEnd(SparkListenerTaskEnd taskEnd)
SparkListener
onTaskEnd
in interface SparkListener
public void updateAggregateMetrics(org.apache.spark.ui.jobs.UIData.StageUIData stageData, String execId, org.apache.spark.executor.TaskMetrics taskMetrics, scala.Option<org.apache.spark.executor.TaskMetrics> oldMetrics)
public void onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate executorMetricsUpdate)
SparkListener
onExecutorMetricsUpdate
in interface SparkListener
public void onEnvironmentUpdate(SparkListenerEnvironmentUpdate environmentUpdate)
SparkListener
onEnvironmentUpdate
in interface SparkListener
public void onBlockManagerAdded(SparkListenerBlockManagerAdded blockManagerAdded)
SparkListener
onBlockManagerAdded
in interface SparkListener
public void onBlockManagerRemoved(SparkListenerBlockManagerRemoved blockManagerRemoved)
SparkListener
onBlockManagerRemoved
in interface SparkListener