public class StatsReportListener extends SparkListener implements Logging
Constructor and Description |
---|
StatsReportListener() |
Modifier and Type | Method and Description |
---|---|
static scala.Option<org.apache.spark.util.Distribution> |
extractDoubleDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric) |
static scala.Option<org.apache.spark.util.Distribution> |
extractLongDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric) |
static long |
hours() |
static String |
millisToString(long ms)
Reformat a time interval in milliseconds to a prettier format for output.
|
static long |
minutes() |
static void |
onApplicationEnd(SparkListenerApplicationEnd applicationEnd) |
static void |
onApplicationStart(SparkListenerApplicationStart applicationStart) |
static void |
onBlockManagerAdded(SparkListenerBlockManagerAdded blockManagerAdded) |
static void |
onBlockManagerRemoved(SparkListenerBlockManagerRemoved blockManagerRemoved) |
static void |
onBlockUpdated(SparkListenerBlockUpdated blockUpdated) |
static void |
onEnvironmentUpdate(SparkListenerEnvironmentUpdate environmentUpdate) |
static void |
onExecutorAdded(SparkListenerExecutorAdded executorAdded) |
static void |
onExecutorBlacklisted(SparkListenerExecutorBlacklisted executorBlacklisted) |
static void |
onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate executorMetricsUpdate) |
static void |
onExecutorRemoved(SparkListenerExecutorRemoved executorRemoved) |
static void |
onExecutorUnblacklisted(SparkListenerExecutorUnblacklisted executorUnblacklisted) |
static void |
onJobEnd(SparkListenerJobEnd jobEnd) |
static void |
onJobStart(SparkListenerJobStart jobStart) |
static void |
onNodeBlacklisted(SparkListenerNodeBlacklisted nodeBlacklisted) |
static void |
onNodeUnblacklisted(SparkListenerNodeUnblacklisted nodeUnblacklisted) |
static void |
onOtherEvent(SparkListenerEvent event) |
static void |
onSpeculativeTaskSubmitted(SparkListenerSpeculativeTaskSubmitted speculativeTask) |
void |
onStageCompleted(SparkListenerStageCompleted stageCompleted)
Called when a stage completes successfully or fails, with information on the completed stage.
|
static void |
onStageSubmitted(SparkListenerStageSubmitted stageSubmitted) |
void |
onTaskEnd(SparkListenerTaskEnd taskEnd)
Called when a task ends.
|
static void |
onTaskGettingResult(SparkListenerTaskGettingResult taskGettingResult) |
static void |
onTaskStart(SparkListenerTaskStart taskStart) |
static void |
onUnpersistRDD(SparkListenerUnpersistRDD unpersistRDD) |
static int[] |
percentiles() |
static String |
percentilesHeader() |
static double[] |
probabilities() |
static long |
seconds() |
static void |
showBytesDistribution(String heading,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric,
scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void |
showBytesDistribution(String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt) |
static void |
showBytesDistribution(String heading,
org.apache.spark.util.Distribution dist) |
static void |
showDistribution(String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt,
scala.Function1<Object,String> formatNumber) |
static void |
showDistribution(String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt,
String format) |
static void |
showDistribution(String heading,
org.apache.spark.util.Distribution d,
scala.Function1<Object,String> formatNumber) |
static void |
showDistribution(String heading,
String format,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric,
scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void |
showMillisDistribution(String heading,
scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric,
scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics) |
static void |
showMillisDistribution(String heading,
scala.Option<org.apache.spark.util.Distribution> dOpt) |
onApplicationEnd, onApplicationStart, onBlockManagerAdded, onBlockManagerRemoved, onBlockUpdated, onEnvironmentUpdate, onExecutorAdded, onExecutorBlacklisted, onExecutorMetricsUpdate, onExecutorRemoved, onExecutorUnblacklisted, onJobEnd, onJobStart, onNodeBlacklisted, onNodeUnblacklisted, onOtherEvent, onSpeculativeTaskSubmitted, onStageSubmitted, onTaskGettingResult, onTaskStart, onUnpersistRDD
equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait
initializeLogging, initializeLogIfNecessary, initializeLogIfNecessary, isTraceEnabled, log_, log, logDebug, logDebug, logError, logError, logInfo, logInfo, logName, logTrace, logTrace, logWarning, logWarning
public static int[] percentiles()
public static double[] probabilities()
public static String percentilesHeader()
public static scala.Option<org.apache.spark.util.Distribution> extractDoubleDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric)
public static scala.Option<org.apache.spark.util.Distribution> extractLongDistribution(scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric)
public static void showDistribution(String heading, org.apache.spark.util.Distribution d, scala.Function1<Object,String> formatNumber)
public static void showDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, scala.Function1<Object,String> formatNumber)
public static void showDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt, String format)
public static void showDistribution(String heading, String format, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static void showBytesDistribution(String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static void showBytesDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt)
public static void showBytesDistribution(String heading, org.apache.spark.util.Distribution dist)
public static void showMillisDistribution(String heading, scala.Option<org.apache.spark.util.Distribution> dOpt)
public static void showMillisDistribution(String heading, scala.Function2<TaskInfo,org.apache.spark.executor.TaskMetrics,Object> getMetric, scala.collection.Seq<scala.Tuple2<TaskInfo,org.apache.spark.executor.TaskMetrics>> taskInfoMetrics)
public static long seconds()
public static long minutes()
public static long hours()
public static String millisToString(long ms)
ms - (undocumented)
public static void onStageSubmitted(SparkListenerStageSubmitted stageSubmitted)
public static void onTaskStart(SparkListenerTaskStart taskStart)
public static void onTaskGettingResult(SparkListenerTaskGettingResult taskGettingResult)
public static void onJobStart(SparkListenerJobStart jobStart)
public static void onJobEnd(SparkListenerJobEnd jobEnd)
public static void onEnvironmentUpdate(SparkListenerEnvironmentUpdate environmentUpdate)
public static void onBlockManagerAdded(SparkListenerBlockManagerAdded blockManagerAdded)
public static void onBlockManagerRemoved(SparkListenerBlockManagerRemoved blockManagerRemoved)
public static void onUnpersistRDD(SparkListenerUnpersistRDD unpersistRDD)
public static void onApplicationStart(SparkListenerApplicationStart applicationStart)
public static void onApplicationEnd(SparkListenerApplicationEnd applicationEnd)
public static void onExecutorMetricsUpdate(SparkListenerExecutorMetricsUpdate executorMetricsUpdate)
public static void onExecutorAdded(SparkListenerExecutorAdded executorAdded)
public static void onExecutorRemoved(SparkListenerExecutorRemoved executorRemoved)
public static void onExecutorBlacklisted(SparkListenerExecutorBlacklisted executorBlacklisted)
public static void onExecutorUnblacklisted(SparkListenerExecutorUnblacklisted executorUnblacklisted)
public static void onNodeBlacklisted(SparkListenerNodeBlacklisted nodeBlacklisted)
public static void onNodeUnblacklisted(SparkListenerNodeUnblacklisted nodeUnblacklisted)
public static void onBlockUpdated(SparkListenerBlockUpdated blockUpdated)
public static void onSpeculativeTaskSubmitted(SparkListenerSpeculativeTaskSubmitted speculativeTask)
public static void onOtherEvent(SparkListenerEvent event)
public void onTaskEnd(SparkListenerTaskEnd taskEnd)
onTaskEnd
in class SparkListener
taskEnd - (undocumented)
public void onStageCompleted(SparkListenerStageCompleted stageCompleted)
onStageCompleted
in class SparkListener
stageCompleted
- (undocumented)