org.apache.spark

SparkEnv

class SparkEnv extends AnyRef

Holds all the runtime environment objects for a running Spark instance (either master or worker), including the serializer, Akka actor system, block manager, map output tracker, etc. Currently Spark code finds the SparkEnv through a thread-local variable, so each thread that accesses these objects needs to have the right SparkEnv set. You can get the current environment with SparkEnv.get (e.g. after creating a SparkContext) and set it with SparkEnv.set.

Linear Supertypes
AnyRef, Any
Ordering
  1. Alphabetic
  2. By inheritance
Inherited
  1. Hide All
  2. Show All
  1. SparkEnv
  2. AnyRef
  3. Any
Visibility
  1. Public
  2. All

Instance Constructors

  1. new SparkEnv(executorId: String, actorSystem: ActorSystem, serializerManager: SerializerManager, serializer: Serializer, closureSerializer: Serializer, cacheManager: CacheManager, mapOutputTracker: MapOutputTracker, shuffleFetcher: ShuffleFetcher, broadcastManager: BroadcastManager, blockManager: BlockManager, connectionManager: ConnectionManager, httpFileServer: HttpFileServer, sparkFilesDir: String, metricsSystem: MetricsSystem)

Value Members

  1. final def !=(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  2. final def !=(arg0: Any): Boolean

    Definition Classes
    Any
  3. final def ##(): Int

    Definition Classes
    AnyRef → Any
  4. final def ==(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  5. final def ==(arg0: Any): Boolean

    Definition Classes
    Any
  6. val actorSystem: ActorSystem

  7. final def asInstanceOf[T0]: T0

    Definition Classes
    Any
  8. val blockManager: BlockManager

  9. val broadcastManager: BroadcastManager

  10. val cacheManager: CacheManager

  11. def clone(): AnyRef

    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws()
  12. val closureSerializer: Serializer

  13. val connectionManager: ConnectionManager

  14. def createPythonWorker(pythonExec: String, envVars: Map[String, String]): Socket

  15. final def eq(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  16. def equals(arg0: Any): Boolean

    Definition Classes
    AnyRef → Any
  17. val executorId: String

  18. def finalize(): Unit

    Attributes
    protected[lang]
    Definition Classes
    AnyRef
    Annotations
    @throws()
  19. final def getClass(): java.lang.Class[_]

    Definition Classes
    AnyRef → Any
  20. val hadoop: SparkHadoopUtil

  21. def hashCode(): Int

    Definition Classes
    AnyRef → Any
  22. val httpFileServer: HttpFileServer

  23. final def isInstanceOf[T0]: Boolean

    Definition Classes
    Any
  24. val mapOutputTracker: MapOutputTracker

  25. val metricsSystem: MetricsSystem

  26. final def ne(arg0: AnyRef): Boolean

    Definition Classes
    AnyRef
  27. final def notify(): Unit

    Definition Classes
    AnyRef
  28. final def notifyAll(): Unit

    Definition Classes
    AnyRef
  29. val serializer: Serializer

  30. val serializerManager: SerializerManager

  31. val shuffleFetcher: ShuffleFetcher

  32. val sparkFilesDir: String

  33. def stop(): Unit

  34. final def synchronized[T0](arg0: ⇒ T0): T0

    Definition Classes
    AnyRef
  35. def toString(): String

    Definition Classes
    AnyRef → Any
  36. final def wait(): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws()
  37. final def wait(arg0: Long, arg1: Int): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws()
  38. final def wait(arg0: Long): Unit

    Definition Classes
    AnyRef
    Annotations
    @throws()

Inherited from AnyRef

Inherited from Any